prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>plotMesonetOrgData.py<|end_file_name|><|fim▁begin|>import matplotlib.pyplot as plt
import numpy as np
from matplotlib.lines import lineStyles
Light_cnames={'mistyrose':'#FFE4E1','navajowhite':'#FFDEAD','seashell':'#FFF5EE','papayawhip':'#FFEFD5','blanchedalmond':'#FFEBCD','white':'#FFFFFF','mintcream':'#F5FFFA','antiquewhite':'#FAEBD7','moccasin':'#FFE4B5','ivory':'#FFFFF0','lightgoldenrodyellow':'#FAFAD2','lightblue':'#ADD8E6','floralwhite':'#FFFAF0','ghostwhite':'#F8F8FF','honeydew':'#F0FFF0','linen':'#FAF0E6','snow':'#FFFAFA','lightcyan':'#E0FFFF','cornsilk':'#FFF8DC','bisque':'#FFE4C4','aliceblue':'#F0F8FF','gainsboro':'#DCDCDC','lemonchiffon':'#FFFACD','lightyellow':'#FFFFE0','lavenderblush':'#FFF0F5','whitesmoke':'#F5F5F5','beige':'#F5F5DC','azure':'#F0FFFF','oldlace':'#FDF5E6'}
def plot10seperate():
mons=["201603","201604","201605","201606","201607","201608","201609","201610","201611","201612","201701","201702","201703","201704","201705","201706"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
rootpath="F:/workspace/git/TranWeatherProject/data/mesonet_data/"
for mon in mons:
for day in days:
print mon+day
fileName=rootpath+mon+day+".txt"
day_data=[]
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])<|fim▁hole|> fig=plt.figure(1)
fig.add_subplot(10,1,1)
plt.plot(X,day_data[0][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[0][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,2)
plt.plot(X,day_data[1][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[1][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,3)
plt.plot(X,day_data[2][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[2][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,4)
plt.plot(X,day_data[3][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[3][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,5)
plt.plot(X,day_data[4][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[4][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,6)
plt.plot(X,day_data[5][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[5][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,7)
plt.plot(X,day_data[6][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[6][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,8)
plt.plot(X,day_data[7][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[7][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,9)
plt.plot(X,day_data[8][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period From 00:00am ~23:59')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[8][0]+" Station Date: "+mon+day +"Temperature")
fig.add_subplot(10,1,10)
plt.plot(X,day_data[9][2],'b*-',linewidth='2.0', markersize=5,label='Temperature')
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylim([-20.0,60.0])
plt.xlabel('time Period')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.title(day_data[9][0]+" Station Date: "+mon+day +"Temperature")
plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
plt.show()
fig.savefig('F:/workspace/git/TranWeatherProject/outputs/mesonetPlots/'+str(mon+day)+'.png')
plt.close()
import os
def plotSignle():
mons=["201603","201604","201605","201606","201607","201608","201609"]
#mons=["201604"]
#mons=["201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
#days=[""]
sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
var_type="precip"
rootpath="F:/workspace/git/Graph-MP/data/mesonet_data/"+var_type+"/"
for mon in mons:
for day in days:
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
continue
with open(fileName,"r") as df:
for line in df.readlines():
terms=line.strip().split()
sta_name=terms[0]
data=map(float,terms[1:])
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
print sta_names[int(day_data[0][0])]
fig=plt.figure(1)
plt.plot(X,day_data[0][2],'b-',linewidth='1.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
plt.plot(X,day_data[1][2],'r-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
plt.plot(X,day_data[2][2],'k-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
plt.plot(X,day_data[3][2],'g-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
plt.plot(X,day_data[4][2],'y-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
plt.plot(X,day_data[5][2],'c-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
plt.plot(X,day_data[6][2],'m-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
if var_type=="wind":
plt.ylim([-5.0,70.0])
plt.ylabel('Avg. Wind Speed(mph)')
plt.title(mon+day +"Every 5min Avg. Wind")
elif type=="temp":
plt.ylim([-10.0,100.0])
plt.ylabel('Temperature(F)')
plt.title(mon+day +"Temperature")
else:
plt.ylim([-1.0,2.0])
plt.ylabel('Precipitation Est (Inch)')
plt.title(mon+day +"Precipitation")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
print len(X)
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time from 00:00 ~23:59,each 5min')
#plt.xlim([0.2,0.0])
plt.legend(loc='best',fontsize=8)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/'+var_type+'_plots/'+str(mon+day)+'.png')
plt.close()
def expAvg(fileName):
    """Load per-interval expected statistics from a whitespace-delimited file.

    Each line must contain at least four numeric columns: column 0 is the
    expected average, column 1 the expected minimum and column 3 the expected
    maximum (column 2 is never read).

    Args:
        fileName: path to the statistics file.

    Returns:
        (expAvgs, expMin, expMax): three parallel lists of floats, one entry
        per input line.
    """
    expAvgs=[]
    expMin=[]
    expMax=[]
    with open(fileName,"r") as oF:
        # iterate the file lazily and split each line once
        # (the original called line.strip().split() three times per line)
        for line in oF:
            cols=line.strip().split()
            expAvgs.append(float(cols[0]))
            expMin.append(float(cols[1]))
            expMax.append(float(cols[3]))
    return expAvgs,expMin,expMax
def plotCaseDays():
    """Plot one temperature figure per day with all ten stations overlaid.

    Reads F:/.../mesonet_data/<var_type>/<YYYYMMDD>.txt (one station per
    line: station index followed by one reading per 5 minutes) and writes a
    PNG per day under outputs/mesonetPlots/<var_type>_CaseStudy/.
    """
    # case-study dates of interest; the date filter below is commented out,
    # so currently every available day is plotted
    dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
    mons=["201603","201604","201605","201606","201607","201608","201609"]
    days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
    # station index -> mesonet station code
    sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
    var_type="temp"
    rootpath="F:/workspace/git/TranWeatherProject/data/mesonet_data/"+var_type+"/"
    #expRoot="F:/workspace/git/TranWeatherProject/data/mesonet_data/mesonetExpData/statExpData/"
    for mon in mons:
        for day in days:
            date=str(mon+day)
#            if date not in dates:
#                print "Not ",date
#                continue
            #expAvgs=expAvg(expRoot+mon+day+".txt")
            fileName=rootpath+mon+day+".txt"
            print fileName
            day_data=[]
            # missing days are simply skipped (not every date has a file)
            if not os.path.exists(fileName):
                print "File Not Found",fileName
                continue
            # each line: station index followed by one value per 5 minutes
            with open(fileName,"r") as df:
                for line in df.readlines():
                    terms=line.strip().split()
                    sta_name=terms[0]
                    data=map(float,terms[1:])
                    day_data.append((sta_name,mon+day,data))
            X=[i for i in range(0,len(day_data[0][2]))]
            # x tick labels: sample index plus elapsed hours (12 samples = 1h)
            label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
            labelY=[str(i) for i in range(0,100+1,5)]
            print sta_names[int(day_data[0][0])]
            fig=plt.figure(1)
            # one fixed color/style per station, label = code + index
            plt.plot(X,day_data[0][2],'b-',linewidth='2.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
            plt.plot(X,day_data[1][2],'r-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
            plt.plot(X,day_data[2][2],'k-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
            plt.plot(X,day_data[3][2],'g-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
            plt.plot(X,day_data[4][2],'y-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
            plt.plot(X,day_data[5][2],'c-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
            plt.plot(X,day_data[6][2],'m-',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
            plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
            plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
            plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='2.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
            plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
            plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
            # per-variable axis limits and titles (var_type is "temp" here,
            # so the else branch is the one taken)
            if var_type=="wind":
                #plt.ylim([-5.0,70.0])
                plt.ylabel('Avg. Wind Speed(mph)')
                plt.title(mon+day +"Every 5min Avg. Wind")
            else:
                plt.ylim([-10.0,100.0])
                plt.ylabel('Temperature(F)')
                plt.title(mon+day +"Temperature")
            #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
            plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
            #plt.yticks(np.arange(0, 100, 5.0),labelY)
            plt.tick_params(axis='both', which='major', labelsize=7)
            plt.xlabel('Time from 00:00 ~23:59,every 5min')
            #plt.xlim([0.2,0.0])
            plt.legend(loc='best',fontsize=8)
            plt.grid()
            #plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
            #plt.show()
            fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/'+var_type+'_CaseStudy/'+str(mon+day)+'.png', dpi=300)
            plt.close()
def plotSingleDays():
    """Plot all ten stations from a single test file on one figure.

    Reads data/mesonet_data/test_4.txt (one station per line: station index
    followed by 5-minute temperature samples; only the first 287 samples are
    used) and saves one PNG next to the data file.
    """
    fileName="F:/workspace/git/Graph-MP/data/mesonet_data/test_4.txt"
    # station index -> mesonet station code
    sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
    day_data=[]
    with open(fileName,"r") as df:
        for line in df.readlines():
            terms=line.strip().split()
            sta_name=terms[0]
            # terms[1:288] keeps at most 287 samples per station
            data=map(float,terms[1:288])
            # hard-coded pseudo-date tag used in the title/output name
            day_data.append((sta_name,'201603001',data))
    X=[i for i in range(0,len(day_data[0][2]))]
    # x tick labels: sample index plus elapsed hours (12 samples = 1h)
    label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
    labelY=[str(i) for i in range(0,100+1,5)]
    print sta_names[int(day_data[0][0])]
    fig=plt.figure(1)
    # one fixed color/style per station, label = code + index
    plt.plot(X,day_data[0][2],'b-',linewidth='1.0', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
    plt.plot(X,day_data[1][2],'r-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
    plt.plot(X,day_data[2][2],'k-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
    plt.plot(X,day_data[3][2],'g-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
    plt.plot(X,day_data[4][2],'y-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
    plt.plot(X,day_data[5][2],'c-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
    plt.plot(X,day_data[6][2],'m-',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
    plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
    plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
    plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.0', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
    plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
    plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
#    if var_type=="wind":
#        #plt.ylim([-5.0,70.0])
#        plt.ylabel('Avg. Wind Speed(mph)')
#        plt.title(mon+day +"Every 5min Avg. Wind")
#    else:
#        plt.ylim([-10.0,100.0])
#        plt.ylabel('Temperature(F)')
#        plt.title(mon+day +"Temperature")
    plt.ylim([-10.0,100.0])
    plt.ylabel('Temperature(F)')
    plt.title('201603001 ' +"Temperature")
    #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
    plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
    #plt.yticks(np.arange(0, 100, 5.0),labelY)
    plt.tick_params(axis='both', which='major', labelsize=7)
    plt.xlabel('Time from 00:00 ~23:59,each 5min')
    #plt.xlim([0.2,0.0])
    plt.legend(loc='best',fontsize=8)
    plt.grid()
    #plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
    #plt.show()
    fig.savefig('F:/workspace/git/Graph-MP/data/mesonet_data/201603001_4.png', dpi=300)
    plt.close()
import time
def loadTop(fileName):
    """Load the leading anomaly-detection results from a space-delimited file.

    Each input line has four space-separated fields:
    <rank> <comma-separated station indices> <date> <comma-separated window>.

    Returns:
        list of (rank, station_indices, date, window) tuples; rank is an int,
        station_indices and window are lists of ints, date is the raw string.

    NOTE(review): the ``i > 19`` check runs *after* the append, so line index
    20 is still included (up to 21 results) — confirm whether exactly 20 was
    intended before changing it; behavior is preserved here.
    """
    results=[]
    with open(fileName,"r") as rF:
        for i,line in enumerate(rF):
            terms=line.strip().split(" ")
            # list comprehensions instead of map() so the fields are real
            # lists under both Python 2 and Python 3 (py3 map() is lazy and
            # would break repeated indexing by the callers)
            results.append((int(terms[0]),
                            [int(v) for v in terms[1].split(",")],
                            terms[2],
                            [int(v) for v in terms[3].split(",")]))
            if i>19 :
                break
    return results
def plotCaseDaysSingleStation():
    """For each top-K detection result, plot wind, temperature and
    precipitation curves for the flagged stations on the flagged day.

    Station membership comes from the result's station-index list (turned
    into i0..i9 flags via exec), the detected time window is drawn as two
    dashed vertical lines, and three PNGs (wind / temp / precip) are saved
    per result.
    """
    #dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
    # one indicator variable name per station; each is set to 1/0 below
    vars=['i0','i1','i2','i3','i4','i5','i6','i7','i8','i9']
    topResults=loadTop("F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/CP/2/20multi_TopK_result-CP_baseMeanDiff_20_s_2_wMax_18_filter_TIncld_0.7_Top.txt")
    for result in topResults:
        dates=[]
        top=result[0]+1    # 1-based rank, used in titles and file names
        vals=result[1]     # station indices flagged by this result
        dates.append(result[2])    # the single date this result refers to
        # materialise locals i0..i9 as 1 (station flagged) or 0 (not)
        for i,var in enumerate(vars):
            if i in vals:
                exec "%s=%s"%(vars[i], 1)
            else:
                exec "%s=%s"%(vars[i], 0)
        print i0,i1,i2,i3,i4,i5,i6,i7,i8,i9
#        i0=0
#        i1=0
#        i2=0
#        i3=1
#        i4=1
#        i5=1
#        i6=1
#        i7=0
#        i8=0
#        i9=0
        mons=["201603","201604","201605","201606","201607","201608","201609"]
        days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
        # station index -> mesonet station code
        sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
        var_type="wind"
        rootpath="F:/workspace/git/Graph-MP/data/mesonet_data/"+var_type+"/"
        rootpath2="F:/workspace/git/Graph-MP/data/mesonet_data/temp/"
        rootpath3="F:/workspace/git/Graph-MP/data/mesonet_data/precip/"
        #expRoot="F:/workspace/git/TranWeatherProject/data/mesonet_data/mesonetExpData/statExpData/"
        for mon in mons:
            for day in days:
                date=str(mon+day)
                # only the day named by this result is plotted
                if date not in dates:
                    #print "Not ",date
                    continue
                #expAvgs=expAvg(expRoot+mon+day+".txt")
                fileName=rootpath+mon+day+".txt"
                fileName2=rootpath2+mon+day+".txt"
                fileName3=rootpath3+mon+day+".txt"
                print fileName
                if not os.path.exists(fileName):
                    print "File Not Found",fileName
                    continue
                if not os.path.exists(fileName2):
                    print "File Not Found",fileName2
                    continue
                if not os.path.exists(fileName3):
                    # NOTE(review): this message prints fileName2 although the
                    # check is for fileName3 — looks like a copy/paste slip
                    print "File Not Found",fileName2
                    continue
                # wind readings: one line per station, index then 5-min samples
                day_data=[]
                with open(fileName,"r") as df:
                    for line in df.readlines():
                        terms=line.strip().split()
                        sta_name=terms[0]
                        data=map(float,terms[1:])
                        day_data.append((sta_name,mon+day,data))
                # temperature readings, same layout
                day_data2=[]
                with open(fileName2,"r") as df2:
                    for line in df2.readlines():
                        terms=line.strip().split()
                        sta_name=terms[0]
                        data=map(float,terms[1:])
                        day_data2.append((sta_name,mon+day,data))
                # precipitation readings, same layout
                day_data3=[]
                with open(fileName3,"r") as df3:
                    for line in df3.readlines():
                        terms=line.strip().split()
                        sta_name=terms[0]
                        data=map(float,terms[1:])
                        day_data3.append((sta_name,mon+day,data))
                X=[i for i in range(0,len(day_data[0][2]))]
                # x tick labels: sample index plus elapsed hours (12 samples = 1h)
                label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
                labelY=[str(i) for i in range(0,100+1,5)]
                print sta_names[int(day_data[0][0])]
                print day_data[i3][2]
                # figure 1: wind, flagged stations only
                fig=plt.figure(1)
                if i0!=0:
                    plt.plot(X,day_data[0][2],'b-',linewidth='0.5', markersize=5,label='Wind '+sta_names[int(day_data[0][0])]+day_data[0][0])
                if i1!=0:
                    plt.plot(X,day_data[1][2],'r-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
                if i2!=0:
                    plt.plot(X,day_data[2][2],'k-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
                if i3!=0:
                    plt.plot(X,day_data[3][2],'g-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
                if i4!=0:
                    plt.plot(X,day_data[4][2],'y-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
                if i5!=0:
                    plt.plot(X,day_data[5][2],'c-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
                if i6!=0:
                    plt.plot(X,day_data[6][2],'m-',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
                if i7!=0:
                    plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
                if i8!=0:
                    plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
                if i9!=0:
                    plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='0.5', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
                # dashed verticals mark the detected anomaly window bounds
                plt.axvline(x=result[3][0], ymin=-1.0, ymax=50.0,color='k',linestyle='--')
                plt.axvline(x=result[3][1], ymin=-1.0, ymax=50.0,color='k',linestyle='--')
                plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
                plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
                plt.ylim([-1.0,50.0])
                plt.title("Top"+str(result[0]+1)+" "+mon+day +"Wind")
                #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
                plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
                plt.yticks(np.arange(-1, 50, 5.0),labelY)
                plt.tick_params(axis='both', which='major', labelsize=7)
                plt.xlabel('Time from 00:00 ~23:59,each 5min')
                plt.grid()
                #plt.xlim([0.2,0.0])
                plt.legend(loc='best',fontsize=8)
#                fig.subplots_adjust(bottom = 2)
#                fig.subplots_adjust(top = 2)
#                fig.subplots_adjust(right = 2)
#                fig.subplots_adjust(left = 0)
                #plt.plot(X,day_data2[i][2],'r-',linewidth='1.0', markersize=5,label='Temp '+sta_names[int(day_data2[i][0])]+day_data2[i][0])
                fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_wind_'+str(mon+day)+'.png', dpi=300)
                fig.clf()
                # figure 2: temperature for the same flagged stations
                fig=plt.figure(2)
                if i0!=0:
                    plt.plot(X,day_data2[0][2],'b-',linewidth='0.5', markersize=5)
                if i1!=0:
                    plt.plot(X,day_data2[1][2],'r-',linewidth='0.5', markersize=5)
                if i2!=0:
                    plt.plot(X,day_data2[2][2],'k-',linewidth='0.5', markersize=5)
                if i3!=0:
                    plt.plot(X,day_data2[3][2],'g-',linewidth='0.5', markersize=5)
                if i4!=0:
                    plt.plot(X,day_data2[4][2],'y-',linewidth='0.5', markersize=5)
                if i5!=0:
                    plt.plot(X,day_data2[5][2],'c-',linewidth='0.5', markersize=5)
                if i6!=0:
                    plt.plot(X,day_data2[6][2],'m-',linewidth='0.5', markersize=5)
                if i7!=0:
                    plt.plot(X,day_data2[7][2],color ='#B47CC7',linewidth='0.5', markersize=5)
                if i8!=0:
                    plt.plot(X,day_data2[8][2],color='#FBC15E',linewidth='0.5', markersize=5)
                if i9!=0:
                    plt.plot(X,day_data2[9][2],color='#e5ee38',linewidth='0.5', markersize=5)
#                if var_type=="wind":
#                    plt.ylim([-1.0,50.0])
#                    plt.ylabel('Avg. Wind Speed(mph)')
#                    plt.title(mon+day +"Every 5min Avg. Wind")
#                else:
#                    plt.ylim([-10.0,100.0])
#                    plt.ylabel('Temperature(F)')
#                    plt.title(mon+day +"Temperature")
                plt.axvline(x=result[3][0], ymin=-10.0, ymax=100.0,color='k',linestyle='--')
                plt.axvline(x=result[3][1], ymin=-10.0, ymax=100.0,color='k',linestyle='--')
                plt.ylim([-10.0,100.0])
                plt.title("Top"+str(result[0]+1)+" "+mon+day +"Temperature ")
                #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
                plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
                plt.yticks(np.arange(0, 100, 5.0),labelY)
                plt.tick_params(axis='both', which='major', labelsize=7)
                plt.xlabel('Time from 00:00 ~23:59,each 5min')
                plt.grid()
                #plt.xlim([0.2,0.0])
                plt.legend(loc='best',fontsize=8)
#
#                fig.subplots_adjust(bottom = 0)
#                fig.subplots_adjust(top = 1)
#                fig.subplots_adjust(right = 1)
#                fig.subplots_adjust(left = 0)
                #plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
                #plt.show()
                fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_temp_'+str(mon+day)+'.png', dpi=300)
                fig.clf()
                # figure 3: precipitation for the same flagged stations
                fig=plt.figure(3)
                if i0!=0:
                    plt.plot(X,day_data3[0][2],'b-',linewidth='0.5', markersize=5)
                if i1!=0:
                    plt.plot(X,day_data3[1][2],'r-',linewidth='0.5', markersize=5)
                if i2!=0:
                    plt.plot(X,day_data3[2][2],'k-',linewidth='0.5', markersize=5)
                if i3!=0:
                    plt.plot(X,day_data3[3][2],'g-',linewidth='0.5', markersize=5)
                if i4!=0:
                    plt.plot(X,day_data3[4][2],'y-',linewidth='0.5', markersize=5)
                if i5!=0:
                    plt.plot(X,day_data3[5][2],'c-',linewidth='0.5', markersize=5)
                if i6!=0:
                    plt.plot(X,day_data3[6][2],'m-',linewidth='0.5', markersize=5)
                if i7!=0:
                    plt.plot(X,day_data3[7][2],color ='#B47CC7',linewidth='0.5', markersize=5)
                if i8!=0:
                    plt.plot(X,day_data3[8][2],color='#FBC15E',linewidth='0.5', markersize=5)
                if i9!=0:
                    plt.plot(X,day_data3[9][2],color='#e5ee38',linewidth='0.5', markersize=5)
#                if var_type=="wind":
#                    plt.ylim([-1.0,50.0])
#                    plt.ylabel('Avg. Wind Speed(mph)')
#                    plt.title(mon+day +"Every 5min Avg. Wind")
#                else:
#                    plt.ylim([-10.0,100.0])
#                    plt.ylabel('Temperature(F)')
#                    plt.title(mon+day +"Temperature")
                plt.axvline(x=result[3][0], ymin=-0.2, ymax=2.0,color='k',linestyle='--')
                plt.axvline(x=result[3][1], ymin=-0.2, ymax=2.0,color='k',linestyle='--')
                plt.ylim([-0.2,2.0])
                plt.title("Top"+str(result[0]+1)+" "+mon+day +"Precipitation ")
                #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
                plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
                #plt.yticks(np.arange(-0.2, 2.0, 0.5),labelY)
                plt.tick_params(axis='both', which='major', labelsize=7)
                plt.xlabel('Time from 00:00 ~23:59,each 5min')
                plt.grid()
                #plt.xlim([0.2,0.0])
                plt.legend(loc='best',fontsize=8)
#                fig.subplots_adjust(bottom = 0)
#                fig.subplots_adjust(top = 1)
#                fig.subplots_adjust(right = 1)
#                fig.subplots_adjust(left = 0)
                #plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
                #plt.show()
                fig.savefig('F:/workspace/git/Graph-MP/outputs/mesonetPlots/multi_CaseStudy/mvPlots/'+str(top)+'_precip_'+str(mon+day)+'.png', dpi=300)
                fig.clf()
                plt.close()
def plotAllDays():
    """Plot one figure per day per variable type with all ten stations.

    For every variable sub-folder under data/mesonet_data/ this reads the
    daily files (one station per line: station index then one reading per
    5 minutes) and writes a PNG to outputs/mesonetPlots/<var_type>_plots/.
    """
    root="F:/workspace/git/WeatherTransportationProject/"
    #dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
    dates=[]
    #"201603","201604","201605","201606","201607","201608"
    mons=["201609","201610","201611","201612","201701","201702","201703","201704","201705","201706"]
    days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
    # station index -> mesonet station code
    sta_names={0:"BATA",1:"SBRI",2:"WATE",3:"JORD",4:"CSQR",5:"WEST",6:"COLD",7:"SPRA",8:"COBL",9:"STEP"}
    # one sub-folder per plotted variable
    var_types=["temp","temp9","press","wind","windDir","windMax","rh","rad"]
    #var_types=["wind"]
    for var_type in var_types:
        rootpath=root+"data/mesonet_data/"+var_type+"/"
        #expRoot="F:/workspace/git/Graph-MP/data/mesonet_data/mesonetExpData/statExpData/"
        for mon in mons:
            for day in days:
                date=str(mon+day)
#                if date in dates:
#                    print "Not ",date
#                    continue
                fileName=rootpath+mon+day+".txt"
                print fileName
                day_data=[]
                # missing days are simply skipped
                if not os.path.exists(fileName):
                    print "File Not Found",fileName
                    continue
                # each line: station index followed by one value per 5 minutes
                with open(fileName,"r") as df:
                    for line in df.readlines():
                        terms=line.strip().split()
                        sta_name=terms[0]
                        data=map(float,terms[1:])
                        day_data.append((sta_name,mon+day,data))
                X=[i for i in range(0,len(day_data[0][2]))]
                # x tick labels: sample index plus elapsed hours (12 samples = 1h)
                label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
                labelY=[str(i) for i in range(0,100+1,5)]
                print sta_names[int(day_data[0][0])]
                fig=plt.figure(1)
                # one fixed color/style per station, label = code + index
                plt.plot(X,day_data[0][2],'b-',linewidth='1.5', markersize=5,label=sta_names[int(day_data[0][0])]+day_data[0][0])
                plt.plot(X,day_data[1][2],'r-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[1][0])])+day_data[1][0])
                plt.plot(X,day_data[2][2],'k-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[2][0])])+day_data[2][0])
                plt.plot(X,day_data[3][2],'g-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[3][0])])+day_data[3][0])
                plt.plot(X,day_data[4][2],'y-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[4][0])])+day_data[4][0])
                plt.plot(X,day_data[5][2],'c-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[5][0])])+day_data[5][0])
                plt.plot(X,day_data[6][2],'m-',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[6][0])])+day_data[6][0])
                plt.plot(X,day_data[7][2],color ='#B47CC7',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[7][0])])+day_data[7][0])
                plt.plot(X,day_data[8][2],color='#FBC15E',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[8][0])])+day_data[8][0])
                plt.plot(X,day_data[9][2],color='#e5ee38',linewidth='1.5', markersize=5,label=str(sta_names[int(day_data[9][0])])+day_data[9][0])
                plt.legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3,ncol=1, mode="expand", borderaxespad=0.)
                plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
                # per-variable axis limits, labels and titles
                if var_type=="wind":
                    plt.ylim([-5.0,70.0])
                    plt.ylabel('Average Wind Speed(mph)')
                    plt.title(mon+day +" Every 5min Average Wind Speed")
                elif var_type=="windMax":
                    plt.ylim([-5.0,70.0])
                    plt.ylabel('Max Wind Speed(mph)')
                    plt.title(mon+day +"Every 5min Max Wind")
                elif var_type=="windDir":
                    #plt.ylim([-5.0,70.0])
                    plt.ylabel('Max Wind Speed(mph)')
                    plt.title(mon+day +" Wind Direction Degree")
                elif var_type=="temp":
                    plt.ylim([-10.0,100.0])
                    plt.ylabel('Temperature(F)')
                    plt.title(mon+day +" 2m Temperature")
                elif var_type=="temp9":
                    plt.ylim([-10.0,100.0])
                    plt.ylabel('Temperature(F)')
                    plt.title(mon+day +" 9m Temperature")
                elif var_type=="press":
                    #plt.ylim([-10.0,100.0])
                    plt.ylabel('Pressure(mbar)')
                    plt.title(mon+day +" Pressure")
                elif var_type=="rad":
                    #plt.ylim([-10.0,100.0])
                    plt.ylabel('Solar Radiation(W/m^2)')
                    plt.title(mon+day +" Solar Radiation")
                elif var_type=="rh":
                    plt.ylim([0.0,100.0])
                    plt.ylabel('Relative Humidity %')
                    plt.title(mon+day +" rh")
                #plt.xticks(np.arange(min(X), max(X)+2, 12.0))
                plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
                #plt.yticks(np.arange(0, 100, 5.0),labelY)
                plt.tick_params(axis='both', which='major', labelsize=7)
                plt.xlabel('Time from 00:00 ~23:59,every 5min')
                #plt.xlim([0.2,0.0])
                plt.legend(loc='best',fontsize=10)
                plt.grid()
                #plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
                #plt.show()
                fig.savefig(root+'/outputs/mesonetPlots/'+var_type+'_plots/'+str(mon+day)+'.png')
                plt.close()
def plotTravTimeAllDays():
import matplotlib
#dates=["20160301","20160302","20160308","20160309","20160312","20160313","20160324","20160325","20160328","20160405","20160412","20160419","20160421","20160514","20160529","20160621","20160628","20160813","20160911","20160922"]
dates=[]
mons=["201603","201604","201605","201606","201607","201608","201609"]
days=['01','02','03','04','05','06','07','08','09','10','11','12','13','14','15','16','17','18','19','20','21','22','23','24','25','26','27','28','29','30','31']
var_types=["TravelTimeToWest","TravelTimeToWest"]
#var_types=["wind"]
colors=[]
for name, hex in matplotlib.colors.cnames.iteritems():
if name not in Light_cnames.keys():
colors.append(hex)
for var_type in var_types:
rootpath="F:/workspace/git/Graph-MP/data/trafficData/I90_TravelTime/"+var_type+"/"
#expRoot="F:/workspace/git/Graph-MP/data/mesonet_data/mesonetExpData/statExpData/"
for mon in mons:
for day in days:
date=str(mon+day)
# if date in dates:
# print "Not ",date
# continue
fileName=rootpath+mon+day+".txt"
print fileName
day_data=[]
if not os.path.exists(fileName):
print "File Not Found",fileName
continue
with open(fileName,"r") as df:
for idx,line in enumerate(df.readlines()):
terms=line.strip().split()
sta_name="TMC "+str(idx)
data=map(float,terms)
day_data.append((sta_name,mon+day,data))
X=[i for i in range(0,len(day_data[0][2]))]
label=[(str(i)+"\n"+str(i*5/60)+"h") for i in range(0,len(day_data[0][2])+1,12)]
labelY=[str(i) for i in range(0,100+1,5)]
print len(day_data)
fig=plt.figure(1)
for i in range(len(day_data)):
plt.plot(X,day_data[i][2],colors[i],linewidth='0.5', markersize=5,label=day_data[i][0])
# art = []
# lgd = plt.legend(loc=3, bbox_to_anchor=(0, -0.5), ncol=5)
# art.append(lgd)
#plt.plot([0.2,0.1,0.0],[0.5,0.5,0.5])
plt.ylabel('Traveling Time (sec)')
if var_type=="TravelTimeToWest":
plt.title(mon+day +" Travel Time I90 East To West")
else:
plt.title(mon+day +" Travel Time I90 West To East")
#plt.xticks(np.arange(min(X), max(X)+2, 12.0))
plt.xticks(np.arange(min(X), max(X)+2, 12.0),label)
#plt.yticks(np.arange(0, 100, 5.0),labelY)
plt.tick_params(axis='both', which='major', labelsize=7)
plt.xlabel('Time: 00:00 ~ 23:59,every 5min')
#plt.xlim([0.2,0.0])
plt.ylim([0.0,3600.0])
# plt.legend(loc='best',fontsize=10)
plt.grid()
#plt.tight_layout(pad=0.4, w_pad=0.5, h_pad=1.0)
#plt.show()
fig.savefig('F:/workspace/git/Graph-MP/outputs/trafficData/'+var_type+'_plots/'+str(mon+day)+'.png')
plt.close()
plotAllDays()<|fim▁end|>
|
day_data.append((sta_name,mon+day,data))
X=[(i*5.0/60.0) for i in range(1,len(day_data[0][2]),1)]
|
<|file_name|>0017_merge_20181221_1508.py<|end_file_name|><|fim▁begin|># Generated by Django 2.1.4 on 2018-12-21 14:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('mainapp', '0016_auto_20181202_2205'),
('mainapp', '0016_auto_20181221_1432'),<|fim▁hole|> operations = [
]<|fim▁end|>
|
]
|
<|file_name|>issue-8506.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#[allow(dead_code)];
enum Either {
One,
Other(~str,~str)
}
static one : Either = One;<|fim▁hole|>pub fn main () { }<|fim▁end|>
| |
<|file_name|>predictor.rs<|end_file_name|><|fim▁begin|>// The MIT License (MIT)
//<|fim▁hole|>// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
use bitstring::Bitstring;
/// An object capable of predicting observations and rewards based on
/// experience. Predictors have an abstract notion of history which
/// grows over time and represents the experience.
pub trait Predictor {
    /// Returns the size of the currently tracked history.
    fn history_size(&self) -> usize;
    /// Appends the provided bit string to the tracked history.
    fn update(&mut self, bitstring: &Bitstring);
    /// Reverts the context tree to a previous state by undoing update
    /// operations. The specified size must be at most the current size.
    fn revert_to_history_size(&mut self, target_size: usize);
    /// Returns the probability, given the current history, that "bits" are the
    /// next observed symbols. Note: takes `&mut self`, so implementations may
    /// mutate internal state (e.g. caches) while predicting.
    fn predict(&mut self, bits: &Bitstring) -> f64;
}
|
// Copyright (c) 2015 dinowernli
//
|
<|file_name|>markdown_to_html.py<|end_file_name|><|fim▁begin|># coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import codecs
import os
import re
from pkg_resources import resource_string
from pygments.formatters.html import HtmlFormatter
from pygments.styles import get_all_styles
from pants.backend.docgen.targets.doc import Page
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.generator import Generator
from pants.base.workunit import WorkUnitLabel
from pants.binaries import binary_util
from pants.build_graph.address import Address
from pants.task.task import Task
from pants.util.dirutil import safe_mkdir
def util():
  """Lazily import and return the markdown_to_html_utils module.

  The import is deferred because pulling in markdown and pygments triggers a
  lot of re.compile work; we only pay that cost when the utils are needed.
  """
  from pants.backend.docgen.tasks import markdown_to_html_utils as _utils
  return _utils
class MarkdownToHtml(Task):
"""Generate HTML from Markdown docs."""
  @classmethod
  def register_options(cls, register):
    """Register this task's command-line options.

    :param register: callback supplied by the options system; options marked
      fingerprint=True participate in task fingerprinting / cache keys.
    """
    register('--code-style', choices=list(get_all_styles()), default='friendly',
             fingerprint=True,
             help='Use this stylesheet for code highlights.')
    register('--open', type=bool,
             help='Open the generated documents in a browser.')
    register('--fragment', type=bool,
             fingerprint=True,
             help='Generate a fragment of html to embed in a page.')
    register('--ignore-failure', type=bool,
             fingerprint=True,
             help='Do not consider rendering errors to be build errors.')
  @classmethod
  def product_types(cls):
    """Names of the products this task can populate for downstream tasks."""
    return ['markdown_html', 'wiki_html']
  def __init__(self, *args, **kwargs):
    """Cache option values and the location of the mustache templates."""
    super(MarkdownToHtml, self).__init__(*args, **kwargs)
    # Resource path (used with pkg_resources.resource_string), not a filesystem path.
    self._templates_dir = os.path.join('templates', 'markdown')
    self.open = self.get_options().open
    self.fragment = self.get_options().fragment
    self.code_style = self.get_options().code_style
def execute(self):
# TODO(John Sirois): consider adding change detection
outdir = os.path.join(self.get_options().pants_distdir, 'markdown')
css_path = os.path.join(outdir, 'css', 'codehighlight.css')
css = util().emit_codehighlight_css(css_path, self.code_style)
if css:
self.context.log.info('Emitted {}'.format(css))
def is_page(target):
return isinstance(target, Page)
roots = set()
interior_nodes = set()
if self.open:
dependencies_by_page = self.context.dependents(on_predicate=is_page, from_predicate=is_page)
roots.update(dependencies_by_page.keys())
for dependencies in dependencies_by_page.values():
interior_nodes.update(dependencies)
roots.difference_update(dependencies)
for page in self.context.targets(is_page):
# There are no in or out edges so we need to show show this isolated page.
if not page.dependencies and page not in interior_nodes:
roots.add(page)
with self.context.new_workunit(name='render', labels=[WorkUnitLabel.MULTITOOL]):
plaingenmap = self.context.products.get('markdown_html')
wikigenmap = self.context.products.get('wiki_html')
show = []
for page in self.context.targets(is_page):
def process_page(key, outdir, url_builder, genmap, fragment=False):
if page.format == 'rst':
with self.context.new_workunit(name='rst') as workunit:
html_path = self.process_rst(
workunit,
page,
os.path.join(outdir, util().page_to_html_path(page)),
os.path.join(page.payload.sources.rel_path, page.source),
self.fragment or fragment,
)
else:
with self.context.new_workunit(name='md'):
html_path = self.process_md(
os.path.join(outdir, util().page_to_html_path(page)),
os.path.join(page.payload.sources.rel_path, page.source),
self.fragment or fragment,
url_builder,
css=css,
)
self.context.log.info('Processed {} to {}'.format(page.source, html_path))
relpath = os.path.relpath(html_path, outdir)
genmap.add(key, outdir, [relpath])
return html_path
def url_builder(linked_page):
dest = util().page_to_html_path(linked_page)<|fim▁hole|>
page_path = os.path.join(outdir, 'html')
html = process_page(page, page_path, url_builder, plaingenmap)
if css and not self.fragment:
plaingenmap.add(page, self.workdir, list(css_path))
if self.open and page in roots:
show.append(html)
if page.provides:
for wiki in page.provides:
basedir = os.path.join(self.workdir, str(hash(wiki)))
process_page((wiki, page), basedir, wiki.wiki.url_builder, wikigenmap, fragment=True)
if show:
binary_util.ui_open(*show)
  # Matches pants('<spec>') / pants("<spec>") links, with an optional #anchor suffix.
  PANTS_LINK = re.compile(r'''pants\(['"]([^)]+)['"]\)(#.*)?''')
  def process_md(self, output_path, source, fragmented, url_builder, css=None):
    """Render a markdown source file to HTML.

    :param output_path: where to write the generated HTML.
    :param source: path of the markdown file, relative to the build root.
    :param fragmented: if true, emit an embeddable HTML fragment with inlined
      styles instead of a full page.
    :param url_builder: callable mapping a linked Page target to (alias, url).
    :param css: path to the code-highlight stylesheet (full-page mode only).
    :returns: the path of the written file.
    :raises TaskError: if a pants(...) link references an unknown target.
    """
    def parse_url(spec):
      # Resolve a pants('...') link to its target's (alias, url); anything
      # that doesn't match PANTS_LINK is passed through unchanged.
      match = self.PANTS_LINK.match(spec)
      if match:
        address = Address.parse(match.group(1), relative_to=get_buildroot())
        page = self.context.build_graph.get_target(address)
        anchor = match.group(2) or ''
        if not page:
          raise TaskError('Invalid markdown link to pants target: "{}". '.format(match.group(1)) +
                          'Is your page missing a dependency on this target?')
        alias, url = url_builder(page)
        return alias, url + anchor
      else:
        return spec, spec
    def build_url(label):
      # Wikilink labels may be "alias|link"; without an alias, the link's
      # own alias from parse_url is used.
      components = label.split('|', 1)
      if len(components) == 1:
        return parse_url(label.strip())
      else:
        alias, link = components
        _, url = parse_url(link.strip())
        return alias, url
    wikilinks = util().WikilinksExtension(build_url)
    safe_mkdir(os.path.dirname(output_path))
    with codecs.open(output_path, 'w', 'utf-8') as output:
      source_path = os.path.join(get_buildroot(), source)
      with codecs.open(source_path, 'r', 'utf-8') as source_stream:
        md_html = util().markdown.markdown(
          source_stream.read(),
          extensions=['codehilite(guess_lang=False)',
                      'extra',
                      'tables',
                      'toc',
                      wikilinks,
                      util().IncludeExcerptExtension(source_path)],
        )
        if fragmented:
          # Fragment mode: inline the pygments styles so the fragment is
          # self-contained when embedded elsewhere.
          style_css = (HtmlFormatter(style=self.code_style)).get_style_defs('.codehilite')
          template = resource_string(__name__,
                                     os.path.join(self._templates_dir, 'fragment.mustache'))
          generator = Generator(template, style_css=style_css, md_html=md_html)
          generator.write(output)
        else:
          # Full-page mode: link to the shared stylesheet emitted in execute().
          style_link = os.path.relpath(css, os.path.dirname(output_path))
          template = resource_string(__name__, os.path.join(self._templates_dir, 'page.mustache'))
          generator = Generator(template, style_link=style_link, md_html=md_html)
          generator.write(output)
        return output.name
  def process_rst(self, workunit, page, output_path, source, fragmented):
    """Render a reStructuredText source file to HTML.

    :param workunit: workunit whose stderr stream collects rst tool output.
    :param page: the Page target being rendered (reported on failure).
    :param output_path: where to write the generated HTML.
    :param source: path of the rst file, relative to the build root.
    :param fragmented: if true, emit an embeddable fragment instead of a page.
    :returns: the path of the written file.
    :raises TaskError: on rendering errors, unless --ignore-failure is set.
    """
    source_path = os.path.join(get_buildroot(), source)
    with codecs.open(source_path, 'r', 'utf-8') as source_stream:
      rst_html, returncode = util().rst_to_html(source_stream.read(),
                                                stderr=workunit.output('stderr'))
      if returncode != 0:
        message = '{} rendered with errors.'.format(source_path)
        if self.get_options().ignore_failure:
          # Best effort was requested: warn and keep the (partial) output.
          self.context.log.warn(message)
        else:
          raise TaskError(message, exit_code=returncode, failed_targets=[page])
    template_path = os.path.join(self._templates_dir,
                                 'fragment.mustache' if fragmented else 'page.mustache')
    template = resource_string(__name__, template_path)
    generator = Generator(template, md_html=rst_html)
    safe_mkdir(os.path.dirname(output_path))
    with codecs.open(output_path, 'w', 'utf-8') as output:
      generator.write(output)
      return output.name
|
src_dir = os.path.dirname(util().page_to_html_path(page))
return linked_page.name, os.path.relpath(dest, src_dir)
|
<|file_name|>ExReadFermiCatalog.py<|end_file_name|><|fim▁begin|># author David Sanchez [email protected]
# ------ Imports --------------- #
import numpy
from Plot.PlotLibrary import *
from Catalog.ReadFermiCatalog import *
from environ import FERMI_CATALOG_DIR
# ------------------------------ #
#look for this 2FGL source
source = "2FGL J1015.1+4925"
#source = "1FHL J2158.8-3013"
#source = "3FGL J2158.8-3013"
Cat = FermiCatalogReader(source,FERMI_CATALOG_DIR,"e2dnde","TeV")
#print some information
print "2FGL association ",Cat.Association('3FGL')
print "3FGL Name ",Cat.Association('2FHL','3FGL_name')
print "3FGL Var Index ",Cat.GetVarIndex("3FGL")
#create a spectrum for a given catalog and compute the model+butterfly
Cat.MakeSpectrum("3FGL",1e-4,0.3)
enerbut,but,enerphi,phi = Cat.Plot("3FGL")
Cat.MakeSpectrum("2FGL",1e-4,0.3)
enerbut2FGL,but2FGL,enerphi2FGL,phi2FGL = Cat.Plot("2FGL")
Cat.MakeSpectrum("2FHL",5e-2,2)
enerbut2FHL,but2FHL,enerphi2FHL,phi2FHL = Cat.Plot("2FHL")
# read DATA Point <|fim▁hole|>dep = ep-ener
c=Cat.ReadPL('3FGL')[3]
dnde = (-c+1)*flux*numpy.power(ener*1e6,-c+2)/(numpy.power((ep*1e6),-c+1)-numpy.power((em*1e6),-c+1))*1.6e-6
ddnde = dnde*dflux/flux
#plot
import matplotlib.pyplot as plt
plt.loglog()
plt.plot(enerbut, but, 'b-',label = "3FGL")
plt.plot(enerphi,phi, 'b-')
plt.plot(enerbut2FGL,but2FGL,'g-',label = "2FGL")
plt.plot(enerphi2FGL,phi2FGL,'g-')
plt.plot(enerbut2FHL,but2FHL,'r-',label = "2FHL")
plt.plot(enerphi2FHL,phi2FHL,'r-')
plt.errorbar(ener, dnde, xerr= [dem,dep], yerr = ddnde,fmt='o')
plt.legend(loc = 3)
plt.ylabel('E2dN/dE(erg.cm-2.s-1)')
plt.xlabel('energy (TeV)')
plt.show()<|fim▁end|>
|
em,ep,flux,dflux = Cat.GetDataPoints('3FGL') #energy in TeV since the user ask for that in the call of Cat
ener = numpy.sqrt(em*ep)
dem = ener-em
|
<|file_name|>car.rs<|end_file_name|><|fim▁begin|>use super::camera::Camera;
use crate::color::*;
use cgmath::{Vector2, Vector3};
// Present a car that can be drawed, check for collision
// with other car and bullet, turn left/right, move forward
// and jump.
pub trait Car {
    /// Project the car into 2D; returns drawable line segments paired with a color.
    fn render(&self, _: &Camera) -> Vec<([Vector2<f64>; 2], Color)>;
    /// True when this car's box overlaps the other car's box.
    fn crashed(&self, _: &Self) -> bool;
    /// True when a bullet (origin + direction triple; see implementations)
    /// touches the car.
    fn hit(&self, _: &[Vector3<f64>; 3]) -> bool;
    /// Advance the car along the track; `outside_speed` is added to the
    /// car's own speed.
    fn forward(&mut self, dt: f64, outside_speed: f64);
    /// Shift the car left by `turn_speed()` * dt.
    fn turn_left(&mut self, dt: f64);
    /// Shift the car right by `turn_speed()` * dt.
    fn turn_right(&mut self, dt: f64);
    /// Advance the vertical (jump) motion by `dt`.
    fn update_jump(&mut self, dt: f64);
    /// Start a jump if the car is on the ground and not already jumping.
    fn jump(&mut self);
    /// Current position of the car.
    fn pos(&self) -> Vector3<f64>;
    /// Effective lateral turn speed (may be reduced while jumping).
    fn turn_speed(&self) -> f64;
}
// Car with shape of a box
#[derive(Clone)]
pub struct BoxCar {
    /// Extents of the box: x = width, y = height, z = length.
    pub size: Vector3<f64>,
    /// Bottom-center of the front face (x is centered; y and z mark the box start).
    pub position: Vector3<f64>,
    /// Forward speed (applied along -z).
    pub speed: f64,
    /// Base lateral speed used by turn_left/turn_right.
    pub turn_speed: f64,
    /// Color of the rendered wireframe.
    pub color: Color,
    /// Initial vertical velocity when a jump starts.
    pub jump_v: f64,
    /// Downward acceleration applied during a jump.
    pub jump_a: f64,
    /// Whether a jump is currently in progress.
    pub jumping: bool,
    /// Time elapsed since the current jump started.
    pub current_t: f64,
    /// Divisor applied to turn_speed while airborne.
    pub jump_turn_decrease: f64,
}
impl Car for BoxCar {
    fn render(&self, camera: &Camera) -> Vec<([Vector2<f64>; 2], Color)> {
        // Build the four corners of the front face from `position`
        // (bottom-center of the face): raise the top two by the height,
        // spread left/right by half the width.
        let mut front = [self.position; 4];
        front[0].y += self.size.y;
        front[1].y += self.size.y;
        front[0].x -= self.size.x / 2.;
        front[3].x -= self.size.x / 2.;
        front[1].x += self.size.x / 2.;
        front[2].x += self.size.x / 2.;
        // Rear face: same corners shifted back along z.
        let mut rear = front;
        for x in &mut rear {
            x.z += self.size.z;
        }
        // 12 edges of the box: 4 on each face plus 4 connecting them.
        let mut ret = Vec::new();
        for i in 0..4 {
            ret.push(camera.render_line(&front[i], &front[(i + 1) % 4]));
            ret.push(camera.render_line(&rear[i], &rear[(i + 1) % 4]));
            ret.push(camera.render_line(&front[i], &rear[i]));
        }
        // render_line yields None for segments the camera cannot see;
        // drop those and attach the car's color to the rest.
        ret.into_iter()
            .filter_map(|x| x.map(|x| (x, self.color)))
            .collect()
    }
    /// Move left: decrease x at the current effective turn speed.
    fn turn_left(&mut self, dt: f64) {
        self.position.x -= dt * self.turn_speed();
    }
    /// Move right: increase x at the current effective turn speed.
    fn turn_right(&mut self, dt: f64) {
        self.position.x += dt * self.turn_speed();
    }
    /// Current position (bottom-center of the front face).
    fn pos(&self) -> Vector3<f64> {
        self.position
    }
fn crashed(&self, a: &Self) -> bool {
(f64::abs(self.position.x - a.position.x) < (self.size.x + a.size.x) / 2.)
&& ((self.position.z < a.position.z && a.position.z - self.position.z < self.size.z)
|| (self.position.z >= a.position.z && self.position.z - a.position.z < a.size.z))
&& ((self.position.y < a.position.y && a.position.y - self.position.y < self.size.y)
|| (self.position.y >= a.position.y && self.position.y - a.position.y < a.size.y))
}
fn jump(&mut self) {
if self.jumping || self.position.y > 0. {
} else {
self.jumping = true;
self.current_t = 0.;<|fim▁hole|> self.update_jump(dt);
}
fn update_jump(&mut self, dt: f64) {
if self.jumping {
self.current_t += dt;
self.position.y = self.current_t * (self.jump_v - 0.5 * self.jump_a * self.current_t);
if self.position.y < 0. {
self.position.y = 0.;
self.jumping = false;
}
}
}
fn hit(&self, bullet: &[Vector3<f64>; 3]) -> bool {
let (x, y) = (bullet[0], bullet[0] + bullet[1]);
let check = |x: &Vector3<f64>| {
f64::abs(x.x - self.position.x) < self.size.x / 2.
&& x.y >= self.position.y
&& x.y - self.position.y < self.size.y
&& x.z >= self.position.z
&& x.z - self.position.z < self.size.z
};
check(&x) || check(&y)
}
fn turn_speed(&self) -> f64 {
if self.jumping {
self.turn_speed / self.jump_turn_decrease
} else {
self.turn_speed
}
}
}<|fim▁end|>
|
}
}
fn forward(&mut self, dt: f64, outside_speed: f64) {
self.position.z -= dt * (self.speed + outside_speed);
|
<|file_name|>call.py<|end_file_name|><|fim▁begin|># Copyright 2015 Google Inc. All Rights Reserved.
#<|fim▁hole|># You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""'functions call' command."""
from googlecloudsdk.api_lib.functions import util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
class Call(base.Command):
  """Call function synchronously for testing."""
  # NOTE(review): class/method docstrings appear to be consumed by the gcloud
  # calliope framework as user-facing help text — confirm before rewording.
  @staticmethod
  def Args(parser):
    """Register flags for this command."""
    # Positional: the function to call; validated at parse time so bad
    # names fail before any API traffic.
    parser.add_argument(
        'name', help='Name of the function to be called.',
        type=util.ValidateFunctionNameOrRaise)
    parser.add_argument(
        '--data', default='',
        help='Data passed to the function (JSON string)')
  @util.CatchHTTPErrorRaiseHTTPException
  def Run(self, args):
    """This is what gets called when the user runs this command.

    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.

    Returns:
      Function call results (error or result with execution id)
    """
    project = properties.VALUES.core.project.Get(required=True)
    registry = self.context['registry']
    client = self.context['functions_client']
    messages = self.context['functions_messages']
    # NOTE(review): args.region is not registered in Args() above —
    # presumably a shared flag registered by the parent command group; confirm.
    function_ref = registry.Parse(
        args.name, params={'projectsId': project, 'locationsId': args.region},
        collection='cloudfunctions.projects.locations.functions')
    return client.projects_locations_functions.Call(
        messages.CloudfunctionsProjectsLocationsFunctionsCallRequest(
            name=function_ref.RelativeName(),
            callFunctionRequest=messages.CallFunctionRequest(data=args.data)))
|
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>extern crate log;
extern crate flexi_logger;
extern crate ansi_term;
extern crate wrust_types;
extern crate wrust_conf;<|fim▁hole|>use wrust_types::{Error, Result};
use wrust_conf::{Conf, FromConf};
use conf::{LogConf, LogDevice};
/// Initialize logging system using configuration given
pub fn init(config: LogConf) -> Result<()> {
	use flexi_logger::{self, LogConfig};
	// Start from the default logger configuration; suppress the startup
	// message and stderr duplication of error/info records.
	let mut flexi_config = LogConfig::new();
	flexi_config.print_message = false;
	flexi_config.duplicate_error = false;
	flexi_config.duplicate_info = false;
	// Setup flexi logger based on log device
	match config.device {
		LogDevice::Stderr(colorize) => {
			// Terminal output: optionally colorize with ANSI escapes.
			flexi_config.format = if colorize {
				colorized_format
			}
			else {
				simple_format
			};
		},
		LogDevice::File(directory_opt, rotate_size_opt) => {
			// File output: plain format; optional target directory and
			// size-based rotation threshold.
			flexi_config.format = simple_format;
			flexi_config.log_to_file = true;
			flexi_config.directory = directory_opt;
			flexi_config.rotate_over_size = rotate_size_opt;
		},
	};
	// Initialize logger with the configured level filter.
	match flexi_logger::init(flexi_config, Some(config.level.to_string())) {
		Ok(_) => Ok(()),
		Err(_) => Error::new("Logger initialization failed").result()
	}
}
pub fn init_from_conf(config: &Conf, xpath: &str) -> Result<()> {
// Initialize logger
match LogConf::from_conf(&config, xpath) {
Ok(settings) => init(settings),
Err(err) => Error::new("Logger configuration failed").because(err).result()
}
}
/// Format a record for terminal output, color-coding the level tag
/// (red errors, yellow warnings, green info, purple debug, cyan trace).
/// Error/Warn also include the source module, file and line.
fn colorized_format(record: &LogRecord) -> String {
	use ansi_term::Colour::{Red, Green, Yellow, Purple, Cyan};
	match record.level() {
		LogLevel::Error => format!("{} {} in {} ({}:{})", Red.bold().paint("[!]"), record.args(), record.location().module_path(), record.location().file(), record.location().line()),
		LogLevel::Warn => format!("{} {} in {} ({}:{})", Yellow.paint("[W]"), record.args(), record.location().module_path(), record.location().file(), record.location().line()),
		LogLevel::Info => format!("{}: {}", Green.paint("[I]"), record.args()),
		LogLevel::Debug => format!("{}: {}", Purple.paint("[D]"), record.args()),
		LogLevel::Trace => format!("{}: {}", Cyan.paint("[T]"), record.args()),
	}
}
fn simple_format(record: &LogRecord) -> String {
match record.level() {
LogLevel::Error => format!("[!] {} in {} ({}:{})", record.args(), record.location().module_path(), record.location().file(), record.location().line()),
LogLevel::Warn => format!("[W] {} in {} ({}:{})", record.args(), record.location().module_path(), record.location().file(), record.location().line()),
LogLevel::Info => format!("[I]: {}", record.args()),
LogLevel::Debug => format!("[D]: {}", record.args()),
LogLevel::Trace => format!("[E]: {}", record.args()),
}
}<|fim▁end|>
|
pub mod conf;
use log::{LogLevel, LogRecord};
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
from datetime import datetime, timedelta
from django.conf import settings
from django.contrib.auth import get_user_model
from django.contrib.auth.decorators import login_required
from django.core.exceptions import PermissionDenied
from django.db import transaction
from django.http import HttpResponse
from django.shortcuts import render, redirect, get_object_or_404 as getObj
from django.utils import timezone
from django.utils.translation import ugettext
from openpyxl import Workbook
from openpyxl.writer.excel import save_virtual_workbook
from webframe.functions import getDateTime, getDate, FMT_DATE, FMT_DATETIME, getEndOfDay
from .models import Record
import logging
logger=logging.getLogger('sugar.views')
def _getUser(req, username=None):
if username:
if req.user.is_superuser or req.user.username==username:
return getObj(get_user_model(), username=username)
else:
return req.user
raise PermissionDenied()
def index(req):
   '''Landing page: send authenticated users to their own dashboard.'''
   if req.user.is_authenticated():
      return redirect('dashboard', username=req.user.username)
   return render(req, 'webframe/empty.html')
@login_required
def dashboard(req, username=None):
user=_getUser(req, username)
if req.method=='POST':<|fim▁hole|> with transaction.atomic():
r=Record()
r.owner=user
r.date=getDateTime(req.POST.get('date'))
r.sugar=req.POST.get('sugar', '0')
r.sugar=float(r.sugar) if r.sugar else 0
r.pulse=req.POST.get('pulse', '0')
r.pulse=int(r.pulse) if r.pulse else 0
r.sys=req.POST.get('sys', '0')
r.sys=int(r.sys) if r.sys else 0
r.dia=req.POST.get('dia', '0')
r.dia=int(r.dia) if r.dia else 0
r.save()
return redirect('reports-user', username=username) if username else redirect('reports')
return render(req, 'sugar/dashboard.html', {})
@login_required
def reports(req, username=None):
   '''
   Show the user's records within a date range as an on-screen report.

   GET params ``from``/``to`` bound the range; defaults are the last 30
   days ending today.
   '''
   user=_getUser(req, username)
   params=dict()
   params['to']=getDate(req.GET.get('to', None), timezone.now())
   params['to']=getEndOfDay(params['to']) # extend to end-of-day so the selected date itself is included
   params['from']=getDate(req.GET.get('from', None), params['to']-timedelta(days=30))
   params['target']=Record.objects.filter(owner=user, date__range=(params['from'], params['to'])).order_by('date')
   return render(req, 'sugar/reports.html', params)
def downloads(req, username=None):
user=_getUser(req, username)
params=dict()
params['to']=getDate(req.GET.get('to', None), datetime.now())
params['from']=getDate(req.GET.get('from', None), params['to']-timedelta(days=30))
params['target']=Record.objects.filter(owner=user, date__range=(params['from'], params['to'])).order_by('date')
logger.debug(params['target'])
filename=ugettext('From %(from)s to %(to)s'%params)
wb=Workbook()
ws=wb.active
ws.merge_cells('A1:G1')
ws['A1']=filename
ws['A2']=ugettext('Record.owner')
ws['B2']=user.get_full_name() if user.get_full_name() else user.username
ws['A3']=ugettext('from')
ws['B3']=params['from'].strftime(FMT_DATE)
ws['A4']=ugettext('to')
ws['B4']=params['to'].strftime(FMT_DATE)
ws.cell(row=5, column=3, value=ugettext('Record.date'))
ws.cell(row=5, column=4, value=ugettext('Record.sugar'))
ws.cell(row=5, column=5, value=ugettext('Record.pulse'))
ws.cell(row=5, column=6, value=ugettext('Record.sys'))
ws.cell(row=5, column=7, value=ugettext('Record.dia'))
row=6
for r in params['target']:
ws.cell(row=row, column=3, value=timezone.localtime(r.date).strftime(FMT_DATETIME))
ws.cell(row=row, column=4, value=r.sugar)
ws.cell(row=row, column=5, value=r.pulse)
ws.cell(row=row, column=6, value=r.sys)
ws.cell(row=row, column=7, value=r.dia)
row+=1
rst=HttpResponse(save_virtual_workbook(wb), content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
rst['Content-Disposition'] = 'attachment; filename=\"%s.xlsx\"'%filename
return rst<|fim▁end|>
|
logger.info('Saving record to user<%s>:%s...'%(user.id, user.username))
|
<|file_name|>test_groups.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright © 2013-2014 Red Hat, Inc.
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions
# of the GNU General Public License v.2, or (at your option) any later
# version. This program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY expressed or implied, including the
# implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details. You
# should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# Any Red Hat trademarks that are incorporated in the source
# code or documentation are not subject to the GNU General Public
# License and may only be used or replicated with the express permission
# of Red Hat, Inc.
#
'''
pkgdb tests for the Collection object.
'''
__requires__ = ['SQLAlchemy >= 0.7']
import pkg_resources
import json
import unittest
import sys
import os
from mock import patch
sys.path.insert(0, os.path.join(os.path.dirname(
os.path.abspath(__file__)), '..'))
import pkgdb2
import pkgdb2.lib.model as model
from tests import (Modeltests, FakeFasUser,
FakeFasGroupValid, create_package_acl,
create_package_acl2, user_set)
class PkgdbGrouptests(Modeltests):
""" PkgdbGroup tests. """
    def setUp(self):
        """ Set up the test environment; runs before every test. """
        super(PkgdbGrouptests, self).setUp()
        pkgdb2.APP.config['TESTING'] = True
        # Point every pkgdb2 module at this test's database session.
        pkgdb2.SESSION = self.session
        pkgdb2.api.extras.SESSION = self.session
        pkgdb2.ui.SESSION = self.session
        pkgdb2.ui.acls.SESSION = self.session
        pkgdb2.ui.admin.SESSION = self.session
        pkgdb2.ui.collections.SESSION = self.session
        pkgdb2.ui.packagers.SESSION = self.session
        pkgdb2.ui.packages.SESSION = self.session
        self.app = pkgdb2.APP.test_client()
        # Let's make sure the cache is empty for the tests
        pkgdb2.CACHE.invalidate()
def set_group_acls(self):
''' Create some Group ACLs. '''
fedocal_pkg = model.Package.by_name(self.session, 'rpms', 'fedocal')
devel_collec = model.Collection.by_name(self.session, 'master')
f18_collec = model.Collection.by_name(self.session, 'f18')
pklist_fedocal_f18 = model.PackageListing.by_pkgid_collectionid(
self.session, fedocal_pkg.id, f18_collec.id)
pklist_fedocal_devel = model.PackageListing.by_pkgid_collectionid(
self.session, fedocal_pkg.id, devel_collec.id)
packager = model.PackageListingAcl(
fas_name='group::infra-sig',
packagelisting_id=pklist_fedocal_f18.id,
acl='commit',
status='Approved',
)
self.session.add(packager)
packager = model.PackageListingAcl(
fas_name='group::infra-sig',
packagelisting_id=pklist_fedocal_devel.id,
acl='commit',
status='Approved',
)
self.session.add(packager)
packager = model.PackageListingAcl(
fas_name='group::infra-sig',
packagelisting_id=pklist_fedocal_f18.id,
acl='watchbugzilla',
status='Approved',
)
self.session.add(packager)
packager = model.PackageListingAcl(
fas_name='group::infra-sig',
packagelisting_id=pklist_fedocal_devel.id,
acl='watchbugzilla',
status='Approved',
)
self.session.add(packager)
self.session.commit()
    def test_api_bugzilla_group(self):
        """ Test the api_bugzilla function.

        Exercises /api/bugzilla/ in both its plain-text and JSON forms,
        checking that group owners/CC entries (``group::``/``@`` prefixes)
        are rendered correctly.
        """
        create_package_acl2(self.session)
        self.set_group_acls()
        # Plain-text format (the default).
        output = self.app.get('/api/bugzilla/')
        self.assertEqual(output.status_code, 200)
        expected = """# Package Database VCS Acls
# Text Format
# Collection|Package|Description|Owner|Initial QA|Initial CCList
# Backslashes (\) are escaped as \u005c Pipes (|) are escaped as \u007c
Fedora|fedocal|A web-based calendar for Fedora|pingou||group::infra-sig,pingou
Fedora|geany|A fast and lightweight IDE using GTK2|group::gtk-sig||
Fedora|guake|Top down terminal for GNOME|pingou||spot"""
        self.assertEqual(output.data, expected)
        # JSON format.
        output = self.app.get('/api/bugzilla/?format=json')
        self.assertEqual(output.status_code, 200)
        expected = {
            u'bugzillaAcls': {
                'Fedora': {
                    "fedocal": {
                        "owner": "pingou",
                        "cclist": {
                            "groups": ["@infra-sig"],
                            "people": ["pingou"]
                        },
                        "qacontact": None,
                        "summary": "A web-based calendar for Fedora"
                    },
                    'geany': {
                        'owner': '@gtk-sig',
                        'cclist': {
                            'groups': [],
                            'people': []
                        },
                        'qacontact': None,
                        'summary': 'A fast and lightweight IDE using '
                                   'GTK2'
                    },
                    'guake': {
                        'owner': 'pingou',
                        'cclist': {
                            'groups': [],
                            'people': ['spot']
                        },
                        'qacontact': None,
                        'summary': 'Top down terminal for GNOME'
                    }
                }
            },
            'title': 'Fedora Package Database -- Bugzilla ACLs'
        }
        data = json.loads(output.data)
        self.assertEqual(data, expected)
@patch('pkgdb2.lib.utils')
@patch('pkgdb2.packager_login_required')
def test_package_give_group(self, login_func, mock_func):
""" Test the package_give function to a group. """
login_func.return_value = None
create_package_acl(self.session)
mock_func.get_packagers.return_value = ['spot']
group = FakeFasGroupValid()
group.name = 'gtk-sig'
mock_func.get_fas_group.return_value = group
mock_func.log.return_value = ''
user = FakeFasUser()
with user_set(pkgdb2.APP, user):
output = self.app.get('/package/rpms/guake/give')
self.assertEqual(output.status_code, 200)
self.assertTrue(
'<h1>Give Point of Contact of package: guake</h1>'
in output.data)
self.assertTrue(
'<input id="csrf_token" name="csrf_token"' in output.data)
csrf_token = output.data.split(
'name="csrf_token" type="hidden" value="')[1].split('">')[0]
data = {
'branches': 'master',
'poc': 'spot',
'csrf_token': csrf_token,<|fim▁hole|> self.assertEqual(output.status_code, 200)
self.assertTrue(
'rpms/<span property="doap:name">guake</span>'
in output.data)
self.assertEqual(
output.data.count('<a href="/packager/spot/">'), 2)
user.username = 'spot'
user.groups.append('gtk-sig')
with user_set(pkgdb2.APP, user):
output = self.app.get('/package/rpms/guake/give')
self.assertEqual(output.status_code, 200)
self.assertTrue(
'<h1>Give Point of Contact of package: guake</h1>'
in output.data)
self.assertTrue(
'<input id="csrf_token" name="csrf_token"' in output.data)
csrf_token = output.data.split(
'name="csrf_token" type="hidden" value="')[1].split('">')[0]
data = {
'branches': 'master',
'poc': 'group::gtk-sig',
'csrf_token': csrf_token,
}
output = self.app.post('/package/rpms/guake/give', data=data,
follow_redirects=True)
self.assertEqual(output.status_code, 200)
self.assertEqual(
output.data.count('<a href="/packager/spot/">'), 2)
self.assertEqual(
output.data.count('<a href="/packager/group::gtk-sig/">'),
1)
output = self.app.get('/package/rpms/guake/give')
self.assertEqual(output.status_code, 200)
self.assertTrue(
'<h1>Give Point of Contact of package: guake</h1>'
in output.data)
self.assertTrue(
'<td><select id="branches" multiple name="branches">'
'</select></td>'
in output.data)
if __name__ == '__main__':
    # Allow running this module directly: python test_groups.py
    SUITE = unittest.TestLoader().loadTestsFromTestCase(PkgdbGrouptests)
    unittest.TextTestRunner(verbosity=2).run(SUITE)
|
}
output = self.app.post('/package/rpms/guake/give', data=data,
follow_redirects=True)
|
<|file_name|>generated.rs<|end_file_name|><|fim▁begin|>// =================================================================
//
// * WARNING *
//
// This file is generated!
//
// Changes made to this file will be overwritten. If changes are
// required to the generated code, the service_crategen project
// must be updated to generate the changes.
//
// =================================================================
use std::error::Error;
use std::fmt;
use std::io;
#[allow(warnings)]
use futures::future;
use futures::Future;
use rusoto_core::region;
use rusoto_core::request::{BufferedHttpResponse, DispatchSignedRequest};
use rusoto_core::{Client, RusotoFuture};
use rusoto_core::credential::{CredentialsError, ProvideAwsCredentials};
use rusoto_core::request::HttpDispatchError;
use rusoto_core::param::{Params, ServiceParams};
use rusoto_core::signature::SignedRequest;
use serde_json;
use serde_json::from_slice;
use serde_json::Value as SerdeJsonValue;
/// <p>The input for the DeleteThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct DeleteThingShadowRequest {
/// <p>The name of the thing.</p>
#[serde(rename = "thingName")]
pub thing_name: String,
}
/// <p>The output from the DeleteThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct DeleteThingShadowResponse {
/// <p>The state information, in JSON format.</p>
pub payload: Vec<u8>,
}
/// <p>The input for the GetThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct GetThingShadowRequest {
/// <p>The name of the thing.</p>
#[serde(rename = "thingName")]
pub thing_name: String,
}
/// <p>The output from the GetThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct GetThingShadowResponse {
/// <p>The state information, in JSON format.</p>
pub payload: Option<Vec<u8>>,
}
/// <p>The input for the Publish operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct PublishRequest {
/// <p>The state information, in JSON format.</p>
#[serde(rename = "payload")]
#[serde(
deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
default
)]
#[serde(skip_serializing_if = "Option::is_none")]
pub payload: Option<Vec<u8>>,
/// <p>The Quality of Service (QoS) level.</p>
#[serde(rename = "qos")]
#[serde(skip_serializing_if = "Option::is_none")]
pub qos: Option<i64>,
/// <p>The name of the MQTT topic.</p>
#[serde(rename = "topic")]
pub topic: String,
}
/// <p>The input for the UpdateThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub struct UpdateThingShadowRequest {
/// <p>The state information, in JSON format.</p>
#[serde(rename = "payload")]
#[serde(
deserialize_with = "::rusoto_core::serialization::SerdeBlob::deserialize_blob",
serialize_with = "::rusoto_core::serialization::SerdeBlob::serialize_blob",
default
)]
pub payload: Vec<u8>,
/// <p>The name of the thing.</p>
#[serde(rename = "thingName")]<|fim▁hole|>/// <p>The output from the UpdateThingShadow operation.</p>
#[derive(Default, Debug, Clone, PartialEq)]
pub struct UpdateThingShadowResponse {
/// <p>The state information, in JSON format.</p>
pub payload: Option<Vec<u8>>,
}
/// Errors returned by DeleteThingShadow
#[derive(Debug, PartialEq)]
pub enum DeleteThingShadowError {
/// <p>An unexpected error has occurred.</p>
InternalFailure(String),
/// <p>The request is not valid.</p>
InvalidRequest(String),
/// <p>The specified combination of HTTP verb and URI is not supported.</p>
MethodNotAllowed(String),
/// <p>The specified resource does not exist.</p>
ResourceNotFound(String),
/// <p>The service is temporarily unavailable.</p>
ServiceUnavailable(String),
/// <p>The rate exceeds the limit.</p>
Throttling(String),
/// <p>You are not authorized to perform this operation.</p>
Unauthorized(String),
/// <p>The document encoding is not supported.</p>
UnsupportedDocumentEncoding(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl DeleteThingShadowError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> DeleteThingShadowError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"InternalFailureException" => {
return DeleteThingShadowError::InternalFailure(String::from(error_message))
}
"InvalidRequestException" => {
return DeleteThingShadowError::InvalidRequest(String::from(error_message))
}
"MethodNotAllowedException" => {
return DeleteThingShadowError::MethodNotAllowed(String::from(error_message))
}
"ResourceNotFoundException" => {
return DeleteThingShadowError::ResourceNotFound(String::from(error_message))
}
"ServiceUnavailableException" => {
return DeleteThingShadowError::ServiceUnavailable(String::from(error_message))
}
"ThrottlingException" => {
return DeleteThingShadowError::Throttling(String::from(error_message))
}
"UnauthorizedException" => {
return DeleteThingShadowError::Unauthorized(String::from(error_message))
}
"UnsupportedDocumentEncodingException" => {
return DeleteThingShadowError::UnsupportedDocumentEncoding(String::from(
error_message,
))
}
"ValidationException" => {
return DeleteThingShadowError::Validation(error_message.to_string())
}
_ => {}
}
}
return DeleteThingShadowError::Unknown(res);
}
}
impl From<serde_json::error::Error> for DeleteThingShadowError {
fn from(err: serde_json::error::Error) -> DeleteThingShadowError {
DeleteThingShadowError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for DeleteThingShadowError {
fn from(err: CredentialsError) -> DeleteThingShadowError {
DeleteThingShadowError::Credentials(err)
}
}
impl From<HttpDispatchError> for DeleteThingShadowError {
fn from(err: HttpDispatchError) -> DeleteThingShadowError {
DeleteThingShadowError::HttpDispatch(err)
}
}
impl From<io::Error> for DeleteThingShadowError {
fn from(err: io::Error) -> DeleteThingShadowError {
DeleteThingShadowError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for DeleteThingShadowError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for DeleteThingShadowError {
fn description(&self) -> &str {
match *self {
DeleteThingShadowError::InternalFailure(ref cause) => cause,
DeleteThingShadowError::InvalidRequest(ref cause) => cause,
DeleteThingShadowError::MethodNotAllowed(ref cause) => cause,
DeleteThingShadowError::ResourceNotFound(ref cause) => cause,
DeleteThingShadowError::ServiceUnavailable(ref cause) => cause,
DeleteThingShadowError::Throttling(ref cause) => cause,
DeleteThingShadowError::Unauthorized(ref cause) => cause,
DeleteThingShadowError::UnsupportedDocumentEncoding(ref cause) => cause,
DeleteThingShadowError::Validation(ref cause) => cause,
DeleteThingShadowError::Credentials(ref err) => err.description(),
DeleteThingShadowError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
DeleteThingShadowError::ParseError(ref cause) => cause,
DeleteThingShadowError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by GetThingShadow
#[derive(Debug, PartialEq)]
pub enum GetThingShadowError {
/// <p>An unexpected error has occurred.</p>
InternalFailure(String),
/// <p>The request is not valid.</p>
InvalidRequest(String),
/// <p>The specified combination of HTTP verb and URI is not supported.</p>
MethodNotAllowed(String),
/// <p>The specified resource does not exist.</p>
ResourceNotFound(String),
/// <p>The service is temporarily unavailable.</p>
ServiceUnavailable(String),
/// <p>The rate exceeds the limit.</p>
Throttling(String),
/// <p>You are not authorized to perform this operation.</p>
Unauthorized(String),
/// <p>The document encoding is not supported.</p>
UnsupportedDocumentEncoding(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl GetThingShadowError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> GetThingShadowError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"InternalFailureException" => {
return GetThingShadowError::InternalFailure(String::from(error_message))
}
"InvalidRequestException" => {
return GetThingShadowError::InvalidRequest(String::from(error_message))
}
"MethodNotAllowedException" => {
return GetThingShadowError::MethodNotAllowed(String::from(error_message))
}
"ResourceNotFoundException" => {
return GetThingShadowError::ResourceNotFound(String::from(error_message))
}
"ServiceUnavailableException" => {
return GetThingShadowError::ServiceUnavailable(String::from(error_message))
}
"ThrottlingException" => {
return GetThingShadowError::Throttling(String::from(error_message))
}
"UnauthorizedException" => {
return GetThingShadowError::Unauthorized(String::from(error_message))
}
"UnsupportedDocumentEncodingException" => {
return GetThingShadowError::UnsupportedDocumentEncoding(String::from(
error_message,
))
}
"ValidationException" => {
return GetThingShadowError::Validation(error_message.to_string())
}
_ => {}
}
}
return GetThingShadowError::Unknown(res);
}
}
impl From<serde_json::error::Error> for GetThingShadowError {
fn from(err: serde_json::error::Error) -> GetThingShadowError {
GetThingShadowError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for GetThingShadowError {
fn from(err: CredentialsError) -> GetThingShadowError {
GetThingShadowError::Credentials(err)
}
}
impl From<HttpDispatchError> for GetThingShadowError {
fn from(err: HttpDispatchError) -> GetThingShadowError {
GetThingShadowError::HttpDispatch(err)
}
}
impl From<io::Error> for GetThingShadowError {
fn from(err: io::Error) -> GetThingShadowError {
GetThingShadowError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for GetThingShadowError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for GetThingShadowError {
fn description(&self) -> &str {
match *self {
GetThingShadowError::InternalFailure(ref cause) => cause,
GetThingShadowError::InvalidRequest(ref cause) => cause,
GetThingShadowError::MethodNotAllowed(ref cause) => cause,
GetThingShadowError::ResourceNotFound(ref cause) => cause,
GetThingShadowError::ServiceUnavailable(ref cause) => cause,
GetThingShadowError::Throttling(ref cause) => cause,
GetThingShadowError::Unauthorized(ref cause) => cause,
GetThingShadowError::UnsupportedDocumentEncoding(ref cause) => cause,
GetThingShadowError::Validation(ref cause) => cause,
GetThingShadowError::Credentials(ref err) => err.description(),
GetThingShadowError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
GetThingShadowError::ParseError(ref cause) => cause,
GetThingShadowError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by Publish
#[derive(Debug, PartialEq)]
pub enum PublishError {
/// <p>An unexpected error has occurred.</p>
InternalFailure(String),
/// <p>The request is not valid.</p>
InvalidRequest(String),
/// <p>The specified combination of HTTP verb and URI is not supported.</p>
MethodNotAllowed(String),
/// <p>You are not authorized to perform this operation.</p>
Unauthorized(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl PublishError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> PublishError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"InternalFailureException" => {
return PublishError::InternalFailure(String::from(error_message))
}
"InvalidRequestException" => {
return PublishError::InvalidRequest(String::from(error_message))
}
"MethodNotAllowedException" => {
return PublishError::MethodNotAllowed(String::from(error_message))
}
"UnauthorizedException" => {
return PublishError::Unauthorized(String::from(error_message))
}
"ValidationException" => return PublishError::Validation(error_message.to_string()),
_ => {}
}
}
return PublishError::Unknown(res);
}
}
impl From<serde_json::error::Error> for PublishError {
fn from(err: serde_json::error::Error) -> PublishError {
PublishError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for PublishError {
fn from(err: CredentialsError) -> PublishError {
PublishError::Credentials(err)
}
}
impl From<HttpDispatchError> for PublishError {
fn from(err: HttpDispatchError) -> PublishError {
PublishError::HttpDispatch(err)
}
}
impl From<io::Error> for PublishError {
fn from(err: io::Error) -> PublishError {
PublishError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for PublishError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for PublishError {
fn description(&self) -> &str {
match *self {
PublishError::InternalFailure(ref cause) => cause,
PublishError::InvalidRequest(ref cause) => cause,
PublishError::MethodNotAllowed(ref cause) => cause,
PublishError::Unauthorized(ref cause) => cause,
PublishError::Validation(ref cause) => cause,
PublishError::Credentials(ref err) => err.description(),
PublishError::HttpDispatch(ref dispatch_error) => dispatch_error.description(),
PublishError::ParseError(ref cause) => cause,
PublishError::Unknown(_) => "unknown error",
}
}
}
/// Errors returned by UpdateThingShadow
#[derive(Debug, PartialEq)]
pub enum UpdateThingShadowError {
/// <p>The specified version does not match the version of the document.</p>
Conflict(String),
/// <p>An unexpected error has occurred.</p>
InternalFailure(String),
/// <p>The request is not valid.</p>
InvalidRequest(String),
/// <p>The specified combination of HTTP verb and URI is not supported.</p>
MethodNotAllowed(String),
/// <p>The payload exceeds the maximum size allowed.</p>
RequestEntityTooLarge(String),
/// <p>The service is temporarily unavailable.</p>
ServiceUnavailable(String),
/// <p>The rate exceeds the limit.</p>
Throttling(String),
/// <p>You are not authorized to perform this operation.</p>
Unauthorized(String),
/// <p>The document encoding is not supported.</p>
UnsupportedDocumentEncoding(String),
/// An error occurred dispatching the HTTP request
HttpDispatch(HttpDispatchError),
/// An error was encountered with AWS credentials.
Credentials(CredentialsError),
/// A validation error occurred. Details from AWS are provided.
Validation(String),
/// An error occurred parsing the response payload.
ParseError(String),
/// An unknown error occurred. The raw HTTP response is provided.
Unknown(BufferedHttpResponse),
}
impl UpdateThingShadowError {
// see boto RestJSONParser impl for parsing errors
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L838-L850
pub fn from_response(res: BufferedHttpResponse) -> UpdateThingShadowError {
if let Ok(json) = from_slice::<SerdeJsonValue>(&res.body) {
let error_type = match res.headers.get("x-amzn-errortype") {
Some(raw_error_type) => raw_error_type
.split(':')
.next()
.unwrap_or_else(|| "Unknown"),
_ => json
.get("code")
.or_else(|| json.get("Code"))
.and_then(|c| c.as_str())
.unwrap_or_else(|| "Unknown"),
};
// message can come in either "message" or "Message"
// see boto BaseJSONParser impl for parsing message
// https://github.com/boto/botocore/blob/4dff78c840403d1d17db9b3f800b20d3bd9fbf9f/botocore/parsers.py#L595-L598
let error_message = json
.get("message")
.or_else(|| json.get("Message"))
.and_then(|m| m.as_str())
.unwrap_or("");
match error_type {
"ConflictException" => {
return UpdateThingShadowError::Conflict(String::from(error_message))
}
"InternalFailureException" => {
return UpdateThingShadowError::InternalFailure(String::from(error_message))
}
"InvalidRequestException" => {
return UpdateThingShadowError::InvalidRequest(String::from(error_message))
}
"MethodNotAllowedException" => {
return UpdateThingShadowError::MethodNotAllowed(String::from(error_message))
}
"RequestEntityTooLargeException" => {
return UpdateThingShadowError::RequestEntityTooLarge(String::from(
error_message,
))
}
"ServiceUnavailableException" => {
return UpdateThingShadowError::ServiceUnavailable(String::from(error_message))
}
"ThrottlingException" => {
return UpdateThingShadowError::Throttling(String::from(error_message))
}
"UnauthorizedException" => {
return UpdateThingShadowError::Unauthorized(String::from(error_message))
}
"UnsupportedDocumentEncodingException" => {
return UpdateThingShadowError::UnsupportedDocumentEncoding(String::from(
error_message,
))
}
"ValidationException" => {
return UpdateThingShadowError::Validation(error_message.to_string())
}
_ => {}
}
}
return UpdateThingShadowError::Unknown(res);
}
}
impl From<serde_json::error::Error> for UpdateThingShadowError {
fn from(err: serde_json::error::Error) -> UpdateThingShadowError {
UpdateThingShadowError::ParseError(err.description().to_string())
}
}
impl From<CredentialsError> for UpdateThingShadowError {
fn from(err: CredentialsError) -> UpdateThingShadowError {
UpdateThingShadowError::Credentials(err)
}
}
impl From<HttpDispatchError> for UpdateThingShadowError {
fn from(err: HttpDispatchError) -> UpdateThingShadowError {
UpdateThingShadowError::HttpDispatch(err)
}
}
impl From<io::Error> for UpdateThingShadowError {
fn from(err: io::Error) -> UpdateThingShadowError {
UpdateThingShadowError::HttpDispatch(HttpDispatchError::from(err))
}
}
impl fmt::Display for UpdateThingShadowError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl Error for UpdateThingShadowError {
fn description(&self) -> &str {
match *self {
UpdateThingShadowError::Conflict(ref cause) => cause,
UpdateThingShadowError::InternalFailure(ref cause) => cause,
UpdateThingShadowError::InvalidRequest(ref cause) => cause,
UpdateThingShadowError::MethodNotAllowed(ref cause) => cause,
UpdateThingShadowError::RequestEntityTooLarge(ref cause) => cause,
UpdateThingShadowError::ServiceUnavailable(ref cause) => cause,
UpdateThingShadowError::Throttling(ref cause) => cause,
UpdateThingShadowError::Unauthorized(ref cause) => cause,
UpdateThingShadowError::UnsupportedDocumentEncoding(ref cause) => cause,
UpdateThingShadowError::Validation(ref cause) => cause,
UpdateThingShadowError::Credentials(ref err) => err.description(),
UpdateThingShadowError::HttpDispatch(ref dispatch_error) => {
dispatch_error.description()
}
UpdateThingShadowError::ParseError(ref cause) => cause,
UpdateThingShadowError::Unknown(_) => "unknown error",
}
}
}
/// Trait representing the capabilities of the AWS IoT Data Plane API. AWS IoT Data Plane clients implement this trait.
pub trait IotData {
/// <p>Deletes the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_DeleteThingShadow.html">DeleteThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn delete_thing_shadow(
&self,
input: DeleteThingShadowRequest,
) -> RusotoFuture<DeleteThingShadowResponse, DeleteThingShadowError>;
/// <p>Gets the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_GetThingShadow.html">GetThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn get_thing_shadow(
&self,
input: GetThingShadowRequest,
) -> RusotoFuture<GetThingShadowResponse, GetThingShadowError>;
/// <p>Publishes state information.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/protocols.html#http">HTTP Protocol</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn publish(&self, input: PublishRequest) -> RusotoFuture<(), PublishError>;
/// <p>Updates the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_UpdateThingShadow.html">UpdateThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn update_thing_shadow(
&self,
input: UpdateThingShadowRequest,
) -> RusotoFuture<UpdateThingShadowResponse, UpdateThingShadowError>;
}
/// A client for the AWS IoT Data Plane API.
pub struct IotDataClient {
client: Client,
region: region::Region,
}
impl IotDataClient {
/// Creates a client backed by the default tokio event loop.
///
/// The client will use the default credentials provider and tls client.
pub fn new(region: region::Region) -> IotDataClient {
IotDataClient {
client: Client::shared(),
region: region,
}
}
pub fn new_with<P, D>(
request_dispatcher: D,
credentials_provider: P,
region: region::Region,
) -> IotDataClient
where
P: ProvideAwsCredentials + Send + Sync + 'static,
P::Future: Send,
D: DispatchSignedRequest + Send + Sync + 'static,
D::Future: Send,
{
IotDataClient {
client: Client::new_with(credentials_provider, request_dispatcher),
region: region,
}
}
}
impl IotData for IotDataClient {
/// <p>Deletes the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_DeleteThingShadow.html">DeleteThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn delete_thing_shadow(
&self,
input: DeleteThingShadowRequest,
) -> RusotoFuture<DeleteThingShadowResponse, DeleteThingShadowError> {
let request_uri = format!("/things/{thing_name}/shadow", thing_name = input.thing_name);
let mut request = SignedRequest::new("DELETE", "iotdata", &self.region, &request_uri);
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.set_endpoint_prefix("data.iot".to_string());
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut result = DeleteThingShadowResponse::default();
result.payload = response.body;
result
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(DeleteThingShadowError::from_response(response))),
)
}
})
}
/// <p>Gets the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_GetThingShadow.html">GetThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn get_thing_shadow(
&self,
input: GetThingShadowRequest,
) -> RusotoFuture<GetThingShadowResponse, GetThingShadowError> {
let request_uri = format!("/things/{thing_name}/shadow", thing_name = input.thing_name);
let mut request = SignedRequest::new("GET", "iotdata", &self.region, &request_uri);
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.set_endpoint_prefix("data.iot".to_string());
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut result = GetThingShadowResponse::default();
result.payload = Some(response.body);
result
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(GetThingShadowError::from_response(response))),
)
}
})
}
/// <p>Publishes state information.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/protocols.html#http">HTTP Protocol</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn publish(&self, input: PublishRequest) -> RusotoFuture<(), PublishError> {
let request_uri = format!("/topics/{topic}", topic = input.topic);
let mut request = SignedRequest::new("POST", "iotdata", &self.region, &request_uri);
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.set_endpoint_prefix("data.iot".to_string());
let encoded = if let Some(ref payload) = input.payload {
Some(payload.to_owned())
} else {
None
};
request.set_payload(encoded);
let mut params = Params::new();
if let Some(ref x) = input.qos {
params.put("qos", x);
}
request.set_params(params);
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let result = ::std::mem::drop(response);
result
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(PublishError::from_response(response))),
)
}
})
}
/// <p>Updates the thing shadow for the specified thing.</p> <p>For more information, see <a href="http://docs.aws.amazon.com/iot/latest/developerguide/API_UpdateThingShadow.html">UpdateThingShadow</a> in the <i>AWS IoT Developer Guide</i>.</p>
fn update_thing_shadow(
&self,
input: UpdateThingShadowRequest,
) -> RusotoFuture<UpdateThingShadowResponse, UpdateThingShadowError> {
let request_uri = format!("/things/{thing_name}/shadow", thing_name = input.thing_name);
let mut request = SignedRequest::new("POST", "iotdata", &self.region, &request_uri);
request.set_content_type("application/x-amz-json-1.1".to_owned());
request.set_endpoint_prefix("data.iot".to_string());
let encoded = Some(input.payload.to_owned());
request.set_payload(encoded);
self.client.sign_and_dispatch(request, |response| {
if response.status.is_success() {
Box::new(response.buffer().from_err().map(|response| {
let mut result = UpdateThingShadowResponse::default();
result.payload = Some(response.body);
result
}))
} else {
Box::new(
response
.buffer()
.from_err()
.and_then(|response| Err(UpdateThingShadowError::from_response(response))),
)
}
})
}
}
#[cfg(test)]
mod protocol_tests {}<|fim▁end|>
|
pub thing_name: String,
}
|
<|file_name|>webapis-rtc-peer-connection.js<|end_file_name|><|fim▁begin|>/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? factory() :
typeof define === 'function' && define.amd ? define(factory) :
(factory());
}(this, (function () { 'use strict';
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
Zone.__load_patch('RTCPeerConnection', function (global, Zone, api) {
var RTCPeerConnection = global['RTCPeerConnection'];<|fim▁hole|> var removeSymbol = api.symbol('removeEventListener');
RTCPeerConnection.prototype.addEventListener = RTCPeerConnection.prototype[addSymbol];
RTCPeerConnection.prototype.removeEventListener = RTCPeerConnection.prototype[removeSymbol];
// RTCPeerConnection extends EventTarget, so we must clear the symbol
// to allow pathc RTCPeerConnection.prototype.addEventListener again
RTCPeerConnection.prototype[addSymbol] = null;
RTCPeerConnection.prototype[removeSymbol] = null;
api.patchEventTarget(global, [RTCPeerConnection.prototype], { useGlobalCallback: false });
});
})));<|fim▁end|>
|
if (!RTCPeerConnection) {
return;
}
var addSymbol = api.symbol('addEventListener');
|
<|file_name|>logic.rs<|end_file_name|><|fim▁begin|>//! Logic module.
#[cfg(feature = "gps")]
mod acquiring_fix;
#[cfg(not(feature = "gps"))]
mod eternal_loop;
#[cfg(feature = "gps")]
mod fix_acquired;
#[cfg(feature = "gps")]
mod going_down;
#[cfg(feature = "gps")]
mod going_up;
mod init;
#[cfg(feature = "gps")]
mod landed;
mod safe_mode;
mod shut_down;
#[cfg(feature = "gps")]
mod waiting_launch;
use std::{
fmt,
fs::{File, OpenOptions},
io::{Read, Write},
str::FromStr,
sync::Mutex,
};
use failure::{Error, ResultExt};
use lazy_static::lazy_static;
use log::error;
use crate::{config::CONFIG, error, STATE_FILE};
lazy_static! {
static ref CURRENT_STATE: Mutex<State> = Mutex::new(State::Init);
}
/// Trait representing a state machine.
pub trait StateMachine {
/// The logic to run after the current state.
type Next: MainLogic;
/// Executes this state and returns the next one.
fn execute(self) -> Result<Self::Next, Error>;
}
/// Trait to get the current state in the `State` enum for the current state in the state machine.
pub trait GetState {
/// Gets the state enumeration variant for the current state.
fn get_state(&self) -> State;
}
/// Trait implementing the main logic of the program.
#[allow(clippy::module_name_repetitions)]
pub trait MainLogic: GetState {
/// Performs the main logic of the state.
fn main_logic(self) -> Result<(), Error>;
}
impl<S> MainLogic for S
where
S: StateMachine + GetState,
{
fn main_logic(self) -> Result<(), Error> {
let new_state = self.execute()?;
{
let mut current_state = match CURRENT_STATE.lock() {
Ok(guard) => guard,
Err(poisoned) => {
error!("The CURRENT_STATE mutex was poisoned.");
poisoned.into_inner()
}
};
*current_state = new_state.get_state();
}
save_current_state()?;
new_state.main_logic()
}
}
/// Saves the current state into the state file.
fn save_current_state() -> Result<(), Error> {
let path = CONFIG.data_dir().join(STATE_FILE);
let mut file = OpenOptions::new()
.write(true)
.create(true)
.truncate(true)
.open(path)
.context(error::LastState::FileOpen)?;
{
let current_state = match CURRENT_STATE.lock() {
Ok(guard) => guard,
Err(poisoned) => {
error!("The CURRENT_STATE mutex was poisoned.");
poisoned.into_inner()
}
};
file.write_all(current_state.as_str().as_bytes())
.context(error::LastState::FileWrite)?;
}
Ok(())
}
/// Main OpenStratos state machine
#[derive(Debug, Clone, Copy)]
pub struct OpenStratos<S: GetState + fmt::Debug + Clone + Copy> {
/// State of the logic item, only for compile time checks, no actual memory layout.
state: S,
}
impl<S> GetState for OpenStratos<S>
where
S: GetState + fmt::Debug + Clone + Copy,
{
fn get_state(&self) -> State {
self.state.get_state()
}
}
/// Initializes a new state machine.
pub fn init() -> Result<OpenStratos<Init>, Error> {
save_current_state()?;
Ok(OpenStratos { state: Init })
}
/// States of the onboard computer.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum State {
/// Initialization.
Init,
/// Acquiring GPS fix.
#[cfg(feature = "gps")]
AcquiringFix,
/// GPS fix has been acquired.
#[cfg(feature = "gps")]
FixAcquired,
/// Waiting for balloon launch.
#[cfg(feature = "gps")]
WaitingLaunch,
/// Going up.
#[cfg(feature = "gps")]
GoingUp,
/// Going down.
#[cfg(feature = "gps")]
GoingDown,
/// Probe landed.
#[cfg(feature = "gps")]
Landed,
/// Shutting computer down.
ShutDown,
/// Safe mode operation.
SafeMode,
/// Eternal loop, without GPS.
#[cfg(not(feature = "gps"))]
EternalLoop,
}
impl State {
/// Gets the last state of the application if there is one.
pub fn get_last() -> Result<Option<Self>, Error> {
let path = CONFIG.data_dir().join(STATE_FILE);
if !path.exists() {
return Ok(None);
}
let mut file = File::open(path).context(error::LastState::FileOpen)?;
let mut state = String::new();
let _ = file
.read_to_string(&mut state)
.context(error::LastState::FileRead)?;
if state.is_empty() {
Ok(None)
} else {
Ok(Some(state.parse()?))
}
}
/// Gets the state as a string to be stored in the `LAST_STATE` file.
pub fn as_str(&self) -> &str {
match *self {
State::Init => "INITIALIZING",
#[cfg(feature = "gps")]
State::AcquiringFix => "ACQUIRING_FIX",
#[cfg(feature = "gps")]
State::FixAcquired => "FIX_ACQUIRED",
#[cfg(feature = "gps")]
State::WaitingLaunch => "WAITING_LAUNCH",
#[cfg(feature = "gps")]
State::GoingUp => "GOING_UP",
#[cfg(feature = "gps")]
State::GoingDown => "GOING_DOWN",
#[cfg(feature = "gps")]
State::Landed => "LANDED",
State::ShutDown => "SHUT_DOWN",
State::SafeMode => "SAFE_MODE",
#[cfg(not(feature = "gps"))]
State::EternalLoop => "ETERNAL_LOOP",
}
}
}
impl FromStr for State {
type Err = error::LastState;
fn from_str(s: &str) -> Result<Self, Self::Err> {
match s {
"INITIALIZING" => Ok(State::Init),
#[cfg(feature = "gps")]
"ACQUIRING_FIX" => Ok(State::AcquiringFix),
#[cfg(feature = "gps")]
"FIX_ACQUIRED" => Ok(State::FixAcquired),
#[cfg(feature = "gps")]
"WAITING_LAUNCH" => Ok(State::WaitingLaunch),
#[cfg(feature = "gps")]
"GOING_UP" => Ok(State::GoingUp),
#[cfg(feature = "gps")]
"GOING_DOWN" => Ok(State::GoingDown),
#[cfg(feature = "gps")]
"LANDED" => Ok(State::Landed),
"SHUT_DOWN" => Ok(State::ShutDown),
"SAFE_MODE" => Ok(State::SafeMode),
#[cfg(not(feature = "gps"))]
"ETERNAL_LOOP" => Ok(State::EternalLoop),
_ => Err(error::LastState::Invalid {
state: s.to_owned(),
}),
}
}
}
/// Initialization state.
#[derive(Debug, Clone, Copy)]
pub struct Init;
impl GetState for Init {
fn get_state(&self) -> State {
State::Init
}
}
/// Acquiring fix state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct AcquiringFix;
#[cfg(feature = "gps")]
impl GetState for AcquiringFix {
fn get_state(&self) -> State {
State::AcquiringFix
}
}
/// Fix acquired state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct FixAcquired;
#[cfg(feature = "gps")]
impl GetState for FixAcquired {
fn get_state(&self) -> State {
State::FixAcquired
}
}
/// Waiting launch state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct WaitingLaunch;
#[cfg(feature = "gps")]
impl GetState for WaitingLaunch {
fn get_state(&self) -> State {
State::WaitingLaunch
}
}
/// Going up state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct GoingUp;
#[cfg(feature = "gps")]
impl GetState for GoingUp {
fn get_state(&self) -> State {
State::GoingUp
}
}
/// Going down state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct GoingDown;
#[cfg(feature = "gps")]
impl GetState for GoingDown {
fn get_state(&self) -> State {
State::GoingDown
}
}
/// Landed state.
#[cfg(feature = "gps")]
#[derive(Debug, Clone, Copy)]
pub struct Landed;
#[cfg(feature = "gps")]
impl GetState for Landed {
fn get_state(&self) -> State {
State::Landed
}
}
/// Shut down state.
#[derive(Debug, Clone, Copy)]
pub struct ShutDown;
impl GetState for ShutDown {
fn get_state(&self) -> State {
State::ShutDown
}
}
/// Safe mode state.
#[derive(Debug, Clone, Copy)]
pub struct SafeMode;
impl GetState for SafeMode {
fn get_state(&self) -> State {
State::SafeMode
}
}
/// Eternal loop state, if no GPS is enabled.
#[cfg(not(feature = "gps"))]
#[derive(Debug, Clone, Copy)]
pub struct EternalLoop;
#[cfg(not(feature = "gps"))]
impl GetState for EternalLoop {
fn get_state(&self) -> State {
State::EternalLoop
}
}
#[cfg(test)]
mod tests {
#[cfg(not(feature = "gps"))]
use super::EternalLoop;
#[cfg(feature = "gps")]
use super::{AcquiringFix, FixAcquired, GoingDown, GoingUp, Landed, WaitingLaunch};
use super::{GetState, Init, SafeMode, ShutDown, State};
/// Tests if the `Init` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
fn it_get_state_init() {
let state = Init;
assert_eq!(state.get_state(), State::Init);
}
/// Tests if the `State::Init` is parsed correctly from a string.
#[test]
fn it_from_str_init() {
assert_eq!("INITIALIZING".parse::<State>().unwrap(), State::Init);
}
/// Tests that the `State::Init` is translated to *INITIALIZING* as a string.
#[test]
fn it_as_str_init() {
assert_eq!("INITIALIZING", State::Init.as_str());
}
/// Tests if the `AcquiringFix` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_acquiring_fix() {
let state = AcquiringFix;
assert_eq!(state.get_state(), State::AcquiringFix);
}
/// Tests if the `State::AcquiringFix` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_acquiring_fix() {
assert_eq!(
"ACQUIRING_FIX".parse::<State>().unwrap(),
State::AcquiringFix
);
}
/// Tests that the `State::AcquiringFix` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_acquiring_fix() {
let _ = "ACQUIRING_FIX".parse::<State>().unwrap();
}
/// Tests that the `State::AcquiringFix` is translated to `ACQUIRING_FIX` as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_acquiring_fix() {
assert_eq!("ACQUIRING_FIX", State::AcquiringFix.as_str());
}
/// Tests if the `FixAcquired` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_fix_acquired() {
let state = FixAcquired;
assert_eq!(state.get_state(), State::FixAcquired);
}
/// Tests if the `State::FixAcquired` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_fix_acquired() {
assert_eq!("FIX_ACQUIRED".parse::<State>().unwrap(), State::FixAcquired);
}
/// Tests that the `State::FixAcquired` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_fix_acquired() {
let _ = "FIX_ACQUIRED".parse::<State>().unwrap();
}
/// Tests that the `State::FixAcquired` is translated to `FIX_ACQUIRED` as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_fix_acquired() {
assert_eq!("FIX_ACQUIRED", State::FixAcquired.as_str());
}
/// Tests if the `WaitingLaunch` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_waiting_launch() {
let state = WaitingLaunch;
assert_eq!(state.get_state(), State::WaitingLaunch);
}
/// Tests if the `State::WaitingLaunch` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_waiting_launch() {
assert_eq!(
"WAITING_LAUNCH".parse::<State>().unwrap(),
State::WaitingLaunch
);
}
/// Tests that the `State::WaitingLaunch` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_waiting_launch() {
let _ = "WAITING_LAUNCH".parse::<State>().unwrap();
}
/// Tests that the `State::WaitingLaunch` is translated to `WAITING_LAUNCH` as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_waiting_launch() {
assert_eq!("WAITING_LAUNCH", State::WaitingLaunch.as_str());
}
/// Tests if the `GoingUp` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_going_up() {
let state = GoingUp;
assert_eq!(state.get_state(), State::GoingUp);
}
/// Tests if the `State::GoingUp` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_going_up() {
assert_eq!("GOING_UP".parse::<State>().unwrap(), State::GoingUp);
}
/// Tests that the `State::GoingUp` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_going_up() {
let _ = "GOING_UP".parse::<State>().unwrap();
}
/// Tests that the `State::GoingUp` is translated to `GOING_UP` as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_going_up() {
assert_eq!("GOING_UP", State::GoingUp.as_str());
}
/// Tests if the `GoingDown` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_going_down() {
let state = GoingDown;
assert_eq!(state.get_state(), State::GoingDown);
}
/// Tests if the `State::GoingDown` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_going_down() {
assert_eq!("GOING_DOWN".parse::<State>().unwrap(), State::GoingDown);
}
/// Tests that the `State::GoingDown` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_going_down() {
let _ = "GOING_DOWN".parse::<State>().unwrap();
}
/// Tests that the `State::GoingDown` is translated to `GOING_DOWN` as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_going_down() {
assert_eq!("GOING_DOWN", State::GoingDown.as_str());
}
/// Tests if the `Landed` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(feature = "gps")]
fn it_get_state_landed() {
let state = Landed;
assert_eq!(state.get_state(), State::Landed);
}
/// Tests if the `State::Landed` is parsed correctly from a string.
#[test]
#[cfg(feature = "gps")]
fn it_from_str_landed() {
assert_eq!("LANDED".parse::<State>().unwrap(), State::Landed);
}
/// Tests that the `State::Landed` is not parsed properly if the GPS feature is off.
#[test]
#[should_panic]
#[cfg(not(feature = "gps"))]
fn it_from_str_landed() {
let _ = "LANDED".parse::<State>().unwrap();
}
/// Tests that the `State::Landed` is translated to *LANDED* as a string.
#[test]
#[cfg(feature = "gps")]
fn it_as_str_landed() {
assert_eq!("LANDED", State::Landed.as_str());
}
/// Tests if the `ShutDown` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
fn it_get_state_shut_down() {
let state = ShutDown;
assert_eq!(state.get_state(), State::ShutDown);
}
/// Tests if the `State::ShutDown` is parsed correctly from a string.
#[test]
fn it_from_str_shut_down() {
assert_eq!("SHUT_DOWN".parse::<State>().unwrap(), State::ShutDown);
}
/// Tests that the `State::ShutDown` is translated to `SHUT_DOWN` as a string.
#[test]
fn it_as_str_shut_down() {
assert_eq!("SHUT_DOWN", State::ShutDown.as_str());
}
/// Tests if the `SafeMode` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
fn it_get_state_safe_mode() {
let state = SafeMode;
assert_eq!(state.get_state(), State::SafeMode);
}
/// Tests if the `State::SafeMode` is parsed correctly from a string.
#[test]
fn it_from_str_safe_mode() {
assert_eq!("SAFE_MODE".parse::<State>().unwrap(), State::SafeMode);
}
/// Tests that the `State::SafeMode` is translated to `SAFE_MODE` as a string.
#[test]
fn it_as_str_safe_mode() {
assert_eq!("SAFE_MODE", State::SafeMode.as_str());
}
/// Tests if the `EternalLoop` state generates the correct `State` enumeration variant in
/// `get_state()`.
#[test]
#[cfg(not(feature = "gps"))]
fn it_get_state_eternal_loop() {
let state = EternalLoop;
assert_eq!(state.get_state(), State::EternalLoop);
}
/// Tests if the `State::EternalLoop` is parsed correctly from a string.
#[test]
#[cfg(not(feature = "gps"))]
fn it_from_str_eternal_loop() {
assert_eq!("ETERNAL_LOOP".parse::<State>().unwrap(), State::EternalLoop);
}
/// Tests that the `State::EternalLoop` is not parsed properly if the GPS feature is on.
#[test]
#[should_panic]
#[cfg(feature = "gps")]
fn it_from_str_eternal_loop() {
let _ = "ETERNAL_LOOP".parse::<State>().unwrap();
}
/// Tests that the `State::EternalLoop` is translated to `ETERNAL_LOOP` as a string.
#[test]<|fim▁hole|>}<|fim▁end|>
|
#[cfg(not(feature = "gps"))]
fn it_as_str_eternal_loop() {
assert_eq!("ETERNAL_LOOP", State::EternalLoop.as_str());
}
|
<|file_name|>to_hilbert.rs<|end_file_name|><|fim▁begin|>extern crate byteorder;
extern crate COST;
use std::fs::File;
use std::io::BufWriter;
use byteorder::{WriteBytesExt, LittleEndian};
use COST::graph_iterator::NodesEdgesMemMapper;
fn main() {
if std::env::args().len() != 2 && std::env::args().len() != 3 {
println!("Usage: to_hilbert <prefix> [--dense]");
println!("NOTE: <prefix>.upper and <prefix>.lower will be overwritten.");
return;
}
let prefix = std::env::args().nth(1).expect("name unavailable");<|fim▁hole|> let dense = std::env::args().nth(2) == Some("--dense".to_string());
let graph = NodesEdgesMemMapper::new(&prefix);
let mut u_writer = BufWriter::new(File::create(format!("{}.upper", prefix)).unwrap());
let mut l_writer = BufWriter::new(File::create(format!("{}.lower", prefix)).unwrap());
COST::hilbert_curve::convert_to_hilbert(&graph, dense, |ux, uy, c, ls| {
u_writer.write_u16::<LittleEndian>(ux).unwrap();
u_writer.write_u16::<LittleEndian>(uy).unwrap();
u_writer.write_u32::<LittleEndian>(c).unwrap();
for &(lx, ly) in ls.iter(){
l_writer.write_u16::<LittleEndian>(lx).unwrap();
l_writer.write_u16::<LittleEndian>(ly).unwrap();
}
});
}<|fim▁end|>
| |
<|file_name|>example_ros_spinnaker_interface.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
@author Stephan Reith
@date 31.08.2016
This is a simple example to demonstrate how the ROS Spinnaker Interface can be used.
You will also need a ROS Listener and a ROS Talker to send and receive data.
Make sure they communicate over the same ROS topics and std_msgs.Int64 ROS Messages used in here.
"""
import spynnaker.pyNN as pynn
from ros_spinnaker_interface import ROS_Spinnaker_Interface
# import transfer_functions as tf
from ros_spinnaker_interface import SpikeSourcePoisson
from ros_spinnaker_interface import SpikeSinkSmoothing
ts = 0.1
n_neurons = 1
simulation_time = 10000 # ms
pynn.setup(timestep=ts, min_delay=ts, max_delay=2.0*ts)
pop = pynn.Population(size=n_neurons, cellclass=pynn.IF_curr_exp, cellparams={}, label='pop')
# The ROS_Spinnaker_Interface just needs to be initialised. The following parameters are possible:
ros_interface = ROS_Spinnaker_Interface(
n_neurons_source=n_neurons, # number of neurons of the injector population
Spike_Source_Class=SpikeSourcePoisson, # the transfer function ROS Input -> Spikes you want to use.
Spike_Sink_Class=SpikeSinkSmoothing, # the transfer function Spikes -> ROS Output you want to use.
# You can choose from the transfer_functions module
# or write one yourself.
output_population=pop, # the pynn population you wish to receive the
# live spikes from.
ros_topic_send='to_spinnaker', # the ROS topic used for the incoming ROS values.
ros_topic_recv='from_spinnaker', # the ROS topic used for the outgoing ROS values.
clk_rate=1000, # mainloop clock (update) rate in Hz.
ros_output_rate=10) # number of ROS messages send out per second.
# Build your network, run the simulation and optionally record the spikes and voltages.<|fim▁hole|>pop.record()
pop.record_v()
pynn.run(simulation_time)
spikes = pop.getSpikes()
pynn.end()
# Plot
import pylab
spike_times = [spike[1] for spike in spikes]
spike_ids = [spike[0] for spike in spikes]
pylab.plot(spike_times, spike_ids, ".")
pylab.xlabel('Time (ms)')
pylab.ylabel('Neuron ID')
pylab.title('Spike Plot')
pylab.xlim(xmin=0)
pylab.show()<|fim▁end|>
|
pynn.Projection(ros_interface, pop, pynn.OneToOneConnector(weights=5, delays=1))
|
<|file_name|>commission_site_urlbuilder.go<|end_file_name|><|fim▁begin|>// Code generated by go-swagger; DO NOT EDIT.
package operations
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the generate command
import (
"errors"
"net/url"
golangswaggerpaths "path"
"strings"
)
// CommissionSiteURL generates an URL for the commission site operation
type CommissionSiteURL struct {
ID string
_basePath string
// avoid unkeyed usage
_ struct{}<|fim▁hole|>// WithBasePath sets the base path for this url builder, only required when it's different from the
// base path specified in the swagger spec.
// When the value of the base path is an empty string
func (o *CommissionSiteURL) WithBasePath(bp string) *CommissionSiteURL {
o.SetBasePath(bp)
return o
}
// SetBasePath sets the base path for this url builder, only required when it's different from the
// base path specified in the swagger spec.
// When the value of the base path is an empty string
func (o *CommissionSiteURL) SetBasePath(bp string) {
o._basePath = bp
}
// Build a url path and query string
func (o *CommissionSiteURL) Build() (*url.URL, error) {
var result url.URL
var _path = "/sites/{id}"
id := o.ID
if id != "" {
_path = strings.Replace(_path, "{id}", id, -1)
} else {
return nil, errors.New("ID is required on CommissionSiteURL")
}
_basePath := o._basePath
if _basePath == "" {
_basePath = "/"
}
result.Path = golangswaggerpaths.Join(_basePath, _path)
return &result, nil
}
// Must is a helper function to panic when the url builder returns an error
func (o *CommissionSiteURL) Must(u *url.URL, err error) *url.URL {
if err != nil {
panic(err)
}
if u == nil {
panic("url can't be nil")
}
return u
}
// String returns the string representation of the path with query string
func (o *CommissionSiteURL) String() string {
return o.Must(o.Build()).String()
}
// BuildFull builds a full url with scheme, host, path and query string
func (o *CommissionSiteURL) BuildFull(scheme, host string) (*url.URL, error) {
if scheme == "" {
return nil, errors.New("scheme is required for a full url on CommissionSiteURL")
}
if host == "" {
return nil, errors.New("host is required for a full url on CommissionSiteURL")
}
base, err := o.Build()
if err != nil {
return nil, err
}
base.Scheme = scheme
base.Host = host
return base, nil
}
// StringFull returns the string representation of a complete url
func (o *CommissionSiteURL) StringFull(scheme, host string) string {
return o.Must(o.BuildFull(scheme, host)).String()
}<|fim▁end|>
|
}
|
<|file_name|>qa_vco.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
#
# Copyright 2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, gr_unittest, blocks
import math
def sig_source_f(samp_rate, freq, amp, N):
t = [float(x) / samp_rate for x in range(N)]
y = [amp*math.cos(2.*math.pi*freq*x) for x in t]
return y
def sig_source_c(samp_rate, freq, amp, N):
t = [float(x) / samp_rate for x in range(N)]
y = [math.cos(2.*math.pi*freq*x) + \
1j*math.sin(2.*math.pi*freq*x) for x in t]
return y
class test_vco(gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_001(self):
src_data = 200*[0,] + 200*[0.5,] + 200*[1,]
expected_result = 200*[1,] + \
sig_source_f(1, 0.125, 1, 200) + \
sig_source_f(1, 0.25, 1, 200)
src = blocks.vector_source_f(src_data)
op = blocks.vco_f(1, math.pi / 2.0, 1)
dst = blocks.vector_sink_f()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertFloatTuplesAlmostEqual(expected_result, result_data, 5)<|fim▁hole|>
def test_002(self):
src_data = 200*[0,] + 200*[0.5,] + 200*[1,]
expected_result = 200*[1,] + \
sig_source_c(1, 0.125, 1, 200) + \
sig_source_c(1, 0.25, 1, 200)
src = blocks.vector_source_f(src_data)
op = blocks.vco_c(1, math.pi / 2.0, 1)
dst = blocks.vector_sink_c()
self.tb.connect(src, op, dst)
self.tb.run()
result_data = dst.data()
self.assertComplexTuplesAlmostEqual(expected_result, result_data, 5)
if __name__ == '__main__':
gr_unittest.run(test_vco, "test_vco.xml")<|fim▁end|>
| |
<|file_name|>rest.py<|end_file_name|><|fim▁begin|>import urllib
import urllib2
import json
import functools
def buildUrl(url, params = []):
if(len(params) > 0):
if url.find('?') < 0:
# no '?' in the url
url += '?'
first = True
else:
first = False
for key, value in params:
if(first):
first = False
else:
url += '&'
url += urllib.quote(key) + '=' + urllib.quote(str(value))
return url
class UrlOpenFactory(object):
@property
def httpParams(self):
# we have to send anyting... so why not json?
return {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
def createRequest(self, url, data = None):
return urllib2.Request(url, data, self.httpParams)
def urlopen(self, url, data = None):
return urllib2.urlopen(self.createRequest(url, data)).read()
class JsonUrlOpenFactory(UrlOpenFactory):
@property
def httpParams(self):
return {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
<|fim▁hole|> @functools.wraps(f)
def wrapper(*args, **kwargs):
try:
return f(*args, **kwargs)
except urllib2.HTTPError as e:
with open('httpError', 'w') as out:
out.write('\n'.join(e.read().split('\\n')))
raise e
return wrapper<|fim▁end|>
|
def urlopen(self, url, data = None):
return json.loads(super(JsonUrlOpenFactory, self).urlopen(url, json.dumps(data) if not data is None else None))
def dumpHttpError(f):
|
<|file_name|>turbo.rs<|end_file_name|><|fim▁begin|>fn main() {
a::<B<>><|fim▁hole|>}<|fim▁end|>
| |
<|file_name|>zoom_out_tool.ts<|end_file_name|><|fim▁begin|><|fim▁hole|>import {ZoomBaseTool, ZoomBaseToolView} from "./zoom_base_tool"
import {tool_icon_zoom_out} from "styles/icons.css"
import * as p from "core/properties"
export class ZoomOutToolView extends ZoomBaseToolView {
override model: ZoomBaseTool
}
export namespace ZoomOutTool {
export type Attrs = p.AttrsOf<Props>
export type Props = ZoomBaseTool.Props & {
maintain_focus: p.Property<boolean>
}
}
export interface ZoomOutTool extends ZoomBaseTool.Attrs {}
export class ZoomOutTool extends ZoomBaseTool {
override properties: ZoomOutTool.Props
override __view_type__: ZoomBaseToolView
constructor(attrs?: Partial<ZoomBaseTool.Attrs>) {
super(attrs)
}
static {
this.prototype.default_view = ZoomOutToolView
this.define<ZoomOutTool.Props>(({Boolean}) => ({
maintain_focus: [ Boolean, true ],
}))
this.register_alias("zoom_out", () => new ZoomOutTool({dimensions: "both"}))
this.register_alias("xzoom_out", () => new ZoomOutTool({dimensions: "width"}))
this.register_alias("yzoom_out", () => new ZoomOutTool({dimensions: "height"}))
}
override sign = -1 as -1
override tool_name = "Zoom Out"
override tool_icon = tool_icon_zoom_out
}<|fim▁end|>
| |
<|file_name|>yusat_deframer.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2021-2022 Daniel Estevez <[email protected]>
#
# This file is part of gr-satellites
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
from gnuradio import gr, digital
import pmt
from ...hier.sync_to_pdu_packed import sync_to_pdu_packed
from ...hdlc_deframer import hdlc_crc_check
# HDLC 0x7e flag
_syncword = '01111110'
class crop_and_check_crc(gr.basic_block):
"""
Helper block to crop using the final 0x7e flag and check CRC-16
"""
def __init__(self):
gr.basic_block.__init__(
self,
name='crop_and_check_crc',
in_sig=[],
out_sig=[])
self.crc_check = hdlc_crc_check()
self.message_port_register_in(pmt.intern('in'))
self.set_msg_handler(pmt.intern('in'), self.handle_msg)
self.message_port_register_out(pmt.intern('out'))
def handle_msg(self, msg_pmt):<|fim▁hole|> msg = pmt.cdr(msg_pmt)
if not pmt.is_u8vector(msg):
print('[ERROR] Received invalid message type. Expected u8vector')
return
packet = pmt.u8vector_elements(msg)
start = 0
while True:
try:
idx = packet[start:].index(0x7e)
except ValueError:
return
start += idx + 1
p = packet[:idx]
if self.crc_check.fcs_ok(p):
p = p[:-2]
self.message_port_pub(
pmt.intern('out'),
pmt.cons(pmt.PMT_NIL, pmt.init_u8vector(len(p), p)))
return
class yusat_deframer(gr.hier_block2):
"""
Hierarchical block to deframe YUSAT ad-hoc AX.25-like protocol
The input is a float stream of soft symbols. The output are PDUs
with YUSAT frames.
Args:
options: Options from argparse
"""
def __init__(self, options=None):
gr.hier_block2.__init__(
self,
'yusat_deframer',
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(0, 0, 0))
self.message_port_register_hier_out('out')
self.slicer = digital.binary_slicer_fb()
# We hope that 256 bytes is long enough to contain the full packet
self.deframer = sync_to_pdu_packed(
packlen=256, sync=_syncword, threshold=0)
self.crop = crop_and_check_crc()
self.connect(self, self.slicer, self.deframer)
self.msg_connect((self.deframer, 'out'), (self.crop, 'in'))
self.msg_connect((self.crop, 'out'), (self, 'out'))<|fim▁end|>
| |
<|file_name|>flags.go<|end_file_name|><|fim▁begin|>/*
Copyright 2017 Tamás Gulácsi
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package connect
import (
"bytes"
"database/sql"
"flag"
"fmt"
"log"
"os"
"strings"
"sync"
"gopkg.in/rana/ora.v4"
"github.com/pkg/errors"
"github.com/tgulacsi/go/dber"
"github.com/tgulacsi/go/orahlp"
)
var (
fDsn = flag.String("db.dsn", "", "Oracle DSN (user/passw@sid)")
fUsername = flag.String("db.username", "", "username to connect as (if you don't provide the dsn")
fPassword = flag.String("db.password", "", "password to connect with (if you don't provide the dsn")
//fHost = flag.String("db.host", "", "Oracle DB's host (if you don't provide the dsn")
//fPort = flag.Int("db.port", 1521, "Oracle DB's port (if you don't provide the dsn) - defaults to 1521")
fSid = flag.String("db.sid", "", "Oracle DB's SID (if you don't provide the dsn)")
//fServiceName = flag.String("db.service", "", "Oracle DB's ServiceName (if you don't provide the dsn and the sid)")
)
// GetDSN returns a (command-line defined) connection string
func GetCfg(dsn string) (srvCfg ora.SrvCfg, sesCfg ora.SesCfg) {
if dsn != "" {
sesCfg.Username, sesCfg.Password, srvCfg.Dblink = orahlp.SplitDSN(*fDsn)
return srvCfg, sesCfg
}
if !flag.Parsed() {
flag.Parse()
if *fDsn == "" {
*fDsn = os.Getenv("DSN")
}
}
if *fDsn != "" {
sesCfg.Username, sesCfg.Password, srvCfg.Dblink = orahlp.SplitDSN(*fDsn)
}
if sesCfg.Username == "" {
sesCfg.Username = *fUsername
if sesCfg.Password == "" {
sesCfg.Password = *fPassword
}
}
if srvCfg.Dblink == "" {
if *fSid != "" {
srvCfg.Dblink = *fSid
} else {
if srvCfg.Dblink = os.Getenv("ORACLE_SID"); srvCfg.Dblink == "" {
srvCfg.Dblink = os.Getenv("TWO_TASK")
}
}
}
return srvCfg, sesCfg
}
func GetDSN(srvCfg ora.SrvCfg, sesCfg ora.SesCfg) string {
if srvCfg.Dblink == "" && sesCfg.Username == "" {
srvCfg, sesCfg = GetCfg("")
}
return sesCfg.Username + "/" + sesCfg.Password + "@" + srvCfg.Dblink
}
// GetConnection returns a connection - using GetDSN if dsn is empty
func GetConnection(dsn string) (*sql.DB, error) {
if dsn == "" {
dsn = GetDSN(GetCfg(""))
}
log.Printf("GetConnection dsn=%v", dsn)
conn, err := sql.Open("ora", dsn)
if err != nil {
return nil, errors.Wrap(err, "dsn="+dsn)
}
return conn, nil
}
var (
oraEnv *ora.Env
oraCxMu sync.Mutex
)
// GetRawConnection returns a raw (*ora.Ses) connection
// - using GetDSN if dsn is empty
func GetRawConnection(dsn string) (*ora.Ses, error) {
oraCxMu.Lock()
defer oraCxMu.Unlock()
if oraEnv == nil {
var err error
if oraEnv, err = ora.OpenEnv(); err != nil {
return nil, errors.Wrap(err, "OpenEnv")
}
}
srvCfg, sesCfg := GetCfg(dsn)
srv, err := oraEnv.OpenSrv(srvCfg)
if err != nil {
return nil, errors.Wrapf(err, "OpenSrv(%#v)", srvCfg)
}
ses, err := srv.OpenSes(sesCfg)
if err != nil {
srv.Close()
return nil, errors.Wrapf(err, "OpenSes(%#v)", sesCfg)
}
return ses, nil
}
// SplitDSN splits the username/password@sid string to its parts.
//
// Copied from github.com/tgulacsi/go/orahlp/orahlp.go
func SplitDSN(dsn string) (username, password, sid string) {
if strings.HasPrefix(dsn, "/@") {
return "", "", dsn[2:]
}
if i := strings.LastIndex(dsn, "@"); i >= 0 {
sid, dsn = dsn[i+1:], dsn[:i]
}
if i := strings.IndexByte(dsn, '/'); i >= 0 {
username, password = dsn[:i], dsn[i+1:]
}
return
}
type Column struct {
Schema, Name string
Type, Length, Precision, Scale int
Nullable bool
CharsetID, CharsetForm int
}
// DescribeQuery describes the columns in the qry string,
// using DBMS_SQL.PARSE + DBMS_SQL.DESCRIBE_COLUMNS2.
//
// This can help using unknown-at-compile-time, a.k.a.
// dynamic queries.
func DescribeQuery(db dber.Execer, qry string) ([]Column, error) {
//res := strings.Repeat("\x00", 32767)
res := make([]byte, 32767)
if _, err := db.Exec(`DECLARE
c INTEGER;
col_cnt INTEGER;
rec_tab DBMS_SQL.DESC_TAB;
a DBMS_SQL.DESC_REC;
v_idx PLS_INTEGER;
res VARCHAR2(32767);
BEGIN
c := DBMS_SQL.OPEN_CURSOR;
BEGIN
DBMS_SQL.PARSE(c, :1, DBMS_SQL.NATIVE);
DBMS_SQL.DESCRIBE_COLUMNS(c, col_cnt, rec_tab);
v_idx := rec_tab.FIRST;
WHILE v_idx IS NOT NULL LOOP
a := rec_tab(v_idx);
res := res||a.col_schema_name||' '||a.col_name||' '||a.col_type||' '||
a.col_max_len||' '||a.col_precision||' '||a.col_scale||' '||
(CASE WHEN a.col_null_ok THEN 1 ELSE 0 END)||' '||
a.col_charsetid||' '||a.col_charsetform||
CHR(10);
v_idx := rec_tab.NEXT(v_idx);
END LOOP;
EXCEPTION WHEN OTHERS THEN NULL;
DBMS_SQL.CLOSE_CURSOR(c);
RAISE;
END;
:2 := UTL_RAW.CAST_TO_RAW(res);
END;`, qry, &res,
); err != nil {
return nil, err
}
if i := bytes.IndexByte(res, 0); i >= 0 {
res = res[:i]
}
lines := bytes.Split(res, []byte{'\n'})
cols := make([]Column, 0, len(lines))
var nullable int
for _, line := range lines {
if len(line) == 0 {
continue
}
var col Column
switch j := bytes.IndexByte(line, ' '); j {
case -1:
continue
case 0:
line = line[1:]
default:
col.Schema, line = string(line[:j]), line[j+1:]
}
if n, err := fmt.Sscanf(string(line), "%s %d %d %d %d %d %d %d",
&col.Name, &col.Type, &col.Length, &col.Precision, &col.Scale, &nullable, &col.CharsetID, &col.CharsetForm,
); err != nil {
return cols, errors.Wrapf(err, "parsing %q (parsed: %d)", line, n)
}
col.Nullable = nullable != 0
cols = append(cols, col)
}
return cols, nil
}
type Version struct {
// major.maintenance.application-server.component-specific.platform-specific
Major, Maintenance, AppServer, Component, Platform int8
}
func GetVersion(db dber.Queryer) (Version, error) {
var s sql.NullString
if err := db.QueryRow("SELECT MIN(VERSION) FROM product_component_version " +
" WHERE product LIKE 'Oracle Database%'").Scan(&s); err != nil {
return Version{Major: -1}, err
}
var v Version
if _, err := fmt.Sscanf(s.String, "%d.%d.%d.%d.%d",
&v.Major, &v.Maintenance, &v.AppServer, &v.Component, &v.Platform); err != nil {
return v, errors.Wrapf(err, "scan version number %q", s.String)
}
return v, nil
}
// MapToSlice modifies query for map (:paramname) to :%d placeholders + slice of params.
//
// Calls metParam for each parameter met, and returns the slice of their results.
func MapToSlice(qry string, metParam func(string) interface{}) (string, []interface{}) {
if metParam == nil {
metParam = func(string) interface{} { return nil }
}
arr := make([]interface{}, 0, 16)
var buf bytes.Buffer
state, p, last := 0, 0, 0
for i, r := range qry {
switch {
case state == 0 && r == ':':
state++
p = i
// An identifier consists of a letter optionally followed by more letters, numerals, dollar signs, underscores, and number signs.
// http://docs.oracle.com/cd/B19306_01/appdev.102/b14261/fundamentals.htm#sthref309
case state == 1 &&
!('A' <= r && r <= 'Z' || 'a' <= r && r <= 'z' ||
(i-p > 1 && ('0' <= r && r <= '9' || r == '$' || r == '_' || r == '#'))):
state = 0
if i-p <= 1 { // :=
continue
}
arr = append(arr, metParam(qry[p+1:i]))
param := fmt.Sprintf(":%d", len(arr))
buf.WriteString(qry[last:p])
buf.WriteString(param)
last = i
}
}
if last < len(qry)-1 {
buf.WriteString(qry[last:])
}
return buf.String(), arr
}<|fim▁end|>
|
Unless required by applicable law or agreed to in writing, software
|
<|file_name|>models.go<|end_file_name|><|fim▁begin|>package main
import (
"database/sql"
"fmt"
)
//go:generate reform
type yesNo bool
func (yn *yesNo) Scan(src interface{}) error {
var str string
switch s := src.(type) {
case string:
str = s
case []byte:
str = string(s)
default:
return fmt.Errorf("unexpected type %T (%#v)", src, src)
}
switch str {
case "YES":
*yn = true
case "NO":
*yn = false
default:
return fmt.Errorf("unexpected %q", str)
}
return nil
}
// table maps a subset of information_schema.tables; reform's struct tags
// bind each field to the like-named catalog column.
//reform:information_schema.tables
type table struct {
	TableCatalog string `reform:"table_catalog"`
	TableSchema  string `reform:"table_schema"`
	TableName    string `reform:"table_name"`
	TableType    string `reform:"table_type"`
}
// column maps a subset of information_schema.columns; reform's struct tags
// bind each field to the like-named catalog column.
//reform:information_schema.columns
type column struct {
	TableCatalog string `reform:"table_catalog"`
	TableSchema  string `reform:"table_schema"`
	TableName    string `reform:"table_name"`
	Name         string `reform:"column_name"`
	IsNullable   yesNo  `reform:"is_nullable"`
	Type         string `reform:"data_type"`
}
// keyColumnUsage maps two columns of information_schema.key_column_usage:
// a key column's name and its 1-based position within the key.
//reform:information_schema.key_column_usage
type keyColumnUsage struct {
	ColumnName      string `reform:"column_name"`
	OrdinalPosition int    `reform:"ordinal_position"`
}
// sqliteMaster maps the name column of SQLite's sqlite_master catalog table.
//reform:sqlite_master
type sqliteMaster struct {
	Name string `reform:"name"`
}
// TODO This "dummy" table name is ugly. We should do better.
// See https://github.com/go-reform/reform/issues/107.
// sqliteTableInfo mirrors one row of SQLite's `PRAGMA table_info(...)`
// output (cid, name, type, notnull, dflt_value, pk); the real table name is
// substituted at query time, hence the "dummy" placeholder.
//reform:dummy
type sqliteTableInfo struct {
	CID          int     `reform:"cid"`
	Name         string  `reform:"name"`
	Type         string  `reform:"type"`
	NotNull      bool    `reform:"notnull"`
	DefaultValue *string `reform:"dflt_value"`
	PK           bool    `reform:"pk"`
}
|
var _ sql.Scanner = (*yesNo)(nil)
//reform:information_schema.tables
|
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require('gulp');
var setup = require('web3-common-build-setup');

// All build tooling is resolved out of the shared setup package's dependency
// folder rather than this project's own node_modules.
var DEPS_FOLDER = setup.depsFolder;

// Build tools
var _ = require(DEPS_FOLDER + 'lodash');
var insert = require(DEPS_FOLDER + 'gulp-insert');
var del = require(DEPS_FOLDER + 'del');
var plugins = {};
plugins.sass = require(DEPS_FOLDER + 'gulp-sass');
plugins.tsc = require(DEPS_FOLDER + 'gulp-tsc');
plugins.ngHtml2js = require(DEPS_FOLDER + 'gulp-ng-html2js');
plugins.concat = require(DEPS_FOLDER + 'gulp-concat');

// Customize build configuration
var CONFIG = setup.buildConfig;
CONFIG.FOLDER.APP = _.constant("./src/app/web3-demo/");
CONFIG.PARTIALS.MAIN = function() {
  return [
    "./src/app/web3-demo/view/content.html"
  ];
};

// Extend the default script list with browser libraries used by the demo
// (angular-mocks is needed because NG_MODULE_DEPS below pulls in the
// httpBackendMock module).
var tmpLibs = CONFIG.SRC.JS.LIBS();
tmpLibs.push('./bower_components/angular-mocks/angular-mocks.js');
tmpLibs.push('./bower_components/jquery/dist/jquery.js');
tmpLibs.push('./bower_components/bootstrap/dist/js/bootstrap.min.js');
CONFIG.SRC.JS.LIBS = function() { return tmpLibs; };
CONFIG.DEV.NG_MODULE_DEPS = function() { return ['httpBackendMock']; };

// Target folder for the 'deploy' task.
var deployDir = "./dist";

// Initialize gulp
var gulpInstance = setup.initGulp(gulp, CONFIG);
<|fim▁hole|>
gulpInstance.task('deploy', ['dist'], function() {
    // Return the stream so gulp waits for the copy to finish before marking
    // the task done (previously nothing was returned, so the task "completed"
    // immediately while the copy was still running).
    return gulp.src([
            CONFIG.DIST.FOLDER() + "app.js",
            CONFIG.DIST.FOLDER() + "templates.js",
            CONFIG.DIST.FOLDER() + "app.js.map"
        ])
        .pipe(gulp.dest(deployDir));
});
gulp.task("tscompile:templates", function () {
    // Convert the kebab-case module name into camelCase for the generated
    // angular template-cache module.
    var camelCaseModuleName = CONFIG.DYNAMIC_META.MODULE_NAME().replace(/-([a-z])/g, function(g) {
        return g[1].toUpperCase();
    });
    // Return the stream so gulp waits for the pipeline and surfaces its
    // errors; the old code called cb() synchronously, signalling success
    // before the compile had actually run.
    return gulp.src(CONFIG.SRC.ANGULAR_HTMLS())
        .pipe(plugins.ngHtml2js({
            moduleName: camelCaseModuleName + "Templatecache",
            prefix: "/"
        }))
        .pipe(plugins.concat(CONFIG.DIST.JS.FILES.TEMPLATES()))
        .pipe(insert.wrap(requireJSTemplatesPrefix, requireJSSuffix))
        .pipe(gulp.dest(CONFIG.DIST.FOLDER()));
});
gulpInstance.task('tscompile:app', ['prod:init-app'], function() {
    // Exclude bootstrap.ts when compiling distributables since
    // Camunda's tasklist app takes care of bootstrapping angular
    var srcFiles = [CONFIG.FOLDER.SRC() + "**/*.ts",
                    //"!" + CONFIG.FOLDER.SRC() + "**/*Interceptor.ts",
                    //"!" + CONFIG.FOLDER.SRC() + "**/bootstrap.ts",
                    "!" + CONFIG.SRC.TS.GLOBAL_TS_UNIT_TEST_FILES()];
    // Return the stream so gulp waits for the TypeScript compile; the old
    // code called cb() synchronously, marking the task done before tsc ran.
    return gulp.src(srcFiles.concat(CONFIG.SRC.TS.TS_DEFINITIONS()))
        .pipe(plugins.tsc(
            {
                allowBool: true,
                out: CONFIG.DIST.JS.FILES.APP(),
                sourcemap: true,
                sourceRoot: "/",
                target: "ES5"
            }))
        .pipe(insert.wrap(requireJSAppPrefix, requireJSSuffix))
        .pipe(gulp.dest(CONFIG.DIST.FOLDER()));
});
gulpInstance.task('sass', function () {
    // Return the stream so gulp waits for the compile and reports stream
    // errors itself; cb() used to fire before the compile had finished.
    return gulp.src("./sass/main.scss")
        .pipe(plugins.sass({
            precision: 8,
            errLogToConsole: true
        }))
        .pipe(gulp.dest("./target/css"));
});
gulpInstance.task('watchSass', function () {
    // No callback parameter: the old signature declared `cb` without ever
    // calling it, so gulp waited forever for the task to signal completion.
    // The watcher itself stays active for the life of the process.
    gulp.watch(['sass/**/*.scss'], ['sass']);
});
|
gulpInstance.task('dist', ['tscompile:templates', 'tscompile:app', 'resources']);
|
<|file_name|>CustomLoginBean.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2005-2010 Alfresco Software Limited.
*
* This file is part of Alfresco
*
* Alfresco is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Alfresco is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Alfresco. If not, see <http://www.gnu.org/licenses/>.
*/
package org.alfresco.sample;
import java.util.Date;
import org.alfresco.web.bean.LoginBean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
public class CustomLoginBean extends LoginBean
{
private static final Log logger = LogFactory.getLog(CustomLoginBean.class);
@Override
public String login()
{
String outcome = super.login();
// log to the console who logged in and when
String username = this.getUsername();
if (username == null)
{
username = "Guest";<|fim▁hole|>
logger.info(username + " has logged in at " + new Date());
return outcome;
}
@Override
public String logout()
{
String outcome = super.logout();
// log to the console who logged out and when
String username = this.getUsername();
if (username == null)
{
username = "Guest";
}
logger.info(username + " logged out at " + new Date());
return outcome;
}
}<|fim▁end|>
|
}
|
<|file_name|>freezer_table.go<|end_file_name|><|fim▁begin|>// Copyright 2019 The go-ethereum Authors
// This file is part of the go-ethereum library.
//
// The go-ethereum library is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// The go-ethereum library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>.
package rawdb
import (
"encoding/binary"
"errors"
"fmt"
"io"
"os"
"path/filepath"
"sync"
"sync/atomic"
"github.com/ethereum/go-ethereum/common"
"github.com/ethereum/go-ethereum/log"
"github.com/ethereum/go-ethereum/metrics"
"github.com/golang/snappy"
)
// Sentinel errors shared by all freezer-table operations.
var (
	// errClosed is returned if an operation attempts to read from or write to the
	// freezer table after it has already been closed.
	errClosed = errors.New("closed")

	// errOutOfBounds is returned if the item requested is not contained within the
	// freezer table.
	errOutOfBounds = errors.New("out of bounds")

	// errNotSupported is returned if the database doesn't support the required operation.
	errNotSupported = errors.New("this operation is not supported")
)
// indexEntry contains the number/id of the file that the data resides in, aswell as the
// offset within the file to the end of the data
// In serialized form, the filenum is stored as uint16.
type indexEntry struct {
	filenum uint32 // stored as uint16 ( 2 bytes)
	offset  uint32 // stored as uint32 ( 4 bytes)
}

// indexEntrySize is the on-disk footprint of one entry: 2 + 4 bytes.
const indexEntrySize = 6

// unmarshalBinary decodes the 6-byte big-endian wire form into the entry.
func (i *indexEntry) unmarshalBinary(b []byte) error {
	i.filenum = uint32(binary.BigEndian.Uint16(b[0:2]))
	i.offset = binary.BigEndian.Uint32(b[2:indexEntrySize])
	return nil
}

// marshallBinary encodes the entry into its 6-byte big-endian wire form.
func (i *indexEntry) marshallBinary() []byte {
	out := make([]byte, indexEntrySize)
	binary.BigEndian.PutUint16(out[0:2], uint16(i.filenum))
	binary.BigEndian.PutUint32(out[2:indexEntrySize], i.offset)
	return out
}
// freezerTable represents a single chained data table within the freezer (e.g. blocks).
// It consists of a data file (snappy encoded arbitrary data blobs) and an indexEntry
// file (uncompressed 64 bit indices into the data file).
type freezerTable struct {
	// WARNING: The `items` field is accessed atomically. On 32 bit platforms, only
	// 64-bit aligned fields can be atomic. The struct is guaranteed to be so aligned,
	// so take advantage of that (https://golang.org/pkg/sync/atomic/#pkg-note-BUG).
	items uint64 // Number of items stored in the table (including items removed from tail)

	noCompression bool   // if true, disables snappy compression. Note: does not work retroactively
	maxFileSize   uint32 // Max file size for data-files

	name string
	path string

	head   *os.File            // File descriptor for the data head of the table
	files  map[uint32]*os.File // open files
	headId uint32              // number of the currently active head file
	tailId uint32              // number of the earliest file
	index  *os.File            // File descriptor for the indexEntry file of the table

	// In the case that old items are deleted (from the tail), we use itemOffset
	// to count how many historic items have gone missing.
	itemOffset uint32 // Offset (number of discarded items)

	headBytes  uint32        // Number of bytes written to the head file
	readMeter  metrics.Meter // Meter for measuring the effective amount of data read
	writeMeter metrics.Meter // Meter for measuring the effective amount of data written
	sizeGauge  metrics.Gauge // Gauge for tracking the combined size of all freezer tables

	logger log.Logger   // Logger with database path and table name embedded
	lock   sync.RWMutex // Mutex protecting the data file descriptors
}
// NewFreezerTable opens the given path as a freezer table, with all metrics
// collection disabled (nil meters and gauge).
func NewFreezerTable(path, name string, disableSnappy bool) (*freezerTable, error) {
	return newTable(path, name, metrics.NilMeter{}, metrics.NilMeter{}, metrics.NilGauge{}, disableSnappy)
}
// newTable opens a freezer table with default settings - 2G (decimal) data files.
func newTable(path string, name string, readMeter metrics.Meter, writeMeter metrics.Meter, sizeGauge metrics.Gauge, disableSnappy bool) (*freezerTable, error) {
	return newCustomTable(path, name, readMeter, writeMeter, sizeGauge, 2*1000*1000*1000, disableSnappy)
}
// openFreezerFileForAppend opens a freezer table file and seeks to the end
func openFreezerFileForAppend(filename string) (*os.File, error) {
// Open the file without the O_APPEND flag
// because it has differing behaviour during Truncate operations
// on different OS's
file, err := os.OpenFile(filename, os.O_RDWR|os.O_CREATE, 0644)
if err != nil {
return nil, err
}
// Seek to end for append
if _, err = file.Seek(0, io.SeekEnd); err != nil {
return nil, err
}
return file, nil
}
// openFreezerFileForReadOnly opens a freezer table file for read only access
func openFreezerFileForReadOnly(filename string) (*os.File, error) {
return os.OpenFile(filename, os.O_RDONLY, 0644)
}
// openFreezerFileTruncated opens a freezer table making sure it is truncated
func openFreezerFileTruncated(filename string) (*os.File, error) {
	// Read/write handle, creating the file if needed and discarding any
	// previous contents via O_TRUNC.
	return os.OpenFile(filename, os.O_RDWR|os.O_CREATE|os.O_TRUNC, 0644)
}
// truncateFreezerFile resizes a freezer table file and seeks to the end
func truncateFreezerFile(file *os.File, size int64) error {
if err := file.Truncate(size); err != nil {
return err
}
// Seek to end for append
if _, err := file.Seek(0, io.SeekEnd); err != nil {
return err
}
return nil
}
// newCustomTable opens a freezer table, creating the data and index files if they are
// non existent. Both files are truncated to the shortest common length to ensure
// they don't go out of sync.
func newCustomTable(path string, name string, readMeter metrics.Meter, writeMeter metrics.Meter, sizeGauge metrics.Gauge, maxFilesize uint32, noCompression bool) (*freezerTable, error) {
	// Ensure the containing directory exists and open the indexEntry file
	if err := os.MkdirAll(path, 0755); err != nil {
		return nil, err
	}
	// The index file extension encodes whether the data blobs are snappy
	// compressed, so raw and compressed tables never share an index.
	var idxName string
	if noCompression {
		// Raw idx
		idxName = fmt.Sprintf("%s.ridx", name)
	} else {
		// Compressed idx
		idxName = fmt.Sprintf("%s.cidx", name)
	}
	offsets, err := openFreezerFileForAppend(filepath.Join(path, idxName))
	if err != nil {
		return nil, err
	}
	// Create the table and repair any past inconsistency
	tab := &freezerTable{
		index:         offsets,
		files:         make(map[uint32]*os.File),
		readMeter:     readMeter,
		writeMeter:    writeMeter,
		sizeGauge:     sizeGauge,
		name:          name,
		path:          path,
		logger:        log.New("database", path, "table", name),
		noCompression: noCompression,
		maxFileSize:   maxFilesize,
	}
	if err := tab.repair(); err != nil {
		tab.Close()
		return nil, err
	}
	// Initialize the starting size counter
	size, err := tab.sizeNolock()
	if err != nil {
		tab.Close()
		return nil, err
	}
	tab.sizeGauge.Inc(int64(size))

	return tab, nil
}
// repair cross checks the head and the index file and truncates them to
// be in sync with each other after a potential crash / data loss.
func (t *freezerTable) repair() error {
	// Create a temporary offset buffer to init files with and read indexEntry into
	buffer := make([]byte, indexEntrySize)

	// If we've just created the files, initialize the index with the 0 indexEntry
	stat, err := t.index.Stat()
	if err != nil {
		return err
	}
	if stat.Size() == 0 {
		if _, err := t.index.Write(buffer); err != nil {
			return err
		}
	}
	// Ensure the index is a multiple of indexEntrySize bytes
	if overflow := stat.Size() % indexEntrySize; overflow != 0 {
		truncateFreezerFile(t.index, stat.Size()-overflow) // New file can't trigger this path
	}
	// Retrieve the file sizes and prepare for truncation
	if stat, err = t.index.Stat(); err != nil {
		return err
	}
	offsetsSize := stat.Size()

	// Open the head file
	var (
		firstIndex  indexEntry
		lastIndex   indexEntry
		contentSize int64
		contentExp  int64
	)
	// Read index zero, determine what file is the earliest
	// and what item offset to use
	// NOTE(review): ReadAt errors are ignored here and below; a short read
	// would leave buffer zeroed — confirm that is intended.
	t.index.ReadAt(buffer, 0)
	firstIndex.unmarshalBinary(buffer)

	// The zeroth entry doubles as table metadata: its filenum is the earliest
	// data file, its offset the number of items deleted from the tail.
	t.tailId = firstIndex.filenum
	t.itemOffset = firstIndex.offset

	t.index.ReadAt(buffer, offsetsSize-indexEntrySize)
	lastIndex.unmarshalBinary(buffer)
	t.head, err = t.openFile(lastIndex.filenum, openFreezerFileForAppend)
	if err != nil {
		return err
	}
	if stat, err = t.head.Stat(); err != nil {
		return err
	}
	contentSize = stat.Size()

	// Keep truncating both files until they come in sync
	contentExp = int64(lastIndex.offset)

	for contentExp != contentSize {
		// Truncate the head file to the last offset pointer
		if contentExp < contentSize {
			t.logger.Warn("Truncating dangling head", "indexed", common.StorageSize(contentExp), "stored", common.StorageSize(contentSize))
			if err := truncateFreezerFile(t.head, contentExp); err != nil {
				return err
			}
			contentSize = contentExp
		}
		// Truncate the index to point within the head file
		if contentExp > contentSize {
			t.logger.Warn("Truncating dangling indexes", "indexed", common.StorageSize(contentExp), "stored", common.StorageSize(contentSize))
			if err := truncateFreezerFile(t.index, offsetsSize-indexEntrySize); err != nil {
				return err
			}
			offsetsSize -= indexEntrySize
			t.index.ReadAt(buffer, offsetsSize-indexEntrySize)
			var newLastIndex indexEntry
			newLastIndex.unmarshalBinary(buffer)
			// We might have slipped back into an earlier head-file here
			if newLastIndex.filenum != lastIndex.filenum {
				// Release earlier opened file
				t.releaseFile(lastIndex.filenum)
				if t.head, err = t.openFile(newLastIndex.filenum, openFreezerFileForAppend); err != nil {
					return err
				}
				if stat, err = t.head.Stat(); err != nil {
					// TODO, anything more we can do here?
					// A data file has gone missing...
					return err
				}
				contentSize = stat.Size()
			}
			lastIndex = newLastIndex
			contentExp = int64(lastIndex.offset)
		}
	}
	// Ensure all reparation changes have been written to disk
	if err := t.index.Sync(); err != nil {
		return err
	}
	if err := t.head.Sync(); err != nil {
		return err
	}
	// Update the item and byte counters and return
	t.items = uint64(t.itemOffset) + uint64(offsetsSize/indexEntrySize-1) // last indexEntry points to the end of the data file
	t.headBytes = uint32(contentSize)
	t.headId = lastIndex.filenum

	// Close opened files and preopen all files
	if err := t.preopen(); err != nil {
		return err
	}
	t.logger.Debug("Chain freezer table opened", "items", t.items, "size", common.StorageSize(t.headBytes))
	return nil
}
// preopen opens all files that the freezer will need. This method should be called from an init-context,
// since it assumes that it doesn't have to bother with locking
// The rationale for doing preopen is to not have to do it from within Retrieve, thus not needing to ever
// obtain a write-lock within Retrieve.
func (t *freezerTable) preopen() (err error) {
	// The repair might have already opened (some) files
	t.releaseFilesAfter(0, false)
	// Open all except head in RDONLY — non-head files are never written again,
	// so read-only handles suffice.
	for i := t.tailId; i < t.headId; i++ {
		if _, err = t.openFile(i, openFreezerFileForReadOnly); err != nil {
			return err
		}
	}
	// Open head in read/write
	t.head, err = t.openFile(t.headId, openFreezerFileForAppend)
	return err
}
// truncate discards any recent data above the provided threshold number.
func (t *freezerTable) truncate(items uint64) error {
	t.lock.Lock()
	defer t.lock.Unlock()

	// If our item count is correct, don't do anything
	existing := atomic.LoadUint64(&t.items)
	if existing <= items {
		return nil
	}
	// We need to truncate, save the old size for metrics tracking
	oldSize, err := t.sizeNolock()
	if err != nil {
		return err
	}
	// Something's out of sync, truncate the table's offset index
	log := t.logger.Debug
	if existing > items+1 {
		log = t.logger.Warn // Only loud warn if we delete multiple items
	}
	log("Truncating freezer table", "items", existing, "limit", items)
	// items+1 index entries remain: entry 0 is the metadata entry, entries
	// 1..items describe the surviving blobs.
	if err := truncateFreezerFile(t.index, int64(items+1)*indexEntrySize); err != nil {
		return err
	}
	// Calculate the new expected size of the data file and truncate it
	buffer := make([]byte, indexEntrySize)
	if _, err := t.index.ReadAt(buffer, int64(items*indexEntrySize)); err != nil {
		return err
	}
	var expected indexEntry
	expected.unmarshalBinary(buffer)

	// We might need to truncate back to older files
	if expected.filenum != t.headId {
		// If already open for reading, force-reopen for writing
		t.releaseFile(expected.filenum)
		newHead, err := t.openFile(expected.filenum, openFreezerFileForAppend)
		if err != nil {
			return err
		}
		// Release any files _after the current head -- both the previous head
		// and any files which may have been opened for reading
		t.releaseFilesAfter(expected.filenum, true)
		// Set back the historic head
		t.head = newHead
		atomic.StoreUint32(&t.headId, expected.filenum)
	}
	if err := truncateFreezerFile(t.head, int64(expected.offset)); err != nil {
		return err
	}
	// All data files truncated, set internal counters and return
	atomic.StoreUint64(&t.items, items)
	atomic.StoreUint32(&t.headBytes, expected.offset)

	// Retrieve the new size and update the total size counter
	newSize, err := t.sizeNolock()
	if err != nil {
		return err
	}
	t.sizeGauge.Dec(int64(oldSize - newSize))
	return nil
}
// Close closes all opened files.
func (t *freezerTable) Close() error {
	t.lock.Lock()
	defer t.lock.Unlock()

	// Collect every close failure rather than aborting on the first one, so
	// each descriptor gets a close attempt.
	var errs []error
	closeFile := func(f *os.File) {
		if err := f.Close(); err != nil {
			errs = append(errs, err)
		}
	}

	closeFile(t.index)
	t.index = nil

	for _, f := range t.files {
		closeFile(f)
	}
	t.head = nil

	if errs != nil {
		return fmt.Errorf("%v", errs)
	}
	return nil
}
// openFile assumes that the write-lock is held by the caller
func (t *freezerTable) openFile(num uint32, opener func(string) (*os.File, error)) (f *os.File, err error) {
	// Serve from the open-file cache if possible, otherwise open and cache.
	var exist bool
	if f, exist = t.files[num]; !exist {
		var name string
		if t.noCompression {
			// Raw (uncompressed) data files use the .rdat extension
			name = fmt.Sprintf("%s.%04d.rdat", t.name, num)
		} else {
			// Snappy-compressed data files use the .cdat extension
			name = fmt.Sprintf("%s.%04d.cdat", t.name, num)
		}
		f, err = opener(filepath.Join(t.path, name))
		if err != nil {
			return nil, err
		}
		t.files[num] = f
	}
	return f, err
}
// releaseFile closes a file, and removes it from the open file cache.
// Assumes that the caller holds the write lock
func (t *freezerTable) releaseFile(num uint32) {
	// Look up, evict from the cache, then close -- a no-op for unknown ids.
	f, open := t.files[num]
	if !open {
		return
	}
	delete(t.files, num)
	f.Close()
}
// releaseFilesAfter closes all open files with a higher number, and optionally also deletes the files
func (t *freezerTable) releaseFilesAfter(num uint32, remove bool) {
	// Evict everything strictly newer than num from the open-file cache,
	// optionally deleting the underlying files from disk too.
	for fnum, f := range t.files {
		if fnum <= num {
			continue
		}
		delete(t.files, fnum)
		f.Close()
		if remove {
			os.Remove(f.Name())
		}
	}
}
// Append injects a binary blob at the end of the freezer table. The item number
// is a precautionary parameter to ensure data correctness, but the table will
// reject already existing data.
//
// Note, this method will *not* flush any data to disk so be sure to explicitly
// fsync before irreversibly deleting data from the database.
func (t *freezerTable) Append(item uint64, blob []byte) error {
	// Read lock prevents competition with truncate
	t.lock.RLock()
	// Ensure the table is still accessible
	if t.index == nil || t.head == nil {
		t.lock.RUnlock()
		return errClosed
	}
	// Ensure only the next item can be written, nothing else
	if atomic.LoadUint64(&t.items) != item {
		t.lock.RUnlock()
		return fmt.Errorf("appending unexpected item: want %d, have %d", t.items, item)
	}
	// Encode the blob and write it into the data file
	if !t.noCompression {
		blob = snappy.Encode(nil, blob)
	}
	bLen := uint32(len(blob))
	// Roll over to a new head file when the write would overflow the current
	// one (the first clause also catches uint32 wraparound).
	if t.headBytes+bLen < bLen ||
		t.headBytes+bLen > t.maxFileSize {
		// we need a new file, writing would overflow
		// NOTE(review): the RUnlock->Lock upgrade below is not atomic; this
		// appears to rely on callers serializing Append calls — confirm.
		t.lock.RUnlock()
		t.lock.Lock()
		nextID := atomic.LoadUint32(&t.headId) + 1
		// We open the next file in truncated mode --
		// if this file already exists, we need to start over from scratch on it
		newHead, err := t.openFile(nextID, openFreezerFileTruncated)
		if err != nil {
			t.lock.Unlock()
			return err
		}
		// Close old file, and reopen in RDONLY mode
		// NOTE(review): the reopen's error is not checked here.
		t.releaseFile(t.headId)
		t.openFile(t.headId, openFreezerFileForReadOnly)

		// Swap out the current head
		t.head = newHead
		atomic.StoreUint32(&t.headBytes, 0)
		atomic.StoreUint32(&t.headId, nextID)
		t.lock.Unlock()
		t.lock.RLock()
	}
	defer t.lock.RUnlock()
	if _, err := t.head.Write(blob); err != nil {
		return err
	}
	newOffset := atomic.AddUint32(&t.headBytes, bLen)
	idx := indexEntry{
		filenum: atomic.LoadUint32(&t.headId),
		offset:  newOffset,
	}
	// Write indexEntry
	t.index.Write(idx.marshallBinary())
	t.writeMeter.Mark(int64(bLen + indexEntrySize))
	t.sizeGauge.Inc(int64(bLen + indexEntrySize))
	atomic.AddUint64(&t.items, 1)
	return nil
}
// getBounds returns the indexes for the item
// returns start, end, filenumber and error
func (t *freezerTable) getBounds(item uint64) (uint32, uint32, uint32, error) {
	buffer := make([]byte, indexEntrySize)

	var startIdx, endIdx indexEntry
	// Read second index — the entry at position item+1 marks where this
	// item's data ends.
	if _, err := t.index.ReadAt(buffer, int64((item+1)*indexEntrySize)); err != nil {
		return 0, 0, 0, err
	}
	endIdx.unmarshalBinary(buffer)
	// Read first index (unless it's the very first item)
	if item != 0 {
		if _, err := t.index.ReadAt(buffer, int64(item*indexEntrySize)); err != nil {
			return 0, 0, 0, err
		}
		startIdx.unmarshalBinary(buffer)
	} else {
		// Special case if we're reading the first item in the freezer. We assume that
		// the first item always start from zero(regarding the deletion, we
		// only support deletion by files, so that the assumption is held).
		// This means we can use the first item metadata to carry information about
		// the 'global' offset, for the deletion-case
		return 0, endIdx.offset, endIdx.filenum, nil
	}
	if startIdx.filenum != endIdx.filenum {
		// If a piece of data 'crosses' a data-file,
		// it's actually in one piece on the second data-file.
		// We return a zero-indexEntry for the second file as start
		return 0, endIdx.offset, endIdx.filenum, nil
	}
	return startIdx.offset, endIdx.offset, endIdx.filenum, nil
}
// Retrieve looks up the data offset of an item with the given number and retrieves
// the raw binary blob from the data file.
func (t *freezerTable) Retrieve(item uint64) ([]byte, error) {
	t.lock.RLock()
	// Ensure the table and the item is accessible
	if t.index == nil || t.head == nil {
		t.lock.RUnlock()
		return nil, errClosed
	}
	if atomic.LoadUint64(&t.items) <= item {
		t.lock.RUnlock()
		return nil, errOutOfBounds
	}
	// Ensure the item was not deleted from the tail either
	if uint64(t.itemOffset) > item {
		t.lock.RUnlock()
		return nil, errOutOfBounds
	}
	// Translate the global item number into a position local to this table's
	// index file before resolving the byte bounds.
	startOffset, endOffset, filenum, err := t.getBounds(item - uint64(t.itemOffset))
	if err != nil {
		t.lock.RUnlock()
		return nil, err
	}
	dataFile, exist := t.files[filenum]
	if !exist {
		t.lock.RUnlock()
		return nil, fmt.Errorf("missing data file %d", filenum)
	}
	// Retrieve the data itself, decompress and return
	blob := make([]byte, endOffset-startOffset)
	if _, err := dataFile.ReadAt(blob, int64(startOffset)); err != nil {
		t.lock.RUnlock()
		return nil, err
	}
	t.lock.RUnlock()
	t.readMeter.Mark(int64(len(blob) + 2*indexEntrySize))

	if t.noCompression {
		return blob, nil
	}
	return snappy.Decode(nil, blob)
}
// has returns an indicator whether the specified number data
// exists in the freezer table.
func (t *freezerTable) has(number uint64) bool {
	// Present exactly when the number is below the (atomically read) count.
	return number < atomic.LoadUint64(&t.items)
}
// size returns the total data size in the freezer table.
func (t *freezerTable) size() (uint64, error) {
	// Take the read lock, then delegate to the lock-free worker.
	t.lock.RLock()
	defer t.lock.RUnlock()

	return t.sizeNolock()
}
// sizeNolock returns the total data size in the freezer table without obtaining
// the mutex first.
func (t *freezerTable) sizeNolock() (uint64, error) {
	stat, err := t.index.Stat()
	if err != nil {
		return 0, err
	}
	// Every sealed (non-head) data file is counted as exactly maxFileSize
	// bytes; only the head file's true length (headBytes) is tracked precisely.
	total := uint64(t.maxFileSize)*uint64(t.headId-t.tailId) + uint64(t.headBytes) + uint64(stat.Size())
	return total, nil
}
// Sync pushes any pending data from memory out to disk. This is an expensive
// operation, so use it with care.
func (t *freezerTable) Sync() error {
	// fsync the index file first, then the active head data file.
	if err := t.index.Sync(); err != nil {
		return err
	}
	return t.head.Sync()
}
// printIndex is a debug helper that dumps index entries to stdout,
// stopping after roughly the first 100.
func (t *freezerTable) printIndex() {
	buf := make([]byte, indexEntrySize)

	fmt.Printf("|-----------------|\n")
	fmt.Printf("| fileno | offset |\n")
	fmt.Printf("|--------+--------|\n")

	// Walk the index file entry by entry until a read past EOF fails.
	for i := uint64(0); ; i++ {
		if _, err := t.index.ReadAt(buf, int64(i*indexEntrySize)); err != nil {
			break
		}
		var entry indexEntry
		entry.unmarshalBinary(buf)
		fmt.Printf("| %03d | %03d | \n", entry.filenum, entry.offset)
		if i > 100 {
			fmt.Printf(" ... \n")
			break
		}
	}
	fmt.Printf("|-----------------|\n")
}
| |
<|file_name|>vca_fw.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# Copyright (c) 2015 VMware, Inc. All Rights Reserved.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: vca_fw
short_description: add remove firewall rules in a gateway in a vca
description:
- Adds or removes firewall rules from a gateway in a vca environment
version_added: "2.0"
options:
username:
description:
- The vca username or email address, if not set the environment variable VCA_USER is checked for the username.
required: false
default: None
password:
description:
- The vca password, if not set the environment variable VCA_PASS is checked for the password
required: false
default: None
org:
description:
- The org to login to for creating vapp, mostly set when the service_type is vdc.
required: false
default: None
service_id:
description:
- The service id in a vchs environment to be used for creating the vapp
required: false
default: None
host:
description:
- The authentication host to be used when service type is vcd.
required: false
default: None
api_version:
description:
- The api version to be used with the vca
required: false
default: "5.7"
service_type:
description:
- The type of service we are authenticating against
required: false
default: vca
choices: [ "vca", "vchs", "vcd" ]
state:
description:
- if the object should be added or removed
required: false
default: present
choices: [ "present", "absent" ]
verify_certs:
description:
- If the certificates of the authentication is to be verified
required: false
default: True
vdc_name:
description:
- The name of the vdc where the gateway is located.
required: false
default: None
gateway_name:
description:
- The name of the gateway of the vdc where the rule should be added
required: false
default: gateway
fw_rules:
description:
- A list of firewall rules to be added to the gateway, Please see examples on valid entries
required: True
default: false
'''
EXAMPLES = '''
#Add a set of firewall rules
- hosts: localhost
connection: local
tasks:
- vca_fw:
instance_id: 'b15ff1e5-1024-4f55-889f-ea0209726282'
vdc_name: 'benz_ansible'
state: 'absent'
fw_rules:
- description: "ben testing"
source_ip: "Any"
dest_ip: 192.168.2.11
- description: "ben testing 2"
source_ip: 192.168.2.100
source_port: "Any"
dest_port: "22"
dest_ip: 192.168.2.13
is_enable: "true"
enable_logging: "false"
protocol: "Tcp"
policy: "allow"
'''
import time, json, xmltodict
HAS_PYVCLOUD = False
try:
from pyvcloud.vcloudair import VCA
from pyvcloud.schema.vcd.v1_5.schemas.vcloud.networkType import ProtocolsType
HAS_PYVCLOUD = True
except ImportError:
pass
# Map this module's service_type choices onto the names pyvcloud expects.
SERVICE_MAP = {'vca': 'ondemand', 'vchs': 'subscription', 'vcd': 'vcd'}
# Well-known login endpoints; vcd deployments supply their own host parameter.
LOGIN_HOST = {}
LOGIN_HOST['vca'] = 'vca.vmware.com'
LOGIN_HOST['vchs'] = 'vchs.vmware.com'

# Keys accepted in each entry of the fw_rules module parameter.
VALID_RULE_KEYS = ['policy', 'is_enable', 'enable_logging', 'description', 'dest_ip', 'dest_port', 'source_ip', 'source_port', 'protocol']
def vca_login(module=None):
    """Authenticate against vCA/vCHS/vCD and return a logged-in VCA client.

    Connection parameters are read from the Ansible module, falling back to
    the VCA_USER / VCA_PASS environment variables for credentials. Any login
    problem fails the module via module.fail_json().
    """
    service_type = module.params.get('service_type')
    username = module.params.get('username')
    password = module.params.get('password')
    instance = module.params.get('instance_id')
    org = module.params.get('org')
    service = module.params.get('service_id')
    vdc_name = module.params.get('vdc_name')
    version = module.params.get('api_version')
    verify = module.params.get('verify_certs')

    # For subscription (vchs) accounts the service id doubles as the vdc
    # name, and the vdc/service id doubles as the org, when not set explicitly.
    if not vdc_name:
        if service_type == 'vchs':
            vdc_name = module.params.get('service_id')
    if not org:
        if service_type == 'vchs':
            if vdc_name:
                org = vdc_name
            else:
                org = service

    # vcd talks to a user-supplied host; the hosted services have fixed ones.
    if service_type == 'vcd':
        host = module.params.get('host')
    else:
        host = LOGIN_HOST[service_type]

    if not username:
        if 'VCA_USER' in os.environ:
            username = os.environ['VCA_USER']
    if not password:
        if 'VCA_PASS' in os.environ:
            password = os.environ['VCA_PASS']
    if not username or not password:
        module.fail_json(msg = "Either the username or password is not set, please check")

    if service_type == 'vchs':
        version = '5.6'
    if service_type == 'vcd':
        if not version:
            # BUGFIX: this was "version == '5.6'", a comparison with no effect,
            # which left version unset for vcd logins.
            version = '5.6'

    vca = VCA(host=host, username=username, service_type=SERVICE_MAP[service_type], version=version, verify=verify)

    if service_type == 'vca':
        # On-demand: password login, then bind the session to the instance.
        if not vca.login(password=password):
            module.fail_json(msg = "Login Failed: Please check username or password", error=vca.response.content)
        if not vca.login_to_instance(password=password, instance=instance, token=None, org_url=None):
            s_json = serialize_instances(vca.instances)
            module.fail_json(msg = "Login to Instance failed: Seems like instance_id provided is wrong .. Please check",\
                                 valid_instances=s_json)
        if not vca.login_to_instance(instance=instance, password=None, token=vca.vcloud_session.token,
                                     org_url=vca.vcloud_session.org_url):
            module.fail_json(msg = "Error logging into org for the instance", error=vca.response.content)
        return vca

    if service_type == 'vchs':
        # Subscription: password login, token refresh, then org login.
        if not vca.login(password=password):
            module.fail_json(msg = "Login Failed: Please check username or password", error=vca.response.content)
        if not vca.login(token=vca.token):
            module.fail_json(msg = "Failed to get the token", error=vca.response.content)
        if not vca.login_to_org(service, org):
            module.fail_json(msg = "Failed to login to org, Please check the orgname", error=vca.response.content)
        return vca

    if service_type == 'vcd':
        if not vca.login(password=password, org=org):
            module.fail_json(msg = "Login Failed: Please check username or password or host parameters")
        if not vca.login(password=password, org=org):
            module.fail_json(msg = "Failed to get the token", error=vca.response.content)
        if not vca.login(token=vca.token, org=org, org_url=vca.vcloud_session.org_url):
            module.fail_json(msg = "Failed to login to org", error=vca.response.content)
        return vca
def validate_fw_rules(module=None, fw_rules=None):
    """Normalize a list of firewall-rule dicts in place and return it.

    Every rule must be a dict whose keys are all in VALID_RULE_KEYS; any
    missing field is filled with the default the vCA gateway expects.
    Fails the Ansible run on the first malformed rule.
    """
    VALID_PROTO = ['Tcp', 'Udp', 'Icmp', 'Any']
    # Defaults applied to any field the user did not specify.
    defaults = {
        'dest_port': 'Any',
        'dest_ip': 'Any',
        'source_port': 'Any',
        'source_ip': 'Any',
        'protocol': 'Any',
        'policy': 'allow',
        'is_enable': 'true',
        'enable_logging': 'false',
        'description': 'rule added by Ansible',
    }
    for rule in fw_rules:
        if not isinstance(rule, dict):
            module.fail_json(msg="Firewall rules must be a list of dictionaries, Please check", valid_keys=VALID_RULE_KEYS)
        for key in rule.keys():
            if key not in VALID_RULE_KEYS:
                module.fail_json(msg="%s is not a valid key in fw rules, Please check above.." % key, valid_keys=VALID_RULE_KEYS)
        for field, value in defaults.items():
            rule.setdefault(field, value)
        if rule['protocol'] not in VALID_PROTO:
            module.fail_json(msg="the value in protocol is not valid, valid values are as above", valid_proto=VALID_PROTO)
    return fw_rules
def create_protocol_list(protocol):
    """Flatten a pyvcloud protocol object into a comparable list of flags.

    Returns the values of the Tcp/Any/Udp/Icmp/Other getters, in that order.
    The original code appended get_Tcp() twice; the duplicate entry was
    redundant and is removed — safe because both sides of every rule
    comparison in this module are built by this same function.
    """
    return [
        protocol.get_Tcp(),
        protocol.get_Any(),
        protocol.get_Udp(),
        protocol.get_Icmp(),
        protocol.get_Other(),
    ]
def create_protocols_type(protocol):
    """Build a ProtocolsType with only the requested protocol flag set True."""
    # dict.fromkeys gives None for every protocol; flip just the chosen one.
    flags = dict.fromkeys(('Tcp', 'Udp', 'Icmp', 'Any'))
    flags[protocol] = True
    return ProtocolsType(**flags)
def main():
    """Ansible entry point: log in to vCloud (vca/vchs/vcd) and add or
    remove the requested firewall rules on a vDC gateway."""
    module = AnsibleModule(
        argument_spec=dict(
            username=dict(default=None),
            password=dict(default=None),
            org=dict(default=None),
            service_id=dict(default=None),
            instance_id=dict(default=None),
            host=dict(default=None),
            api_version=dict(default='5.7'),
            service_type=dict(default='vca', choices=['vchs', 'vca', 'vcd']),
            state=dict(default='present', choices=['present', 'absent']),
            # Fixed: verify_certs was previously assigned to a stray local
            # variable further down instead of being a module parameter,
            # so it could never be supplied by the playbook.
            verify_certs=dict(default=True, type='bool'),
            vdc_name=dict(default=None),
            gateway_name=dict(default='gateway'),
            # 'required=True' and 'default=None' are mutually exclusive in
            # Ansible argument specs; the redundant default was dropped.
            fw_rules=dict(required=True, type='list'),
        )
    )

    vdc_name = module.params.get('vdc_name')
    org = module.params.get('org')
    service = module.params.get('service_id')
    state = module.params.get('state')
    service_type = module.params.get('service_type')
    host = module.params.get('host')
    instance_id = module.params.get('instance_id')
    fw_rules = module.params.get('fw_rules')
    gateway_name = module.params.get('gateway_name')

    if not HAS_PYVCLOUD:
        module.fail_json(msg="python module pyvcloud is needed for this module")

    # Per-service-type mandatory parameter checks.
    if service_type == 'vca':
        if not instance_id:
            module.fail_json(msg="When service type is vca the instance_id parameter is mandatory")
        if not vdc_name:
            module.fail_json(msg="When service type is vca the vdc_name parameter is mandatory")
    if service_type == 'vchs':
        if not service:
            module.fail_json(msg="When service type vchs the service_id parameter is mandatory")
        # vchs historically reuses the service id for org and vdc names.
        if not org:
            org = service
        if not vdc_name:
            vdc_name = service
    if service_type == 'vcd':
        if not host:
            module.fail_json(msg="When service type is vcd host parameter is mandatory")

    vca = vca_login(module)
    vdc = vca.get_vdc(vdc_name)
    if not vdc:
        module.fail_json(msg="Error getting the vdc, Please check the vdc name")

    mod_rules = validate_fw_rules(module, fw_rules)
    gateway = vca.get_gateway(vdc_name, gateway_name)
    if not gateway:
        module.fail_json(msg="Not able to find the gateway %s, please check the gateway_name param" % gateway_name)

    # Split requested rules into "already present" (del_rules) and "new"
    # (whatever remains in mod_rules) by comparing protocol/port/ip traits.
    rules = gateway.get_fw_rules()
    existing_rules = []
    del_rules = []
    for rule in rules:
        current_trait = (create_protocol_list(rule.get_Protocols()),
                         rule.get_DestinationPortRange(),
                         rule.get_DestinationIp(),
                         rule.get_SourcePortRange(),
                         rule.get_SourceIp())
        # NOTE(review): mod_rules is popped while being enumerated; with
        # duplicate requested rules a match could be skipped — confirm
        # whether duplicates are a supported input before changing.
        for idx, val in enumerate(mod_rules):
            trait = (create_protocol_list(create_protocols_type(val['protocol'])),
                     val['dest_port'], val['dest_ip'], val['source_port'], val['source_ip'])
            if current_trait == trait:
                del_rules.append(mod_rules[idx])
                mod_rules.pop(idx)
        existing_rules.append(current_trait)

    if state == 'absent':
        if len(del_rules) < 1:
            module.exit_json(changed=False, msg="Nothing to delete", delete_rules=mod_rules)
        else:
            for i in del_rules:
                gateway.delete_fw_rule(i['protocol'], i['dest_port'], i['dest_ip'], i['source_port'], i['source_ip'])
            task = gateway.save_services_configuration()
            if not task:
                module.fail_json(msg="Unable to Delete Rule, please check above error", error=gateway.response.content)
            if not vca.block_until_completed(task):
                module.fail_json(msg="Error while waiting to remove Rule, please check above error", error=gateway.response.content)
            module.exit_json(changed=True, msg="Rules Deleted", deleted_rules=del_rules)

    # state == 'present': anything left in mod_rules is missing on the gateway.
    if len(mod_rules) < 1:
        module.exit_json(changed=False, rules=existing_rules)
    if len(mod_rules) >= 1:
        for i in mod_rules:
            gateway.add_fw_rule(i['is_enable'], i['description'], i['policy'], i['protocol'], i['dest_port'], i['dest_ip'],
                                i['source_port'], i['source_ip'], i['enable_logging'])
        task = gateway.save_services_configuration()
        if not task:
            module.fail_json(msg="Unable to Add Rule, please check above error", error=gateway.response.content)
        if not vca.block_until_completed(task):
            module.fail_json(msg="Failure in waiting for adding firewall rule", error=gateway.response.content)
        module.exit_json(changed=True, rules=mod_rules)
# import module snippets
# Ansible historically requires this wildcard import to inject the
# AnsibleModule helper into the module namespace.
from ansible.module_utils.basic import *

if __name__ == '__main__':
    main()
| |
<|file_name|>CudaEMAlgorithm.cpp<|end_file_name|><|fim▁begin|>/*
-----------------------------------------------------------------------
Copyright: 2010-2015, iMinds-Vision Lab, University of Antwerp
2014-2015, CWI, Amsterdam<|fim▁hole|>This file is part of the ASTRA Toolbox.
The ASTRA Toolbox is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
The ASTRA Toolbox is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with the ASTRA Toolbox. If not, see <http://www.gnu.org/licenses/>.
-----------------------------------------------------------------------
$Id$
*/
#ifdef ASTRA_CUDA
#include "astra/CudaEMAlgorithm.h"
#include "../cuda/2d/em.h"
using namespace std;
namespace astra {
// type of the algorithm, needed to register with CAlgorithmFactory
std::string CCudaEMAlgorithm::type = "EM_CUDA";
//----------------------------------------------------------------------------------------
// Constructor
CCudaEMAlgorithm::CCudaEMAlgorithm()
{
m_bIsInitialized = false;
CCudaReconstructionAlgorithm2D::_clear();
}
//----------------------------------------------------------------------------------------
// Destructor
CCudaEMAlgorithm::~CCudaEMAlgorithm()
{
// The actual work is done by ~CCudaReconstructionAlgorithm2D
}
//---------------------------------------------------------------------------------------
// Initialize - Config
bool CCudaEMAlgorithm::initialize(const Config& _cfg)
{
ASTRA_ASSERT(_cfg.self);
ConfigStackCheck<CAlgorithm> CC("CudaEMAlgorithm", this, _cfg);
m_bIsInitialized = CCudaReconstructionAlgorithm2D::initialize(_cfg);
if (!m_bIsInitialized)
return false;
m_pAlgo = new astraCUDA::EM();
m_bAlgoInit = false;
return true;
}
//---------------------------------------------------------------------------------------
// Initialize - C++
bool CCudaEMAlgorithm::initialize(CProjector2D* _pProjector,
CFloat32ProjectionData2D* _pSinogram,
CFloat32VolumeData2D* _pReconstruction)
{
m_bIsInitialized = CCudaReconstructionAlgorithm2D::initialize(_pProjector, _pSinogram, _pReconstruction);
if (!m_bIsInitialized)
return false;
m_pAlgo = new astraCUDA::EM();
m_bAlgoInit = false;
return true;
}
} // namespace astra
#endif // ASTRA_CUDA<|fim▁end|>
|
Contact: [email protected]
Website: http://sf.net/projects/astra-toolbox
|
<|file_name|>DragSourceImpl.d.ts<|end_file_name|><|fim▁begin|>import type { DragDropMonitor, DragSource, Identifier } from 'dnd-core';
import type { Connector } from '../../internals';
import type { DragSourceMonitor } from '../../types';
import type { DragSourceHookSpec } from '../types';
export declare class DragSourceImpl<O, R, P> implements DragSource {
<|fim▁hole|> private monitor;
private connector;
constructor(spec: DragSourceHookSpec<O, R, P>, monitor: DragSourceMonitor<O, R>, connector: Connector);
beginDrag(): NonNullable<O> | null;
canDrag(): boolean;
isDragging(globalMonitor: DragDropMonitor, target: Identifier): boolean;
endDrag(): void;
}<|fim▁end|>
|
spec: DragSourceHookSpec<O, R, P>;
|
<|file_name|>unit_tests.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
extern crate webrtc_sdp;
#[cfg(test)]
// Round-trip helper: parse strictly, re-serialize, require byte equality.
fn check_parse_and_serialize(sdp_str: &str) {
    let parsed = webrtc_sdp::parse_sdp(sdp_str, true);
    assert!(parsed.is_ok());
    assert_eq!(parsed.unwrap().to_string(), sdp_str)
}
#[test]
// Smallest SDP accepted in strict mode: v/o/s/t/c plus one audio m-section.
fn parse_minimal_sdp() {
    let sdp_str = "v=0\r\n\
o=- 1 1 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
c=IN IP4 0.0.0.0\r\n\
m=audio 0 UDP/TLS/RTP/SAVPF 0\r\n";
    let sdp_res = webrtc_sdp::parse_sdp(sdp_str, true);
    assert!(sdp_res.is_ok());
    let sdp_opt = sdp_res.ok();
    assert!(sdp_opt.is_some());
    let sdp = sdp_opt.unwrap();
    // Session-level fields.
    assert_eq!(sdp.get_version(), 0);
    let o = sdp.get_origin();
    assert_eq!(o.username, "-");
    assert_eq!(o.session_id, 1);
    assert_eq!(o.session_version, 1);
    assert_eq!(sdp.get_session(), &Some("-".to_owned()));
    assert!(sdp.timing.is_some());
    assert!(sdp.get_connection().is_some());
    assert_eq!(sdp.attribute.len(), 0);
    assert_eq!(sdp.media.len(), 1);
    // Single media section: audio, port 0, SAVPF, no extra attributes.
    let msection = &(sdp.media[0]);
    assert_eq!(
        *msection.get_type(),
        webrtc_sdp::media_type::SdpMediaValue::Audio
    );
    assert_eq!(msection.get_port(), 0);
    assert_eq!(msection.get_port_count(), 0);
    assert_eq!(
        *msection.get_proto(),
        webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
    );
    assert!(msection.get_attributes().is_empty());
    assert!(msection.get_bandwidth().is_empty());
    assert!(msection.get_connection().is_none());
    check_parse_and_serialize(sdp_str);
}
#[test]
// Renamed from `parse_minimal_sdp_with_emtpy_lines` (identifier typo).
// Blank lines inside the SDP are tolerated when fail_on_warning is false.
fn parse_minimal_sdp_with_empty_lines() {
    let sdp = "v=0\r\n
\r\n
o=- 0 0 IN IP4 0.0.0.0\r\n
\r\n
s=-\r\n
c=IN IP4 0.0.0.0\r\n
t=0 0\r\n
m=audio 0 UDP/TLS/RTP/SAVPF 0\r\n";
    let sdp_res = webrtc_sdp::parse_sdp(sdp, false);
    assert!(sdp_res.is_ok());
    let sdp_opt = sdp_res.ok();
    assert!(sdp_opt.is_some());
    let sdp = sdp_opt.unwrap();
    assert_eq!(sdp.get_version(), 0);
    assert_eq!(sdp.get_session(), &Some("-".to_owned()));
}
#[test]
fn parse_minimal_sdp_with_single_space_session() {
let sdp = "v=0\r\n
\r\n
o=- 0 0 IN IP4 0.0.0.0\r\n
\r\n
s= \r\n
c=IN IP4 0.0.0.0\r\n
t=0 0\r\n
m=audio 0 UDP/TLS/RTP/SAVPF 0\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp, false);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());<|fim▁hole|>}
#[test]
fn parse_minimal_sdp_with_most_session_types() {
let sdp_str = "v=0\r\n\
o=- 0 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
b=AS:1\r\n\
b=CT:123\r\n\
b=TIAS:12345\r\n\
b=UNKNOWN:9\r\n\
c=IN IP6 ::1/1/1\r\n\
a=ice-options:trickle\r\n\
m=audio 0 UDP/TLS/RTP/SAVPF 0\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, false);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.session, Some("-".to_owned()));
assert!(sdp.get_connection().is_some());
check_parse_and_serialize(sdp_str);
}
#[test]
fn parse_minimal_sdp_with_most_media_types() {
let sdp_str = "v=0\r\n\
o=- 0 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
m=video 0 UDP/TLS/RTP/SAVPF 0\r\n\
b=AS:1\r\n\
b=CT:123\r\n\
b=TIAS:12345\r\n\
c=IN IP4 0.0.0.0\r\n\
a=sendrecv\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, false);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.session, Some("-".to_owned()));
assert_eq!(sdp.attribute.len(), 0);
assert_eq!(sdp.media.len(), 1);
let msection = &(sdp.media[0]);
assert_eq!(
*msection.get_type(),
webrtc_sdp::media_type::SdpMediaValue::Video
);
assert_eq!(msection.get_port(), 0);
assert_eq!(
*msection.get_proto(),
webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
);
assert!(!msection.get_bandwidth().is_empty());
assert!(!msection.get_connection().is_none());
assert!(!msection.get_attributes().is_empty());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Sendrecv)
.is_some());
check_parse_and_serialize(sdp_str);
}
#[test]
fn parse_firefox_audio_offer() {
let sdp_str = "v=0\r\n\
o=mozilla...THIS_IS_SDPARTA-52.0a1 506705521068071134 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
a=fingerprint:sha-256 CD:34:D1:62:16:95:7B:B7:EB:74:E2:39:27:97:EB:0B:23:73:AC:BC:BF:2F:E3:91:CB:57:A9:9D:4A:A2:0B:40\r\n\
a=group:BUNDLE sdparta_0\r\n\
a=ice-options:trickle\r\n\
a=msid-semantic:WMS *\r\n\
m=audio 9 UDP/TLS/RTP/SAVPF 109 9 0 8\r\n\
c=IN IP4 0.0.0.0\r\n\
a=sendrecv\r\n\
a=extmap:1/sendonly urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n\
a=fmtp:109 maxplaybackrate=48000;stereo=1;useinbandfec=1\r\n\
a=ice-pwd:e3baa26dd2fa5030d881d385f1e36cce\r\n\
a=ice-ufrag:58b99ead\r\n\
a=mid:sdparta_0\r\n\
a=msid:{5a990edd-0568-ac40-8d97-310fc33f3411} {218cfa1c-617d-2249-9997-60929ce4c405}\r\n\
a=rtcp-mux\r\n\
a=rtpmap:109 opus/48000/2\r\n\
a=rtpmap:9 G722/8000/1\r\n\
a=rtpmap:0 PCMU/8000\r\n\
a=rtpmap:8 PCMA/8000\r\n\
a=setup:actpass\r\n\
a=ssrc:2655508255 cname:{735484ea-4f6c-f74a-bd66-7425f8476c2e}\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, true);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.media.len(), 1);
let msection = &(sdp.media[0]);
assert_eq!(
*msection.get_type(),
webrtc_sdp::media_type::SdpMediaValue::Audio
);
assert_eq!(msection.get_port(), 9);
assert_eq!(msection.get_port_count(), 0);
assert_eq!(
*msection.get_proto(),
webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
);
assert!(msection.get_connection().is_some());
assert!(msection.get_bandwidth().is_empty());
assert!(!msection.get_attributes().is_empty());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Sendrecv)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Extmap)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Fmtp)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IcePwd)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IceUfrag)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Mid)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Mid)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Msid)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::RtcpMux)
.is_some());
assert_eq!(
msection
.get_attributes_of_type(webrtc_sdp::attribute_type::SdpAttributeType::Rtpmap)
.len(),
4
);
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Setup)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Ssrc)
.is_some());
}
#[test]
fn parse_firefox_video_offer() {
let sdp_str = "v=0\r\n\
o=mozilla...THIS_IS_SDPARTA-52.0a1 506705521068071134 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
a=fingerprint:sha-256 CD:34:D1:62:16:95:7B:B7:EB:74:E2:39:27:97:EB:0B:23:73:AC:BC:BF:2F:E3:91:CB:57:A9:9D:4A:A2:0B:40\r\n\
a=group:BUNDLE sdparta_2\r\n\
a=ice-options:trickle\r\n\
a=msid-semantic:WMS *\r\n\
m=video 9 UDP/TLS/RTP/SAVPF 126 120 97\r\n\
c=IN IP4 0.0.0.0\r\n\
a=recvonly\r\n\
a=fmtp:126 profile-level-id=42e01f;level-asymmetry-allowed=1;packetization-mode=1\r\n\
a=fmtp:120 max-fs=12288;max-fr=60\r\n\
a=fmtp:97 profile-level-id=42e01f;level-asymmetry-allowed=1\r\n\
a=ice-pwd:e3baa26dd2fa5030d881d385f1e36cce\r\n\
a=ice-ufrag:58b99ead\r\n\
a=mid:sdparta_2\r\n\
a=rtcp-fb:126 nack\r\n\
a=rtcp-fb:126 nack pli\r\n\
a=rtcp-fb:126 ccm fir\r\n\
a=rtcp-fb:126 goog-remb\r\n\
a=rtcp-fb:120 nack\r\n\
a=rtcp-fb:120 nack pli\r\n\
a=rtcp-fb:120 ccm fir\r\n\
a=rtcp-fb:120 goog-remb\r\n\
a=rtcp-fb:97 nack\r\n\
a=rtcp-fb:97 nack pli\r\n\
a=rtcp-fb:97 ccm fir\r\n\
a=rtcp-fb:97 goog-remb\r\n\
a=rtcp-mux\r\n\
a=rtpmap:126 H264/90000\r\n\
a=rtpmap:120 VP8/90000\r\n\
a=rtpmap:97 H264/90000\r\n\
a=setup:actpass\r\n\
a=ssrc:2709871439 cname:{735484ea-4f6c-f74a-bd66-7425f8476c2e}";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, true);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.media.len(), 1);
let msection = &(sdp.media[0]);
assert_eq!(
*msection.get_type(),
webrtc_sdp::media_type::SdpMediaValue::Video
);
assert_eq!(msection.get_port(), 9);
assert_eq!(
*msection.get_proto(),
webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
);
assert!(msection.get_connection().is_some());
assert!(msection.get_bandwidth().is_empty());
assert!(!msection.get_attributes().is_empty());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Recvonly)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Extmap)
.is_some());
assert_eq!(
msection
.get_attributes_of_type(webrtc_sdp::attribute_type::SdpAttributeType::Fmtp)
.len(),
3
);
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IcePwd)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IceUfrag)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Mid)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Mid)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Msid)
.is_some());
assert_eq!(
msection
.get_attributes_of_type(webrtc_sdp::attribute_type::SdpAttributeType::Rtcpfb)
.len(),
12
);
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::RtcpMux)
.is_some());
assert_eq!(
msection
.get_attributes_of_type(webrtc_sdp::attribute_type::SdpAttributeType::Rtpmap)
.len(),
3
);
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Setup)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Ssrc)
.is_some());
}
#[test]
fn parse_firefox_datachannel_offer() {
let sdp_str = "v=0\r\n\
o=mozilla...THIS_IS_SDPARTA-52.0a2 3327975756663609975 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
a=sendrecv\r\n\
a=fingerprint:sha-256 AC:72:CB:D6:1E:A3:A3:B0:E7:97:77:25:03:4B:5B:FF:19:6C:02:C6:93:7D:EB:5C:81:6F:36:D9:02:32:F8:23\r\n\
a=ice-options:trickle\r\n\
a=msid-semantic:WMS *\r\n\
m=application 49760 DTLS/SCTP 5000\r\n\
c=IN IP4 172.16.156.106\r\n\
a=candidate:0 1 UDP 2122252543 172.16.156.106 49760 typ host\r\n\
a=sendrecv\r\n\
a=end-of-candidates\r\n\
a=ice-pwd:24f485c580129b36447b65df77429a82\r\n\
a=ice-ufrag:4cba30fe\r\n\
a=mid:sdparta_0\r\n\
a=sctpmap:5000 webrtc-datachannel 256\r\n\
a=setup:active\r\n\
a=ssrc:3376683177 cname:{62f78ee0-620f-a043-86ca-b69f189f1aea}\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, true);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.media.len(), 1);
let msection = &(sdp.media[0]);
assert_eq!(
*msection.get_type(),
webrtc_sdp::media_type::SdpMediaValue::Application
);
assert_eq!(msection.get_port(), 49760);
assert_eq!(
*msection.get_proto(),
webrtc_sdp::media_type::SdpProtocolValue::DtlsSctp
);
assert!(msection.get_connection().is_some());
assert!(msection.get_bandwidth().is_empty());
assert!(!msection.get_attributes().is_empty());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Sendrecv)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Extmap)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IcePwd)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::IceUfrag)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::EndOfCandidates)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Mid)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Msid)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Rtcpfb)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::RtcpMux)
.is_some());
assert!(!msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Rtpmap)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Sctpmap)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Setup)
.is_some());
assert!(msection
.get_attribute(webrtc_sdp::attribute_type::SdpAttributeType::Ssrc)
.is_some());
check_parse_and_serialize(sdp_str);
}
#[test]
// Bundled audio+video offer captured from Chrome.
// NOTE(review): this literal has no `\` line continuations, so the parsed
// string contains raw newlines between the \r\n terminators — presumably
// intentional (the parser appears to tolerate them even in strict mode);
// confirm before reformatting.
fn parse_chrome_audio_video_offer() {
    let sdp = "v=0\r\n
o=- 3836772544440436510 2 IN IP4 127.0.0.1\r\n
s=-\r\n
t=0 0\r\n
a=group:BUNDLE audio video\r\n
a=msid-semantic: WMS HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP\r\n
m=audio 9 UDP/TLS/RTP/SAVPF 111 103 104 9 0 8 106 105 13 126\r\n
c=IN IP4 0.0.0.0\r\n
a=rtcp:9 IN IP4 0.0.0.0\r\n
a=ice-ufrag:A4by\r\n
a=ice-pwd:Gfvb2rbYMiW0dZz8ZkEsXICs\r\n
a=fingerprint:sha-256 15:B0:92:1F:C7:40:EE:22:A6:AF:26:EF:EA:FF:37:1D:B3:EF:11:0B:8B:73:4F:01:7D:C9:AE:26:4F:87:E0:95\r\n
a=setup:actpass\r\n
a=mid:audio\r\n
a=extmap:1 urn:ietf:params:rtp-hdrext:ssrc-audio-level\r\n
a=sendrecv\r\n
a=rtcp-mux\r\n
a=rtpmap:111 opus/48000/2\r\n
a=rtcp-fb:111 transport-cc\r\n
a=fmtp:111 minptime=10;useinbandfec=1\r\n
a=rtpmap:103 ISAC/16000\r\n
a=rtpmap:104 ISAC/32000\r\n
a=rtpmap:9 G722/8000\r\n
a=rtpmap:0 PCMU/8000\r\n
a=rtpmap:8 PCMA/8000\r\n
a=rtpmap:106 CN/32000\r\n
a=rtpmap:105 CN/16000\r\n
a=rtpmap:13 CN/8000\r\n
a=rtpmap:126 telephone-event/8000\r\n
a=ssrc:162559313 cname:qPTZ+BI+42mgbOi+\r\n
a=ssrc:162559313 msid:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP f6188af5-d8d6-462c-9c75-f12bc41fe322\r\n
a=ssrc:162559313 mslabel:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP\r\n
a=ssrc:162559313 label:f6188af5-d8d6-462c-9c75-f12bc41fe322\r\n
m=video 9 UDP/TLS/RTP/SAVPF 100 101 107 116 117 96 97 99 98\r\n
c=IN IP4 0.0.0.0\r\n
a=rtcp:9 IN IP4 0.0.0.0\r\n
a=ice-ufrag:A4by\r\n
a=ice-pwd:Gfvb2rbYMiW0dZz8ZkEsXICs\r\n
a=fingerprint:sha-256 15:B0:92:1F:C7:40:EE:22:A6:AF:26:EF:EA:FF:37:1D:B3:EF:11:0B:8B:73:4F:01:7D:C9:AE:26:4F:87:E0:95\r\n
a=setup:actpass\r\n
a=mid:video\r\n
a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n
a=extmap:3 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\n
a=extmap:4 urn:3gpp:video-orientation\r\n
a=extmap:5 http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01\r\n
a=extmap:6 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay\r\n
a=sendrecv\r\n
a=rtcp-mux\r\n
a=rtcp-rsize\r\n
a=rtpmap:100 VP8/90000\r\n
a=rtcp-fb:100 ccm fir\r\n
a=rtcp-fb:100 nack\r\n
a=rtcp-fb:100 nack pli\r\n
a=rtcp-fb:100 goog-remb\r\n
a=rtcp-fb:100 transport-cc\r\n
a=rtpmap:101 VP9/90000\r\n
a=rtcp-fb:101 ccm fir\r\n
a=rtcp-fb:101 nack\r\n
a=rtcp-fb:101 nack pli\r\n
a=rtcp-fb:101 goog-remb\r\n
a=rtcp-fb:101 transport-cc\r\n
a=rtpmap:107 H264/90000\r\n
a=rtcp-fb:107 ccm fir\r\n
a=rtcp-fb:107 nack\r\n
a=rtcp-fb:107 nack pli\r\n
a=rtcp-fb:107 goog-remb\r\n
a=rtcp-fb:107 transport-cc\r\n
a=fmtp:107 level-asymmetry-allowed=1;packetization-mode=1;profile-level-id=42e01f\r\n
a=rtpmap:116 red/90000\r\n
a=rtpmap:117 ulpfec/90000\r\n
a=rtpmap:96 rtx/90000\r\n
a=fmtp:96 apt=100\r\n
a=rtpmap:97 rtx/90000\r\n
a=fmtp:97 apt=101\r\n
a=rtpmap:99 rtx/90000\r\n
a=fmtp:99 apt=107\r\n
a=rtpmap:98 rtx/90000\r\n
a=fmtp:98 apt=116\r\n
a=ssrc-group:FID 3156517279 2673335628\r\n
a=ssrc:3156517279 cname:qPTZ+BI+42mgbOi+\r\n
a=ssrc:3156517279 msid:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP b6ec5178-c611-403f-bbec-3833ed547c09\r\n
a=ssrc:3156517279 mslabel:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP\r\n
a=ssrc:3156517279 label:b6ec5178-c611-403f-bbec-3833ed547c09\r\n
a=ssrc:2673335628 cname:qPTZ+BI+42mgbOi+\r\n
a=ssrc:2673335628 msid:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP b6ec5178-c611-403f-bbec-3833ed547c09\r\n
a=ssrc:2673335628 mslabel:HWpbmTmXleVSnlssQd80bPuw9cxQFroDkkBP\r\n
a=ssrc:2673335628 label:b6ec5178-c611-403f-bbec-3833ed547c09\r\n";
    let sdp_res = webrtc_sdp::parse_sdp(sdp, true);
    assert!(sdp_res.is_ok());
    let sdp_opt = sdp_res.ok();
    assert!(sdp_opt.is_some());
    let sdp = sdp_opt.unwrap();
    assert_eq!(sdp.version, 0);
    assert_eq!(sdp.media.len(), 2);
    // First m-section: audio.
    let msection1 = &(sdp.media[0]);
    assert_eq!(
        *msection1.get_type(),
        webrtc_sdp::media_type::SdpMediaValue::Audio
    );
    assert_eq!(msection1.get_port(), 9);
    assert_eq!(
        *msection1.get_proto(),
        webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
    );
    assert!(!msection1.get_attributes().is_empty());
    assert!(msection1.get_connection().is_some());
    assert!(msection1.get_bandwidth().is_empty());
    // Second m-section: video.
    let msection2 = &(sdp.media[1]);
    assert_eq!(
        *msection2.get_type(),
        webrtc_sdp::media_type::SdpMediaValue::Video
    );
    assert_eq!(msection2.get_port(), 9);
    assert_eq!(
        *msection2.get_proto(),
        webrtc_sdp::media_type::SdpProtocolValue::UdpTlsRtpSavpf
    );
    assert!(!msection2.get_attributes().is_empty());
    assert!(msection2.get_connection().is_some());
    assert!(msection2.get_bandwidth().is_empty());
}
fn parse_firefox_simulcast_offer() {
let sdp = "v=0\r\n
o=mozilla...THIS_IS_SDPARTA-55.0a1 983028567300715536 0 IN IP4 0.0.0.0\r\n
s=-\r\n
t=0 0\r\n
a=fingerprint:sha-256 68:42:13:88:B6:C1:7D:18:79:07:8A:C6:DC:28:D6:DC:DD:E3:C9:41:E7:80:A7:FE:02:65:FB:76:A0:CD:58:ED\r\n
a=ice-options:trickle\r\n
a=msid-semantic:WMS *\r\n
m=video 9 UDP/TLS/RTP/SAVPF 120 121 126 97\r\n
c=IN IP4 0.0.0.0\r\n
a=sendrecv\r\n
a=extmap:1 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\n
a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n
a=extmap:3/sendonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id\r\n
a=fmtp:126 profile-level-id=42e01f;level-asymmetry-allowed=1;packetization-mode=1\r\n
a=fmtp:97 profile-level-id=42e01f;level-asymmetry-allowed=1\r\n
a=fmtp:120 max-fs=12288;max-fr=60\r\n
a=fmtp:121 max-fs=12288;max-fr=60\r\n
a=ice-pwd:4af388405d558b91f5ba6c2c48f161bf\r\n
a=ice-ufrag:ce1ac488\r\n
a=mid:sdparta_0\r\n
a=msid:{fb6d1fa3-d993-f244-a0fe-d9fb99214c23} {8be9a0f7-9272-6c42-90f3-985d55bd8de5}\r\n
a=rid:foo send\r\n
a=rid:bar send\r\n
a=rtcp-fb:120 nack\r\n
a=rtcp-fb:120 nack pli\r\n
a=rtcp-fb:120 ccm fir\r\n
a=rtcp-fb:120 goog-remb\r\n
a=rtcp-fb:121 nack\r\n
a=rtcp-fb:121 nack pli\r\n
a=rtcp-fb:121 ccm fir\r\n
a=rtcp-fb:121 goog-remb\r\n
a=rtcp-fb:126 nack\r\n
a=rtcp-fb:126 nack pli\r\n
a=rtcp-fb:126 ccm fir\r\n
a=rtcp-fb:126 goog-remb\r\n
a=rtcp-fb:97 nack\r\n
a=rtcp-fb:97 nack pli\r\n
a=rtcp-fb:97 ccm fir\r\n
a=rtcp-fb:97 goog-remb\r\n
a=rtcp-mux\r\n
a=rtpmap:120 VP8/90000\r\n
a=rtpmap:121 VP9/90000\r\n
a=rtpmap:126 H264/90000\r\n
a=rtpmap:97 H264/90000\r\n
a=setup:actpass\r\n
a=simulcast: send rid=foo;bar\r\n
a=ssrc:2988475468 cname:{77067f00-2e8d-8b4c-8992-cfe338f56851}\r\n
a=ssrc:1649784806 cname:{77067f00-2e8d-8b4c-8992-cfe338f56851}\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp, true);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.media.len(), 1);
}
#[test]
fn parse_firefox_simulcast_answer() {
let sdp_str = "v=0\r\n\
o=mozilla...THIS_IS_SDPARTA-55.0a1 7548296603161351381 0 IN IP4 0.0.0.0\r\n\
s=-\r\n\
t=0 0\r\n\
a=fingerprint:sha-256 B1:47:49:4F:7D:83:03:BE:E9:FC:73:A3:FB:33:38:40:0B:3B:6A:56:78:EB:EE:D5:6D:2D:D5:3A:B6:13:97:E7\r\n\
a=ice-options:trickle\r\n\
a=msid-semantic:WMS *\r\n\
m=video 9 UDP/TLS/RTP/SAVPF 120\r\n\
c=IN IP4 0.0.0.0\r\n
a=recvonly\r\n\
a=extmap:1 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\n\
a=extmap:2 urn:ietf:params:rtp-hdrext:toffset\r\n\
a=fmtp:120 max-fs=12288;max-fr=60\r\n\
a=ice-pwd:c886e2caf2ae397446312930cd1afe51\r\n\
a=ice-ufrag:f57396c0\r\n\
a=mid:sdparta_0\r\n\
a=rtcp-fb:120 nack\r\n\
a=rtcp-fb:120 nack pli\r\n\
a=rtcp-fb:120 ccm fir\r\n\
a=rtcp-fb:120 goog-remb\r\n\
a=rtcp-mux\r\n\
a=rtpmap:120 VP8/90000\r\n\
a=setup:active\r\n\
a=ssrc:2564157021 cname:{cae1cd32-7433-5b48-8dc8-8e3f8b2f96cd}\r\n\
a=simulcast: recv rid=foo;bar\r\n\
a=rid:foo recv\r\n\
a=rid:bar recv\r\n\
a=extmap:3/recvonly urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id\r\n";
let sdp_res = webrtc_sdp::parse_sdp(sdp_str, true);
assert!(sdp_res.is_ok());
let sdp_opt = sdp_res.ok();
assert!(sdp_opt.is_some());
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.version, 0);
assert_eq!(sdp.media.len(), 1);
}
#[test]
fn parse_and_serialize_sdp_with_unusual_attributes() {
let sdp_str = "v=0\r\n\
o=- 0 0 IN IP6 2001:db8::4444\r\n\
s=-\r\n\
t=0 0\r\n\
a=ice-pacing:500\r\n\
m=video 0 UDP/TLS/RTP/SAVPF 0\r\n\
b=UNSUPPORTED:12345\r\n\
c=IN IP6 ::1\r\n\
a=rtcp:9 IN IP6 2001:db8::8888\r\n\
a=rtcp-fb:* nack\r\n\
a=extmap:1/recvonly urn:ietf:params:rtp-hdrext:toffset\r\n\
a=extmap:2/sendonly urn:ietf:params:rtp-hdrext:toffset\r\n\
a=extmap:3/sendrecv urn:ietf:params:rtp-hdrext:toffset\r\n\
a=imageattr:* send [x=330,y=250,sar=[1.1,1.3,1.9],q=0.1] recv [x=800,y=[50,80,30],sar=1.1]\r\n\
a=imageattr:97 send [x=[480:16:800],y=[100,200,300],par=[1.2-1.3],q=0.6] [x=1080,y=[144:176],sar=[0.5-0.7]] recv *\r\n\
a=sendrecv\r\n";
check_parse_and_serialize(sdp_str);
}<|fim▁end|>
|
let sdp = sdp_opt.unwrap();
assert_eq!(sdp.get_version(), 0);
assert_eq!(sdp.get_session(), &None);
|
<|file_name|>consts.ts<|end_file_name|><|fim▁begin|>import * as countries from './countries.json';
import * as routes from './routes.json';<|fim▁hole|> routes: routes.routes
};<|fim▁end|>
|
export const Consts = {
countries: countries,
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>from conf import paths
import scipy.io
import numpy as np
def load_train():
    """Load the labelled training set: (identities, labels, images)."""
    mat = scipy.io.loadmat(file_name=paths.TR_SET)
    return mat['tr_identity'], mat['tr_labels'], mat['tr_images']
def load_unlabeled():
    """Load the unlabelled image set."""
    return scipy.io.loadmat(file_name=paths.UNLABELED_SET)['unlabeled_images']
def load_test():
""" Loads training data. """
test_set = scipy.io.loadmat(file_name = paths.TEST_SET)
test_images = test_set['public_test_images']
# hidden_set = scipy.io.loadmat(file_name = paths.HIDDEN_SET)
# hidden_images = hidden_set['hidden_test_images']<|fim▁hole|><|fim▁end|>
|
return test_images
|
<|file_name|>testing_support.py<|end_file_name|><|fim▁begin|>"""
Functions that aid testing in various ways. A typical use would be::
lowcore = create_named_configuration('LOWBD2-CORE')
times = numpy.linspace(-3, +3, 13) * (numpy.pi / 12.0)
frequency = numpy.array([1e8])
channel_bandwidth = numpy.array([1e7])
# Define the component and give it some polarisation and spectral behaviour
f = numpy.array([100.0])
flux = numpy.array([f])
phasecentre = SkyCoord(ra=+15.0 * u.deg, dec=-35.0 * u.deg, frame='icrs', equinox='J2000')
compabsdirection = SkyCoord(ra=17.0 * u.deg, dec=-36.5 * u.deg, frame='icrs', equinox='J2000')
comp = create_skycomponent(flux=flux, frequency=frequency, direction=compabsdirection,
polarisation_frame=PolarisationFrame('stokesI'))
image = create_test_image(frequency=frequency, phasecentre=phasecentre,
cellsize=0.001,
polarisation_frame=PolarisationFrame('stokesI'))
vis = create_visibility(lowcore, times=times, frequency=frequency,
channel_bandwidth=channel_bandwidth,
phasecentre=phasecentre, weight=1,
polarisation_frame=PolarisationFrame('stokesI'),
integration_time=1.0)
"""
import csv
import logging
from typing import List
import astropy.units as u
import numpy
from astropy.coordinates import SkyCoord
from astropy.io import fits
from astropy.wcs import WCS
from astropy.wcs.utils import pixel_to_skycoord
from scipy import interpolate
from data_models.memory_data_models import Configuration, Image, GainTable, Skycomponent, SkyModel, PointingTable
from data_models.parameters import arl_path
from data_models.polarisation import PolarisationFrame
from processing_components.calibration.calibration_control import create_calibration_controls
from processing_components.calibration.operations import create_gaintable_from_blockvisibility, apply_gaintable
from processing_components.image.operations import import_image_from_fits
from processing_components.imaging.base import predict_2d, predict_skycomponent_visibility, \
create_image_from_visibility, advise_wide_field
from processing_components.imaging.primary_beams import create_pb
from processing_components.skycomponent.operations import create_skycomponent, insert_skycomponent, \
apply_beam_to_skycomponent, filter_skycomponents_by_flux
from processing_components.visibility.base import create_blockvisibility, create_visibility
from processing_components.visibility.coalesce import convert_blockvisibility_to_visibility, \
convert_visibility_to_blockvisibility
from processing_library.image.operations import create_image_from_array
log = logging.getLogger(__name__)
def create_test_image(canonical=True, cellsize=None, frequency=None, channel_bandwidth=None,
                      phasecentre=None, polarisation_frame=PolarisationFrame("stokesI")) -> Image:
    """Create a useful test image
    This is the test image M31 widely used in ALMA and other simulations. It is actually part of an Halpha region in
    M31.
    :param canonical: Make the image into a 4 dimensional image
    :param cellsize: Cell size in radians (applied to both RA and Dec axes)
    :param frequency: Frequency (array) in Hz
    :param channel_bandwidth: Channel bandwidth (array) in Hz
    :param phasecentre: Phase centre of image (SkyCoord)
    :param polarisation_frame: Polarisation frame
    :return: Image
    """
    if frequency is None:
        frequency = [1e8]
    # Base 2D model shipped with the package data.
    im = import_image_from_fits(arl_path("data/models/M31.MOD"))
    if canonical:
        # Promote the 2D image to canonical [chan, pol, dec, ra] form.
        if polarisation_frame is None:
            im.polarisation_frame = PolarisationFrame("stokesI")
        elif isinstance(polarisation_frame, PolarisationFrame):
            im.polarisation_frame = polarisation_frame
        else:
            raise ValueError("polarisation_frame is not valid")
        im = replicate_image(im, frequency=frequency, polarisation_frame=im.polarisation_frame)
        if cellsize is not None:
            # WCS increments are in degrees; RA axis is negated by convention.
            im.wcs.wcs.cdelt[0] = -180.0 * cellsize / numpy.pi
            im.wcs.wcs.cdelt[1] = +180.0 * cellsize / numpy.pi
        if frequency is not None:
            im.wcs.wcs.crval[3] = frequency[0]
        if channel_bandwidth is not None:
            im.wcs.wcs.cdelt[3] = channel_bandwidth[0]
        else:
            # No bandwidth given: infer from the channel spacing, or fall back
            # to 0.1% of the single frequency as a nominal width.
            if len(frequency) > 1:
                im.wcs.wcs.cdelt[3] = frequency[1] - frequency[0]
            else:
                im.wcs.wcs.cdelt[3] = 0.001 * frequency[0]
        im.wcs.wcs.radesys = 'ICRS'
        im.wcs.wcs.equinox = 2000.00
    if phasecentre is not None:
        im.wcs.wcs.crval[0] = phasecentre.ra.deg
        im.wcs.wcs.crval[1] = phasecentre.dec.deg
        # WCS is 1 relative
        im.wcs.wcs.crpix[0] = im.data.shape[3] // 2 + 1
        im.wcs.wcs.crpix[1] = im.data.shape[2] // 2 + 1
    return im
def create_test_image_from_s3(npixel=16384, polarisation_frame=PolarisationFrame("stokesI"), cellsize=0.000015,
                              frequency=numpy.array([1e8]), channel_bandwidth=numpy.array([1e6]),
                              phasecentre=None, fov=20, flux_limit=1e-3) -> Image:
    """Create MID test image from S3
    The input catalog was generated at http://s-cubed.physics.ox.ac.uk/s3_sex using the following query::
        Database: s3_sex
        SQL: select * from Galaxies where (pow(10,itot_151)*1000 > 1.0) and (right_ascension between -5 and 5) and (declination between -5 and 5);;
    Number of rows returned: 29966
    For frequencies < 610MHz, there are three tables to use::
        data/models/S3_151MHz_10deg.csv, use fov=10
        data/models/S3_151MHz_20deg.csv, use fov=20
        data/models/S3_151MHz_40deg.csv, use fov=40
    For frequencies > 610MHz, there are three tables:
        data/models/S3_1400MHz_1mJy_10deg.csv, use flux_limit>= 1e-3
        data/models/S3_1400MHz_100uJy_10deg.csv, use flux_limit < 1e-3
        data/models/S3_1400MHz_1mJy_18deg.csv, use flux_limit>= 1e-3
        data/models/S3_1400MHz_100uJy_18deg.csv, use flux_limit < 1e-3
    The component spectral index is calculated from the 610MHz and 151MHz or 1400MHz and 610MHz, and then calculated
    for the specified frequencies.
    If polarisation_frame is not stokesI then the image will a polarised axis but the values will be zero.
    :param npixel: Number of pixels
    :param polarisation_frame: Polarisation frame (default PolarisationFrame("stokesI"))
    :param cellsize: cellsize in radians
    :param frequency: Frequencies at which the fluxes are evaluated (Hz)
    :param channel_bandwidth: Channel width (Hz)
    :param phasecentre: phasecentre (SkyCoord)
    :param fov: fov 10 | 20 | 40
    :param flux_limit: Minimum flux (Jy)
    :return: Image
    """
    ras = []
    decs = []
    fluxes = []
    if phasecentre is None:
        phasecentre = SkyCoord(ra=+180.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
    if polarisation_frame is None:
        polarisation_frame = PolarisationFrame("stokesI")
    npol = polarisation_frame.npol
    nchan = len(frequency)
    # Canonical image shape [chan, pol, dec, ra]
    shape = [nchan, npol, npixel, npixel]
    w = WCS(naxis=4)
    # The negation in the longitude is needed by definition of RA, DEC
    w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth[0]]
    w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]
    w.wcs.ctype = ["RA---SIN", "DEC--SIN", 'STOKES', 'FREQ']
    w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, frequency[0]]
    w.naxis = 4
    w.wcs.radesys = 'ICRS'
    w.wcs.equinox = 2000.0
    model = create_image_from_array(numpy.zeros(shape), w, polarisation_frame=polarisation_frame)
    # Pick the catalogue matching band / field of view / flux limit.
    if numpy.max(frequency) > 6.1E8:
        if fov > 10:
            fovstr = '18'
        else:
            fovstr = '10'
        if flux_limit >= 1e-3:
            csvfilename = arl_path('data/models/S3_1400MHz_1mJy_%sdeg.csv' % fovstr)
        else:
            csvfilename = arl_path('data/models/S3_1400MHz_100uJy_%sdeg.csv' % fovstr)
        log.info('create_test_image_from_s3: Reading S3 sources from %s ' % csvfilename)
    else:
        assert fov in [10, 20, 40], "Field of view invalid: use one of %s" % ([10, 20, 40])
        csvfilename = arl_path('data/models/S3_151MHz_%ddeg.csv' % (fov))
        log.info('create_test_image_from_s3: Reading S3 sources from %s ' % csvfilename)
    with open(csvfilename) as csvfile:
        readCSV = csv.reader(csvfile, delimiter=',')
        r = 0
        for row in readCSV:
            # Skip first row
            if r > 0:
                # Catalogue positions are offsets from the field centre (deg).
                # NOTE(review): unlike create_test_skycomponents_from_s3, no
                # cos(dec) correction is applied to RA here -- confirm intent.
                ra = float(row[4]) + phasecentre.ra.deg
                dec = float(row[5]) + phasecentre.dec.deg
                if numpy.max(frequency) > 6.1E8:
                    # Spectral index from the 610MHz and 1400MHz fluxes
                    alpha = (float(row[11]) - float(row[10])) / numpy.log10(1400.0 / 610.0)
                    flux = numpy.power(10, float(row[10])) * numpy.power(frequency / 1.4e9, alpha)
                else:
                    # Spectral index from the 151MHz and 610MHz fluxes
                    alpha = (float(row[10]) - float(row[9])) / numpy.log10(610.0 / 151.0)
                    flux = numpy.power(10, float(row[9])) * numpy.power(frequency / 1.51e8, alpha)
                if numpy.max(flux) > flux_limit:
                    ras.append(ra)
                    decs.append(dec)
                    fluxes.append(flux)
            r += 1
    # Redundant: the with-statement has already closed the file.
    csvfile.close()
    assert len(fluxes) > 0, "No sources found above flux limit %s" % flux_limit
    log.info('create_test_image_from_s3: %d sources read' % (len(fluxes)))
    # Convert world coordinates to (fractional) pixel positions.
    p = w.sub(2).wcs_world2pix(numpy.array(ras), numpy.array(decs), 1)
    total_flux = numpy.sum(fluxes)
    fluxes = numpy.array(fluxes)
    ip = numpy.round(p).astype('int')
    # Keep only the sources whose nearest pixel lands inside the image.
    ok = numpy.where((0 <= ip[0, :]) & (npixel > ip[0, :]) & (0 <= ip[1, :]) & (npixel > ip[1, :]))[0]
    ps = ip[:, ok]
    fluxes = fluxes[ok]
    actual_flux = numpy.sum(fluxes)
    log.info('create_test_image_from_s3: %d sources inside the image' % (ps.shape[1]))
    log.info('create_test_image_from_s3: average channel flux in S3 model = %.3f, actual average channel flux in '
             'image = %.3f' % (total_flux / float(nchan), actual_flux / float(nchan)))
    # Deposit each source at its nearest pixel, channel by channel.
    for chan in range(nchan):
        for iflux, flux in enumerate(fluxes):
            model.data[chan, 0, ps[1, iflux], ps[0, iflux]] = flux[chan]
    return model
def create_test_skycomponents_from_s3(polarisation_frame=PolarisationFrame("stokesI"),
                                      frequency=numpy.array([1e8]), channel_bandwidth=numpy.array([1e6]),
                                      phasecentre=None, fov=20, flux_limit=1e-3,
                                      radius=None):
    """Create test skycomponents from the S3 simulated survey
    The input catalog was generated at http://s-cubed.physics.ox.ac.uk/s3_sex using the following query::
        Database: s3_sex
        SQL: select * from Galaxies where (pow(10,itot_151)*1000 > 1.0) and (right_ascension between -5 and 5) and (declination between -5 and 5);;
    Number of rows returned: 29966
    For frequencies < 610MHz, there are three tables to use::
        data/models/S3_151MHz_10deg.csv, use fov=10
        data/models/S3_151MHz_20deg.csv, use fov=20
        data/models/S3_151MHz_40deg.csv, use fov=40
    For frequencies > 610MHz, there are three tables:
        data/models/S3_1400MHz_1mJy_10deg.csv, use flux_limit>= 1e-3
        data/models/S3_1400MHz_100uJy_10deg.csv, use flux_limit < 1e-3
        data/models/S3_1400MHz_1mJy_18deg.csv, use flux_limit>= 1e-3
        data/models/S3_1400MHz_100uJy_18deg.csv, use flux_limit < 1e-3
    The component spectral index is calculated from the 610MHz and 151MHz or 1400MHz and 610MHz, and then calculated
    for the specified frequencies.
    :param polarisation_frame: Polarisation frame (default PolarisationFrame("stokesI"))
    :param frequency: Frequencies at which the fluxes are evaluated (Hz)
    :param channel_bandwidth: Channel width (Hz); currently unused here
    :param phasecentre: phasecentre (SkyCoord); default RA=180deg, Dec=-60deg
    :param fov: fov 10 | 20 | 40
    :param flux_limit: Minimum flux (Jy)
    :param radius: Selection radius about the phase centre (rad); None selects all sources
    :return: List of Skycomponents
    """
    ras = []
    decs = []
    fluxes = []
    names = []
    if phasecentre is None:
        phasecentre = SkyCoord(ra=+180.0 * u.deg, dec=-60.0 * u.deg, frame='icrs', equinox='J2000')
    if polarisation_frame is None:
        polarisation_frame = PolarisationFrame("stokesI")
    # Pick the catalogue matching band / field of view / flux limit.
    if numpy.max(frequency) > 6.1E8:
        if fov > 10:
            fovstr = '18'
        else:
            fovstr = '10'
        if flux_limit >= 1e-3:
            csvfilename = arl_path('data/models/S3_1400MHz_1mJy_%sdeg.csv' % fovstr)
        else:
            csvfilename = arl_path('data/models/S3_1400MHz_100uJy_%sdeg.csv' % fovstr)
        log.info('create_test_skycomponents_from_s3: Reading S3-SEX sources from %s ' % csvfilename)
    else:
        assert fov in [10, 20, 40], "Field of view invalid: use one of %s" % ([10, 20, 40])
        csvfilename = arl_path('data/models/S3_151MHz_%ddeg.csv' % (fov))
        log.info('create_test_skycomponents_from_s3: Reading S3-SEX sources from %s ' % csvfilename)
    skycomps = list()
    with open(csvfilename) as csvfile:
        readCSV = csv.reader(csvfile, delimiter=',')
        r = 0
        for row in readCSV:
            # Skip first row
            if r > 0:
                # Catalogue offsets are on the sky: correct RA by cos(dec).
                ra = float(row[4])/numpy.cos(phasecentre.dec.rad) + phasecentre.ra.deg
                dec = float(row[5]) + phasecentre.dec.deg
                if numpy.max(frequency) > 6.1E8:
                    # Spectral index from the 610MHz and 1400MHz fluxes
                    alpha = (float(row[11]) - float(row[10])) / numpy.log10(1400.0 / 610.0)
                    flux = numpy.power(10, float(row[10])) * numpy.power(frequency / 1.4e9, alpha)
                else:
                    # Spectral index from the 151MHz and 610MHz fluxes
                    alpha = (float(row[10]) - float(row[9])) / numpy.log10(610.0 / 151.0)
                    flux = numpy.power(10, float(row[9])) * numpy.power(frequency / 1.51e8, alpha)
                if numpy.max(flux) > flux_limit:
                    ras.append(ra)
                    decs.append(dec)
                    fluxes.append([[f] for f in flux])
                    names.append("S3_%s" % row[0])
            r += 1
    assert len(fluxes) > 0, "No sources found above flux limit %s" % flux_limit
    directions = SkyCoord(ra=ras * u.deg, dec=decs * u.deg)
    if phasecentre is not None:
        separations = directions.separation(phasecentre).to('rad').value
    else:
        separations = numpy.zeros(len(names))
    for isource, name in enumerate(names):
        direction = directions[isource]
        # radius=None means no radial selection (previously it raised TypeError).
        if radius is None or separations[isource] < radius:
            # Bug fix: previously this tested the loop-leftover 'flux' from the
            # last CSV row instead of this source's own flux.
            if not numpy.isnan(fluxes[isource]).any():
                skycomps.append(Skycomponent(direction=direction, flux=fluxes[isource], frequency=frequency,
                                             name=names[isource], shape='Point',
                                             polarisation_frame=polarisation_frame))
    log.info('create_test_skycomponents_from_s3: %d sources found above fluxlimit inside search radius' %
             len(skycomps))
    return skycomps
def create_low_test_image_from_gleam(npixel=512, polarisation_frame=PolarisationFrame("stokesI"), cellsize=0.000015,
                                     frequency=numpy.array([1e8]), channel_bandwidth=numpy.array([1e6]),
                                     phasecentre=None, kind='cubic', applybeam=False, flux_limit=0.1,
                                     flux_max=numpy.inf, flux_min=-numpy.inf,
                                     radius=None, insert_method='Nearest') -> Image:
    """Create LOW test image from the GLEAM survey
    Stokes I is estimated from a cubic spline fit to the measured fluxes. The polarised flux is always zero.
    See http://www.mwatelescope.org/science/gleam-survey The catalog is available from Vizier.
    VIII/100 GaLactic and Extragalactic All-sky MWA survey (Hurley-Walker+, 2016)
    GaLactic and Extragalactic All-sky Murchison Wide Field Array (GLEAM) survey. I: A low-frequency extragalactic
    catalogue. Hurley-Walker N., et al., Mon. Not. R. Astron. Soc., 464, 1146-1167 (2017), 2017MNRAS.464.1146H
    :param npixel: Number of pixels
    :param polarisation_frame: Polarisation frame (default PolarisationFrame("stokesI"))
    :param cellsize: cellsize in radians
    :param frequency: Frequencies at which the fluxes are evaluated (Hz)
    :param channel_bandwidth: Channel width (Hz)
    :param phasecentre: phasecentre (SkyCoord)
    :param kind: Kind of interpolation (see scipy.interpolate.interp1d) Default: cubic
    :param applybeam: Multiply the model by the LOW primary beam?
    :param flux_limit: Weakest component to select from the catalogue (Jy)
    :param flux_max: Strongest component to keep (Jy)
    :param flux_min: Weakest component to keep (Jy)
    :param radius: Selection radius about the phase centre (rad); default covers the image
    :param insert_method: Nearest | PSWF | Lanczos
    :return: Image
    """
    if phasecentre is None:
        phasecentre = SkyCoord(ra=+15.0 * u.deg, dec=-35.0 * u.deg, frame='icrs', equinox='J2000')
    if radius is None:
        # Half the image diagonal, so the selection covers the whole image.
        radius = npixel * cellsize / numpy.sqrt(2.0)
    sc = create_low_test_skycomponents_from_gleam(flux_limit=flux_limit, polarisation_frame=polarisation_frame,
                                                  frequency=frequency, phasecentre=phasecentre,
                                                  kind=kind, radius=radius)
    sc = filter_skycomponents_by_flux(sc, flux_min=flux_min, flux_max=flux_max)
    if polarisation_frame is None:
        polarisation_frame = PolarisationFrame("stokesI")
    npol = polarisation_frame.npol
    nchan = len(frequency)
    # Canonical image shape [chan, pol, dec, ra]
    shape = [nchan, npol, npixel, npixel]
    w = WCS(naxis=4)
    # The negation in the longitude is needed by definition of RA, DEC
    w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth[0]]
    w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]
    w.wcs.ctype = ["RA---SIN", "DEC--SIN", 'STOKES', 'FREQ']
    w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, frequency[0]]
    w.naxis = 4
    w.wcs.radesys = 'ICRS'
    w.wcs.equinox = 2000.0
    model = create_image_from_array(numpy.zeros(shape), w, polarisation_frame=polarisation_frame)
    model = insert_skycomponent(model, sc, insert_method=insert_method)
    if applybeam:
        # Attenuate the model by the LOW primary beam evaluated on its grid.
        beam = create_pb(model, telescope='LOW', use_local=False)
        model.data[...] *= beam.data[...]
    return model
def create_low_test_skymodel_from_gleam(npixel=512, polarisation_frame=PolarisationFrame("stokesI"), cellsize=0.000015,
                                        frequency=numpy.array([1e8]), channel_bandwidth=numpy.array([1e6]),
                                        phasecentre=None, kind='cubic', applybeam=True, flux_limit=0.1,
                                        flux_max=numpy.inf, flux_threshold=1.0, insert_method='Nearest',
                                        telescope='LOW') -> SkyModel:
    """Create LOW test skymodel from the GLEAM survey
    Stokes I is estimated from a cubic spline fit to the measured fluxes. The polarised flux is always zero.
    See http://www.mwatelescope.org/science/gleam-survey The catalog is available from Vizier.
    VIII/100 GaLactic and Extragalactic All-sky MWA survey (Hurley-Walker+, 2016)
    GaLactic and Extragalactic All-sky Murchison Wide Field Array (GLEAM) survey. I: A low-frequency extragalactic
    catalogue. Hurley-Walker N., et al., Mon. Not. R. Astron. Soc., 464, 1146-1167 (2017), 2017MNRAS.464.1146H
    :param telescope: Telescope name for the primary beam (default 'LOW')
    :param npixel: Number of pixels
    :param polarisation_frame: Polarisation frame (default PolarisationFrame("stokesI"))
    :param cellsize: cellsize in radians
    :param frequency: Frequencies at which the fluxes are evaluated (Hz)
    :param channel_bandwidth: Channel width (Hz)
    :param phasecentre: phasecentre (SkyCoord)
    :param kind: Kind of interpolation (see scipy.interpolate.interp1d) Default: cubic
    :param applybeam: Apply the primary beam?
    :param flux_limit: Weakest component
    :param flux_max: Maximum strength component to be included in components
    :param flux_threshold: Split between components (brighter) and image (weaker)
    :param insert_method: Nearest | PSWF | Lanczos
    :return: SkyModel
    """
    if phasecentre is None:
        phasecentre = SkyCoord(ra=+15.0 * u.deg, dec=-35.0 * u.deg, frame='icrs', equinox='J2000')
    # NOTE(review): radius here is npixel*cellsize (full width), not the
    # half-diagonal used by create_low_test_image_from_gleam -- confirm intent.
    radius = npixel * cellsize
    sc = create_low_test_skycomponents_from_gleam(flux_limit=flux_limit, polarisation_frame=polarisation_frame,
                                                  frequency=frequency, phasecentre=phasecentre,
                                                  kind=kind, radius=radius)
    sc = filter_skycomponents_by_flux(sc, flux_max=flux_max)
    if polarisation_frame is None:
        polarisation_frame = PolarisationFrame("stokesI")
    npol = polarisation_frame.npol
    nchan = len(frequency)
    # Canonical image shape [chan, pol, dec, ra]
    shape = [nchan, npol, npixel, npixel]
    w = WCS(naxis=4)
    # The negation in the longitude is needed by definition of RA, DEC
    w.wcs.cdelt = [-cellsize * 180.0 / numpy.pi, cellsize * 180.0 / numpy.pi, 1.0, channel_bandwidth[0]]
    w.wcs.crpix = [npixel // 2 + 1, npixel // 2 + 1, 1.0, 1.0]
    w.wcs.ctype = ["RA---SIN", "DEC--SIN", 'STOKES', 'FREQ']
    w.wcs.crval = [phasecentre.ra.deg, phasecentre.dec.deg, 1.0, frequency[0]]
    w.naxis = 4
    w.wcs.radesys = 'ICRS'
    w.wcs.equinox = 2000.0
    model = create_image_from_array(numpy.zeros(shape), w, polarisation_frame=polarisation_frame)
    if applybeam:
        # Attenuate the component fluxes by the primary beam.
        beam = create_pb(model, telescope=telescope, use_local=False)
        sc = apply_beam_to_skycomponent(sc, beam)
    # Bright components are kept as discrete components; weak ones go into the image.
    weaksc = filter_skycomponents_by_flux(sc, flux_max=flux_threshold)
    brightsc = filter_skycomponents_by_flux(sc, flux_min=flux_threshold)
    model = insert_skycomponent(model, weaksc, insert_method=insert_method)
    log.info(
        'create_low_test_skymodel_from_gleam: %d bright sources above flux threshold %.3f, %d weak sources below ' %
        (len(brightsc), flux_threshold, len(weaksc)))
    return SkyModel(components=brightsc, image=model, mask=None, gaintable=None)
def create_low_test_skycomponents_from_gleam(flux_limit=0.1, polarisation_frame=PolarisationFrame("stokesI"),
                                             frequency=numpy.array([1e8]), kind='cubic', phasecentre=None,
                                             radius=1.0) \
        -> List[Skycomponent]:
    """Create sky components from the GLEAM survey
    Stokes I is estimated from a cubic spline fit to the measured fluxes. The polarised flux is always zero.
    See http://www.mwatelescope.org/science/gleam-survey The catalog is available from Vizier.
    VIII/100 GaLactic and Extragalactic All-sky MWA survey (Hurley-Walker+, 2016)
    GaLactic and Extragalactic All-sky Murchison Wide Field Array (GLEAM) survey. I: A low-frequency extragalactic
    catalogue. Hurley-Walker N., et al., Mon. Not. R. Astron. Soc., 464, 1146-1167 (2017), 2017MNRAS.464.1146H
    :param flux_limit: Only write components brighter than this (Jy)
    :param polarisation_frame: Polarisation frame (default PolarisationFrame("stokesI"))
    :param frequency: Frequencies at which the flux will be estimated
    :param kind: Kind of interpolation (see scipy.interpolate.interp1d) Default: cubic
    :param phasecentre: Desired phase centre (SkyCoord) default None implies all sources
    :param radius: Radius of sources selected around phasecentre (default 1.0 rad)
    :return: List of Skycomponents
    """
    fitsfile = arl_path("data/models/GLEAM_EGC.fits")
    rad2deg = 180.0 / numpy.pi
    # Coarse declination pre-filter on the catalogue rows.
    # NOTE(review): uses radius/2 here, but the exact separation test below
    # uses the full radius -- confirm the intended margin.
    decmin = phasecentre.dec.to('deg').value - rad2deg * radius / 2.0
    decmax = phasecentre.dec.to('deg').value + rad2deg * radius / 2.0
    hdulist = fits.open(fitsfile, lazy_load_hdus=False)
    recs = hdulist[1].data[0].array
    # Progressive filtering: flux limit, then declination band.
    fluxes = recs['peak_flux_wide']
    mask = fluxes > flux_limit
    filtered_recs = recs[mask]
    decs = filtered_recs['DEJ2000']
    mask = decs > decmin
    filtered_recs = filtered_recs[mask]
    decs = filtered_recs['DEJ2000']
    mask = decs < decmax
    filtered_recs = filtered_recs[mask]
    ras = filtered_recs['RAJ2000']
    decs = filtered_recs['DEJ2000']
    names = filtered_recs['Name']
    if polarisation_frame is None:
        polarisation_frame = PolarisationFrame("stokesI")
    npol = polarisation_frame.npol
    nchan = len(frequency)
    # For every source, we read all measured fluxes and interpolate to the
    # required frequencies
    gleam_freqs = numpy.array([76, 84, 92, 99, 107, 115, 122, 130, 143, 151, 158, 166, 174, 181, 189, 197, 204,
                               212, 220, 227])
    gleam_flux_freq = numpy.zeros([len(names), len(gleam_freqs)])
    for i, f in enumerate(gleam_freqs):
        gleam_flux_freq[:, i] = filtered_recs['int_flux_%03d' % (f)][:]
    skycomps = []
    directions = SkyCoord(ra=ras * u.deg, dec=decs * u.deg)
    if phasecentre is not None:
        separations = directions.separation(phasecentre).to('rad').value
    else:
        separations = numpy.zeros(len(names))
    for isource, name in enumerate(names):
        direction = directions[isource]
        if separations[isource] < radius:
            # Interpolate the measured GLEAM fluxes (MHz grid) to the
            # requested frequencies; only Stokes I is filled.
            fint = interpolate.interp1d(gleam_freqs * 1.0e6, gleam_flux_freq[isource, :], kind=kind)
            flux = numpy.zeros([nchan, npol])
            flux[:, 0] = fint(frequency)
            if not numpy.isnan(flux).any():
                skycomps.append(Skycomponent(direction=direction, flux=flux, frequency=frequency,
                                             name=name, shape='Point',
                                             polarisation_frame=polarisation_frame))
    log.info('create_low_test_skycomponents_from_gleam: %d sources above flux limit %.3f' % (len(skycomps), flux_limit))
    hdulist.close()
    return skycomps
def replicate_image(im: Image, polarisation_frame=PolarisationFrame('stokesI'), frequency=numpy.array([1e8])) \
        -> Image:
    """ Make a new canonical shape Image, extended along third and fourth axes by replication.
    The order of the data is [chan, pol, dec, ra]
    :param frequency: Frequencies for the new channel axis
    :param im: Image to replicate (returned unchanged if already 4D)
    :param polarisation_frame: Polarisation_frame
    :return: Image
    """
    if len(im.data.shape) == 2:
        fim = Image()
        newwcs = WCS(naxis=4)
        # NOTE(review): the +1.0 offset on crpix looks like a 1-relative FITS
        # convention adjustment -- confirm against import_image_from_fits.
        newwcs.wcs.crpix = [im.wcs.wcs.crpix[0] + 1.0, im.wcs.wcs.crpix[1] + 1.0, 1.0, 1.0]
        newwcs.wcs.cdelt = [im.wcs.wcs.cdelt[0], im.wcs.wcs.cdelt[1], 1.0, 1.0]
        newwcs.wcs.crval = [im.wcs.wcs.crval[0], im.wcs.wcs.crval[1], 1.0, frequency[0]]
        newwcs.wcs.ctype = [im.wcs.wcs.ctype[0], im.wcs.wcs.ctype[1], 'STOKES', 'FREQ']
        nchan = len(frequency)
        npol = polarisation_frame.npol
        fim.polarisation_frame = polarisation_frame
        fim.wcs = newwcs
        # Bug fix: the spatial axes were previously transposed
        # ([shape[1], shape[0]]), which makes the replication below fail for
        # non-square images. Canonical order is [chan, pol, dec, ra].
        fshape = [nchan, npol, im.data.shape[0], im.data.shape[1]]
        fim.data = numpy.zeros(fshape)
        log.info("replicate_image: replicating shape %s to %s" % (im.data.shape, fim.data.shape))
        # Replicate the 2D image into every channel of polarisation 0; the
        # other polarisations stay zero.
        for i3 in range(nchan):
            fim.data[i3, 0, :, :] = im.data[:, :]
        return fim
    else:
        return im
def create_blockvisibility_iterator(config: Configuration, times: numpy.array, frequency: numpy.array,
                                    channel_bandwidth, phasecentre: SkyCoord, weight: float = 1,
                                    polarisation_frame=PolarisationFrame('stokesI'), integration_time=1.0,
                                    number_integrations=1, predict=predict_2d, model=None, components=None,
                                    phase_error=0.0, amplitude_error=0.0, sleep=0.0, **kwargs):
    """ Create a sequence of Visibilities and optionally predicting and coalescing
    This is useful mainly for performing large simulations. Do something like::
        vis_iter = create_blockvisibility_iterator(config, times, frequency, channel_bandwidth, phasecentre=phasecentre,
                                                   weight=1.0, integration_time=30.0, number_integrations=3)
        for i, vis in enumerate(vis_iter):
            if i == 0:
                fullvis = vis
            else:
                fullvis = append_visibility(fullvis, vis)
    :param config: Configuration of antennas
    :param times: hour angles in radians
    :param frequency: frequencies (Hz) Shape [nchan]
    :param channel_bandwidth: Channel bandwidths (Hz)
    :param weight: weight of a single sample
    :param phasecentre: phasecentre of observation
    :param polarisation_frame: PolarisationFrame of the visibilities
    :param integration_time: Integration time ('auto' or value in s)
    :param number_integrations: Number of integrations to be created at each time.
    :param predict: Prediction function used when a model is supplied
    :param model: Model image to be inserted
    :param components: Components to be inserted
    :param phase_error: std of phase errors to apply (radians); 0 disables
    :param amplitude_error: std of amplitude errors to apply; 0 disables
    :param sleep: Time to sleep between yields (s)
    :return: Generator of BlockVisibility
    """
    # Hoisted out of the loop: the original did "import time" inside the loop
    # body, shadowing the 'time' loop variable.
    import time as time_mod
    for hour_angle in times:
        # number_integrations consecutive samples starting at this hour angle;
        # integration_time in seconds corresponds to pi/43200 radians.
        actualtimes = hour_angle + numpy.arange(0, number_integrations) * integration_time * numpy.pi / 43200.0
        bvis = create_blockvisibility(config, actualtimes, frequency=frequency, phasecentre=phasecentre, weight=weight,
                                      polarisation_frame=polarisation_frame, integration_time=integration_time,
                                      channel_bandwidth=channel_bandwidth)
        if model is not None:
            vis = convert_blockvisibility_to_visibility(bvis)
            vis = predict(vis, model, **kwargs)
            bvis = convert_visibility_to_blockvisibility(vis)
        if components is not None:
            # NOTE(review): the return value is discarded; this relies on
            # predict_skycomponent_visibility mutating bvis in place -- confirm.
            vis = predict_skycomponent_visibility(bvis, components)
        # Add phase errors
        if phase_error > 0.0 or amplitude_error > 0.0:
            gt = create_gaintable_from_blockvisibility(bvis)
            gt = simulate_gaintable(gt=gt, phase_error=phase_error, amplitude_error=amplitude_error)
            bvis = apply_gaintable(bvis, gt)
        time_mod.sleep(sleep)
        yield bvis
def simulate_gaintable(gt: GainTable, phase_error=0.1, amplitude_error=0.0, smooth_channels=1, leakage=0.0,
                       **kwargs) -> GainTable:
    """ Simulate a gain table
    :type gt: GainTable
    :param phase_error: std of normal distribution, zero mean
    :param amplitude_error: std of log normal distribution
    :param leakage: std of cross hand leakage
    :param smooth_channels: Use bspline over smooth_channels
    :param kwargs:
    :return: Gaintable
    """
    def moving_average(a, n=3):
        # Boxcar smoothing; output length is len(a) - n + 1.
        return numpy.convolve(a, numpy.ones((n,)) / n, mode='valid')
    log.debug("simulate_gaintable: Simulating amplitude error = %.4f, phase error = %.4f"
              % (amplitude_error, phase_error))
    amps = 1.0
    phases = 1.0
    ntimes, nant, nchan, nrec, _ = gt.data['gain'].shape
    if phase_error > 0.0:
        phases = numpy.zeros(gt.data['gain'].shape)
        for time in range(ntimes):
            for ant in range(nant):
                # Draw extra samples so the moving average still yields nchan values.
                phase = numpy.random.normal(0, phase_error, nchan + int(smooth_channels) - 1)
                if smooth_channels > 1:
                    phase = moving_average(phase, smooth_channels)
                # Broadcast the per-channel phase over the receptor axes.
                phases[time, ant, ...] = phase[..., numpy.newaxis, numpy.newaxis]
    if amplitude_error > 0.0:
        amps = numpy.ones(gt.data['gain'].shape, dtype='complex')
        for time in range(ntimes):
            for ant in range(nant):
                amp = numpy.random.lognormal(mean=0.0, sigma=amplitude_error, size=nchan + int(smooth_channels) - 1)
                if smooth_channels > 1:
                    amp = moving_average(amp, smooth_channels)
                # Normalise so the mean gain amplitude stays at unity.
                amp = amp / numpy.average(amp)
                amps[time, ant, ...] = amp[..., numpy.newaxis, numpy.newaxis]
    gt.data['gain'] = amps * numpy.exp(0 + 1j * phases)
    # nrec was already unpacked from the gain shape above; the assignment
    # broadcasts into the existing array, so no need to re-read the shape here.
    if nrec > 1:
        if leakage > 0.0:
            # Complex cross-hand leakage proportional to the parallel-hand gains.
            leak = numpy.random.normal(0, leakage, gt.data['gain'][..., 0, 0].shape) + 1j * \
                   numpy.random.normal(0, leakage, gt.data['gain'][..., 0, 0].shape)
            gt.data['gain'][..., 0, 1] = gt.data['gain'][..., 0, 0] * leak
            leak = numpy.random.normal(0, leakage, gt.data['gain'][..., 1, 1].shape) + 1j * \
                   numpy.random.normal(0, leakage, gt.data['gain'][..., 1, 1].shape)
            gt.data['gain'][..., 1, 0] = gt.data['gain'][..., 1, 1] * leak
        else:
            gt.data['gain'][..., 0, 1] = 0.0
            gt.data['gain'][..., 1, 0] = 0.0
    return gt
def simulate_pointingtable(pt: PointingTable, pointing_error, static_pointing_error=None, global_pointing_error=None,
                           seed=None, **kwargs) -> PointingTable:
    """ Simulate a pointing table (previous docstring said "gain table")
    :type pt: PointingTable
    :param pointing_error: std of normal distribution (radians), redrawn per sample
    :param static_pointing_error: std of normal distribution (radians), 2-vector [az, el]; fixed per antenna
    :param global_pointing_error: 2-vector of global pointing error (rad), added to every antenna
    :param seed: Random seed (also re-applied before the global error)
    :param kwargs:
    :return: PointingTable
    """
    if seed is not None:
        numpy.random.seed(seed)
    if static_pointing_error is None:
        static_pointing_error = [0.0, 0.0]
    # Radians to arcseconds, for logging only
    r2s = 180.0 * 3600.0 / numpy.pi
    # Start from zero pointing offsets and accumulate each error term.
    pt.data['pointing'] = numpy.zeros(pt.data['pointing'].shape)
    ntimes, nant, nchan, nrec, _ = pt.data['pointing'].shape
    if pointing_error > 0.0:
        log.debug("simulate_pointingtable: Simulating dynamic pointing error = %g (rad) %g (arcsec)"
                  % (pointing_error, r2s * pointing_error))
        pt.data['pointing'] += numpy.random.normal(0.0, pointing_error, pt.data['pointing'].shape)
    if (abs(static_pointing_error[0]) > 0.0) or (abs(static_pointing_error[1]) > 0.0):
        # Fixed seed so the static (per-antenna) errors are reproducible across
        # calls; note this overrides any caller-supplied seed from here on.
        numpy.random.seed(18051955)
        log.debug("simulate_pointingtable: Simulating static pointing error = (%g, %g) (rad) (%g, %g)(arcsec)"
                  % (static_pointing_error[0], static_pointing_error[1],
                     r2s * static_pointing_error[0], r2s * static_pointing_error[1]))
        # One offset per antenna, broadcast over the time axis.
        static_pe = numpy.zeros(pt.data['pointing'].shape[1:])
        static_pe[...,0] = numpy.random.normal(0.0, static_pointing_error[0],
                                               static_pe[...,0].shape)[numpy.newaxis, ...]
        static_pe[...,1] = numpy.random.normal(0.0, static_pointing_error[1],
                                               static_pe[...,1].shape)[numpy.newaxis, ...]
        pt.data['pointing'] += static_pe
    if global_pointing_error is not None:
        # Re-seed so the global error does not depend on how many draws the
        # earlier terms consumed.
        if seed is not None:
            numpy.random.seed(seed)
        log.debug("simulate_pointingtable: Simulating global pointing error = [%g, %g] (rad) [%g,s %g] (arcsec)"
                  % (global_pointing_error[0], global_pointing_error[1],
                     r2s * global_pointing_error[0], r2s * global_pointing_error[1]))
        pt.data['pointing'][..., :] += global_pointing_error
    return pt
def simulate_pointingtable_from_timeseries(pt, type='wind', time_series_type='precision',
pointing_directory=None, reference_pointing=False,
seed=None):
"""Create a pointing table with time series created from PSD.
:param pt: Pointing table to be filled
:param type: Type of pointing: 'tracking' or 'wind'
:param pointing_file: Name of pointing file
:param reference_pointing: Use reference pointing?
:return:
"""
if seed is not None:
numpy.random.seed(seed)
if pointing_directory is None:
pointing_directory = arl_path("data/models/%s" % time_series_type)
pt.data['pointing'] = numpy.zeros(pt.data['pointing'].shape)
ntimes, nant, nchan, nrec, _ = pt.data['pointing'].shape
# Use az and el at the beginning of this pointingtable
axis_values = pt.nominal[0,0,0,0,0]
el = pt.nominal[0,0,0,0,1]
el_deg = el * 180.0 / numpy.pi
az_deg = axis_values * 180.0 / numpy.pi
if el_deg < 30.0:
el_deg = 15.0
elif el_deg < (90.0+45.0)/2.0:
el_deg = 45.0
else:
el_deg = 90.0
if abs(az_deg) < 45.0 / 2.0:
az_deg = 0.0
elif abs(az_deg) < (45.0 + 90.0)/2.0:
az_deg = 45.0
elif abs(az_deg) < (90.0 + 135.0)/2.0:
az_deg = 90.0
elif abs(az_deg) < (135.0 + 180.0)/2.0:
az_deg = 135.0
else:
az_deg = 180.0
pointing_file = '%s/El%dAz%d.dat' % (pointing_directory, int(el_deg), int(az_deg))
log.debug("simulate_pointingtable_from_timeseries: Reading wind PSD from %s" % pointing_file)
psd = numpy.loadtxt(pointing_file)
# define some arrays
freq = psd[:, 0]
axesdict = {
"az": psd[:, 1],
"el": psd[:, 2],
"pxel": psd[:, 3],
"pel": psd[:, 4]
}
if type == 'tracking':
axes = ["az", "el"]
elif type == 'wind':
axes = ["pxel", "pel"]
else:
raise ValueError("Pointing type %s not known" % type)
<|fim▁hole|>
for axis in axes:
axis_values = axesdict[axis]
if (axis == "az") or (axis == "el"):
# determine index of maximum PSD value; add 50 for better fit
axis_values_max_index = numpy.argwhere(axis_values == numpy.max(axis_values))[0][0] + 50
axis_values_max_index = min(axis_values_max_index, len(axis_values))
# max_freq = 2.0 / pt.interval[0]
max_freq = 0.4
freq_max_index = numpy.argwhere(freq > max_freq)[0][0]
else:
break_freq = 0.01 # not max; just a break
axis_values_max_index = numpy.argwhere(freq>break_freq)[0][0]
# max_freq = 2.0 / pt.interval[0]
max_freq = 0.1
freq_max_index = numpy.argwhere(freq > max_freq)[0][0]
# construct regularly-spaced frequencies
regular_freq = numpy.arange(freq[0], freq[freq_max_index], freq_interval)
regular_axis_values_max_index = numpy.argwhere(numpy.abs(regular_freq-freq[axis_values_max_index])==numpy.min(numpy.abs(regular_freq-freq[axis_values_max_index])))[0][0]
# print ('Frequency break: ', freq[az_max_index])
# print ('Max frequency: ', max_freq)
#
# print ('New frequency break: ', regular_freq[regular_az_max_index])
# print ('New max frequency: ', regular_freq[-1])
if axis_values_max_index>=freq_max_index:
raise ValueError('Frequency break is higher than highest frequency; select a lower break')
# use original frequency break and max frequency to fit function
# fit polynomial to psd up to max value
import warnings
from numpy import RankWarning
warnings.simplefilter('ignore', RankWarning)
p_axis_values1 = numpy.polyfit(freq[:axis_values_max_index],
numpy.log(axis_values[:axis_values_max_index]), 5)
f_axis_values1 = numpy.poly1d(p_axis_values1)
# fit polynomial to psd beyond max value
p_axis_values2 = numpy.polyfit(freq[axis_values_max_index:freq_max_index],
numpy.log(axis_values[axis_values_max_index:freq_max_index]), 5)
f_axis_values2 = numpy.poly1d(p_axis_values2)
# use new frequency break and max frequency to apply function (ensures equal spacing of frequency intervals)
# resampled to construct regularly-spaced frequencies
regular_axis_values1 = numpy.exp(f_axis_values1(regular_freq[:regular_axis_values_max_index]))
regular_axis_values2 = numpy.exp(f_axis_values2(regular_freq[regular_axis_values_max_index:]))
# join
regular_axis_values = numpy.append(regular_axis_values1, regular_axis_values2)
M0 = len(regular_axis_values)
# check rms of resampled PSD
# df = regular_freq[1:]-regular_freq[:-1]
# psd2rms_pxel = numpy.sqrt(numpy.sum(regular_az[:-1]*df))
# print ('Calculate rms of resampled PSD: ', psd2rms_pxel)
original_regular_freq = regular_freq
original_regular_axis_values = regular_axis_values
# get amplitudes from psd values
if (regular_axis_values<0).any():
raise ValueError('Resampling returns negative power values; change fit range')
amp_axis_values = numpy.sqrt(regular_axis_values*2*freq_interval)
# need to scale PSD by 2* frequency interval before square rooting, then by number of modes in resampled PSD
# Now we generate some random phases
for ant in range(nant):
regular_freq = original_regular_freq
regular_axis_values = original_regular_axis_values
phi_axis_values = numpy.random.rand(len(regular_axis_values)) * 2 * numpy.pi
# create complex array
z_axis_values = amp_axis_values * numpy.exp(1j * phi_axis_values) # polar
# make symmetrical frequencies
mirror_z_axis_values = numpy.copy(z_axis_values)
# make complex conjugates
mirror_z_axis_values.imag -= 2 * z_axis_values.imag
# make negative frequencies
mirror_regular_freq = -regular_freq
# join
z_axis_values = numpy.append(z_axis_values, mirror_z_axis_values[::-1])
regular_freq = numpy.append(regular_freq, mirror_regular_freq[::-1])
# add a 0 Fourier term
z_axis_values = numpy.append(0 + 0 * 1j, z_axis_values)
regular_freq = numpy.append(0, regular_freq)
# perform inverse fft
ts = numpy.fft.ifft(z_axis_values)
# set up and check scalings
N = len(ts)
Dt = pt.interval[0]
ts = numpy.real(ts)
ts *= M0 # the result is scaled by number of points in the signal, so multiply - real part - by this
# The output of the iFFT will be a random time series on the finite
# (bounded, limited) time interval t = 0 to tmax = (N-1) X Dt, #
# where Dt = 1 / (2 X Fmax)
# scale to time interval
times = numpy.arange(ntimes) * Dt
# Convert from arcsec to radians
ts *= numpy.pi / (180.0 * 3600.0)
# We take reference pointing to mean that the pointing errors are zero at the beginning
# of the set of integrations
if reference_pointing:
ts[:] -= ts[0]
# pt.data['time'] = times[:ntimes]
if axis == 'az':
pt.data['pointing'][:, ant, :, :, 0] = ts[:ntimes, numpy.newaxis, numpy.newaxis, ...]
elif axis == 'el':
pt.data['pointing'][:, ant, :, :, 1] = ts[:ntimes, numpy.newaxis, numpy.newaxis, ...]
elif axis == 'pxel':
pt.data['pointing'][:, ant, :, :, 0] = ts[:ntimes, numpy.newaxis, numpy.newaxis, ...]
elif axis == 'pel':
pt.data['pointing'][:, ant, :, :, 1] = ts[:ntimes, numpy.newaxis, numpy.newaxis, ...]
else:
raise ValueError("Unknown axis %s" % axis)
return pt
def ingest_unittest_visibility(config, frequency, channel_bandwidth, times, vis_pol, phasecentre, block=False,
                               zerow=False):
    """Create an empty (zeroed) visibility set for unit tests.

    :param config: Array configuration
    :param frequency: Channel centre frequencies [Hz]
    :param channel_bandwidth: Channel bandwidths [Hz]
    :param times: Observation times / hour angles
    :param vis_pol: Polarisation frame for the visibility data
    :param phasecentre: Phase centre (SkyCoord)
    :param block: If True build a BlockVisibility, otherwise a Visibility
    :param zerow: If True, force the w coordinate to zero
    :return: Visibility or BlockVisibility with all vis values set to 0
    """
    factory = create_blockvisibility if block else create_visibility
    vt = factory(config, times, frequency, channel_bandwidth=channel_bandwidth,
                 phasecentre=phasecentre, weight=1.0, polarisation_frame=vis_pol,
                 zerow=zerow)
    # Start from a clean slate so each test controls exactly what goes in.
    vt.data['vis'][...] = 0.0
    return vt
def create_unittest_components(model, flux, applypb=False, telescope='LOW', npixel=None,
                               scale=1.0, single=False, symmetric=False, angular_scale=1.0):
    """Create a grid of point-source skycomponents for unit tests.

    :param model: Image supplying the WCS, frequency grid and (optionally) npixel
    :param flux: Flux array for every component
    :param applypb: If True, attenuate fluxes by the telescope primary beam
    :param telescope: Telescope name used when applypb is True
    :param npixel: Image size in pixels; derived from model.data when None
    :param scale: Fraction of the image spanned by the component grid
    :param single: If True, skip the grid and keep only the asymmetric offset source
    :param symmetric: If True, omit the deliberately offset source
    :param angular_scale: Scaling applied to all component offsets
    :return: List of Skycomponent
    """
    # Fill the visibility with exactly computed point sources.
    if npixel is None:  # fixed: identity test, not '== None'
        _, _, _, npixel = model.data.shape
    spacing_pixels = int(scale * npixel) // 4
    log.info('Spacing in pixels = %s' % spacing_pixels)
    if not symmetric:
        centers = [(0.2 * angular_scale, 1.1 * angular_scale)]
    else:
        centers = list()
    if not single:
        centers.append([0.0, 0.0])
        for x in numpy.linspace(-1.2 * angular_scale, 1.2 * angular_scale, 7):
            # Skip the centre point: already appended above.
            if abs(x) > 1e-15:
                centers.append([x, x])
                centers.append([x, -x])
    model_pol = model.polarisation_frame
    # Make the list of components
    rpix = model.wcs.wcs.crpix
    components = []
    for center in centers:
        ix, iy = center
        # The phase center in 0-relative coordinates is n // 2 so we centre the grid of
        # components on ny // 2, nx // 2. The wcs must be defined consistently.
        p = int(round(rpix[0] + ix * spacing_pixels * numpy.sign(model.wcs.wcs.cdelt[0]))), \
            int(round(rpix[1] + iy * spacing_pixels * numpy.sign(model.wcs.wcs.cdelt[1])))
        sc = pixel_to_skycoord(p[0], p[1], model.wcs, origin=1)
        log.info("Component at (%f, %f) [0-rel] %s" % (p[0], p[1], str(sc)))
        # Channel images
        comp = create_skycomponent(direction=sc, flux=flux, frequency=model.frequency,
                                   polarisation_frame=model_pol)
        components.append(comp)
    if applypb:
        beam = create_pb(model, telescope=telescope, use_local=False)
        components = apply_beam_to_skycomponent(components, beam)
    return components
def create_unittest_model(vis, model_pol, npixel=None, cellsize=None, nchan=1):
    """Create an empty model image matched to *vis* for unit tests.

    Missing *cellsize*/*npixel* values are filled in from advise_wide_field
    (which is always consulted, matching the original behaviour).
    """
    advice = advise_wide_field(vis, guard_band_image=2.0, delA=0.02, facets=1,
                               wprojection_planes=1, oversampling_synthesised_beam=4.0)
    cellsize = advice['cellsize'] if cellsize is None else cellsize
    npixel = advice['npixels2'] if npixel is None else npixel
    return create_image_from_visibility(vis, npixel=npixel, cellsize=cellsize, nchan=nchan,
                                        polarisation_frame=model_pol)
def insert_unittest_errors(vt, seed=180555, calibration_context="TG", amp_errors=None, phase_errors=None):
    """Simulate calibration gain errors and apply them to a visibility set.

    For each calibration type named in *calibration_context* a gain table is
    simulated and applied (inverse) to *vt*.

    :param vt: Visibility set to corrupt
    :param seed: Random number seed, set to big integer repeat values from run to run.
        NOTE(review): *seed* is not referenced in this body — confirm whether the
        RNG should be seeded here.
    :param calibration_context: String of calibration type letters, e.g. "TG"
    :param phase_errors: Phase error per type, e.g. {'T': 1.0, 'G': 0.1, 'B': 0.01}
    :param amp_errors: Amplitude error per type, e.g. {'T': 0.0, 'G': 0.01, 'B': 0.01}
    :return: The corrupted visibility set
    """
    if amp_errors is None:
        amp_errors = {'T': 0.0, 'G': 0.01, 'B': 0.01}
    if phase_errors is None:
        phase_errors = {'T': 1.0, 'G': 0.1, 'B': 0.01}
    controls = create_calibration_controls()
    for context in calibration_context:
        control = controls[context]
        gaintable = create_gaintable_from_blockvisibility(vt, timeslice=control['timeslice'])
        gaintable = simulate_gaintable(gaintable,
                                       phase_error=phase_errors[context],
                                       amplitude_error=amp_errors[context],
                                       timeslice=control['timeslice'],
                                       phase_only=control['phase_only'],
                                       crosspol=control['shape'] == 'matrix')
        vt = apply_gaintable(vt, gaintable, timeslice=control['timeslice'], inverse=True)
    return vt
|
freq_interval = 0.0001
|
// Thin wrapper around window.localStorage for the access token.
const TOKEN_KEY = 'access_token'

class Auth {
  // An access token in storage counts as "logged in".
  isAuthenticate () {
    return this.getToken() !== null
  }

  setToken (token) {
    window.localStorage.setItem(TOKEN_KEY, token)
  }

  getToken () {
    return window.localStorage.getItem(TOKEN_KEY)
  }

  removeToken () {
    window.localStorage.removeItem(TOKEN_KEY)
  }
}

export default new Auth()
| |
<|file_name|>save.py<|end_file_name|><|fim▁begin|>"""
Simple utils to save and load from disk.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
# TODO(rbharath): Use standard joblib once old-data has been regenerated.
import joblib
from sklearn.externals import joblib as old_joblib
import gzip
import pickle
import pandas as pd
import numpy as np
import os
from rdkit import Chem
def log(string, verbose=True):
    """Emit *string* to stdout unless verbosity is disabled."""
    if not verbose:
        return
    print(string)
def save_to_disk(dataset, filename, compress=3):
    """Save a dataset to file.

    Serialises *dataset* to *filename* via ``joblib.dump``; *compress*
    (0-9, default 3) is passed straight through as the compression level.
    """
    joblib.dump(dataset, filename, compress=compress)
def get_input_type(input_file):
    """Get type of input file. Must be csv/pkl.gz/sdf file."""
    base, ext = os.path.splitext(input_file)
    # A trailing .gz is transparent: classify by the extension underneath it.
    if ext == ".gz":
        _, ext = os.path.splitext(base)
    extension_map = {
        ".csv": "csv",
        ".pkl": "pandas-pickle",
        ".joblib": "pandas-joblib",
        ".sdf": "sdf",
    }
    try:
        return extension_map[ext]
    except KeyError:
        raise ValueError("Unrecognized extension %s" % ext)
def load_data(input_files, shard_size=None, verbose=True):
    """Loads data from disk.

    For CSV files, supports sharded loading for large files.
    """
    if not input_files:
        return
    # The first file's extension decides how the whole batch is read.
    input_type = get_input_type(input_files[0])
    if input_type == "sdf":
        if shard_size is not None:
            log("Ignoring shard_size for sdf input.", verbose)
        for frame in load_sdf_files(input_files):
            yield frame
    elif input_type == "csv":
        for frame in load_csv_files(input_files, shard_size, verbose=verbose):
            yield frame
    elif input_type == "pandas-pickle":
        for path in input_files:
            yield load_pickle_from_disk(path)
    # NOTE(review): "pandas-joblib" inputs fall through and yield nothing —
    # confirm whether that is intentional.
def load_sdf_files(input_files):
    """Load SDF file into dataframe."""
    frames = []
    for path in input_files:
        # Tasks (labels) live in a sibling "<file>.sdf.csv" file.
        tasks_df = next(load_csv_files([path + ".csv"], shard_size=None))
        # Structures are stored in the .sdf file itself.
        print("Reading structures from %s." % path)
        supplier = Chem.SDMolSupplier(str(path), False, False, False)
        records = []
        for index, mol in enumerate(supplier):
            if mol is None:
                continue
            records.append([index, Chem.MolToSmiles(mol), mol])
        structures = pd.DataFrame(records, columns=('mol_id', 'smiles', 'mol'))
        # Inner join keeps only rows present in both structure and task tables.
        frames.append(pd.concat([structures, tasks_df], axis=1, join='inner'))
    return frames
def load_csv_files(filenames, shard_size=None, verbose=True):
    """Load data as pandas dataframe."""
    # First line of user-specified CSV *must* be header.
    shard_num = 1
    for filename in filenames:
        if shard_size is None:
            # Unsharded: hand back the whole file in one dataframe.
            yield pd.read_csv(filename)
            continue
        log("About to start loading CSV from %s" % filename, verbose)
        for chunk in pd.read_csv(filename, chunksize=shard_size):
            log("Loading shard %d of size %s." % (shard_num, str(shard_size)),
                verbose)
            shard_num += 1
            # Normalise missing values to empty strings within each shard.
            yield chunk.replace(np.nan, str(""), regex=True)
def load_from_disk(filename):
    """Load a dataset from file.

    Dispatches on extension (ignoring a trailing .gz): .pkl via pickle,
    .joblib via joblib (with legacy-joblib fallback), .csv via pandas.

    :raises ValueError: for any other extension.
    """
    name = filename
    if os.path.splitext(name)[1] == ".gz":
        name = os.path.splitext(name)[0]
    if os.path.splitext(name)[1] == ".pkl":
        return load_pickle_from_disk(filename)
    elif os.path.splitext(name)[1] == ".joblib":
        # Reconstructed try-body: the original statement was corrupted in this
        # copy; the fallback paths below handle files written by older joblib.
        try:
            return joblib.load(filename)
        except KeyError:
            # Try older joblib version for legacy files.
            return old_joblib.load(filename)
        except ValueError:
            return old_joblib.load(filename)
    elif os.path.splitext(name)[1] == ".csv":
        # First line of user-specified CSV *must* be header.
        df = pd.read_csv(filename, header=0)
        df = df.replace(np.nan, str(""), regex=True)
        return df
    else:
        raise ValueError("Unrecognized filetype for %s" % filename)
def load_sharded_csv(filenames):
    """Load a dataset from multiple files. Each file MUST have same column headers.

    :param filenames: List of .csv (optionally .csv.gz) paths.
    :return: Single dataframe with all rows concatenated and a fresh 0..n-1 index.
    :raises ValueError: if any filename is not a csv file.
    """
    dataframes = []
    for name in filenames:
        placeholder_name = name
        if os.path.splitext(name)[1] == ".gz":
            name = os.path.splitext(name)[0]
        if os.path.splitext(name)[1] == ".csv":
            # First line of user-specified CSV *must* be header.
            df = pd.read_csv(placeholder_name, header=0)
            df = df.replace(np.nan, str(""), regex=True)
            dataframes.append(df)
        else:
            # Bug fix: original raised with an undefined name ('filename'),
            # which itself caused a NameError instead of the intended error.
            raise ValueError("Unrecognized filetype for %s" % placeholder_name)
    # Combine dataframes. pd.concat replaces the chained DataFrame.append
    # calls (removed in pandas 2.x); ignore_index reproduces reset_index(drop=True).
    combined_df = pd.concat(dataframes, ignore_index=True)
    return combined_df
def load_pickle_from_disk(filename):
    """Load dataset from pickle file (gzip-compressed if the name contains .gz)."""
    opener = gzip.open if ".gz" in filename else open
    with opener(filename, "rb") as fh:
        return pickle.load(fh)
|
return joblib.load(filename)
|
<|file_name|>rand_reader.go<|end_file_name|><|fim▁begin|>package restic
import (
"io"
"math/rand"
"github.com/restic/restic/internal/errors"
)
// RandReader allows reading from a rand.Rand.
type RandReader struct {
rnd *rand.Rand
buf []byte
}
// NewRandReader creates a new Reader from a random source.
func NewRandReader(rnd *rand.Rand) io.Reader {
return &RandReader{rnd: rnd, buf: make([]byte, 0, 7)}
}<|fim▁hole|> }
rnd := rd.rnd
for i := 0; i < len(p); i += 7 {
val := rnd.Int63()
p[i+0] = byte(val >> 0)
p[i+1] = byte(val >> 8)
p[i+2] = byte(val >> 16)
p[i+3] = byte(val >> 24)
p[i+4] = byte(val >> 32)
p[i+5] = byte(val >> 40)
p[i+6] = byte(val >> 48)
}
return len(p), nil
}
func (rd *RandReader) Read(p []byte) (int, error) {
// first, copy buffer to p
pos := copy(p, rd.buf)
copy(rd.buf, rd.buf[pos:])
// shorten buf and p accordingly
rd.buf = rd.buf[:len(rd.buf)-pos]
p = p[pos:]
// if this is enough to fill p, return
if len(p) == 0 {
return pos, nil
}
// load multiple of 7 byte
l := (len(p) / 7) * 7
n, err := rd.read(p[:l])
pos += n
if err != nil {
return pos, errors.Wrap(err, "Read")
}
p = p[n:]
// load 7 byte to temp buffer
rd.buf = rd.buf[:7]
n, err = rd.read(rd.buf)
if err != nil {
return pos, errors.Wrap(err, "Read")
}
// copy the remaining bytes from the buffer to p
n = copy(p, rd.buf)
pos += n
// save the remaining bytes in rd.buf
n = copy(rd.buf, rd.buf[n:])
rd.buf = rd.buf[:n]
return pos, nil
}<|fim▁end|>
|
func (rd *RandReader) read(p []byte) (n int, err error) {
if len(p)%7 != 0 {
panic("invalid buffer length, not multiple of 7")
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Rsure is a set of utilities for capturing information about files, and later verifying it is<|fim▁hole|>//! The easiest way to use Rsure is to build the `rsure` executable contained in this crate. This
//! program allows you to use most of the functionality of the crate.
//!
//! However, it is also possible to use the crate programmatically. At the top level of the crate
//! as some utility functions for the most common operations.
//!
//! For example, to scan a directory or do an update use `update`.
//!
//! This example makes use of several of the building blocks necessary to use the store. First is
//! the store itself. `parse_store` is able to decode options that are passed to the command line.
//! it is also possible to build a `store::Plain` store directly.
//!
//! Next are the tags for the snapshot. Generally, this should hold some kind of information about
//! the snapshot itself. For the `Plain` store, it can be just an empty map. Other store types
//! may require certain tags to be present.
#![warn(bare_trait_objects)]
use std::{fs::File, path::Path};
pub use crate::{
errors::{Error, Result},
hashes::Estimate,
node::{
compare_trees, fs, load_from, HashCombiner, HashUpdater, NodeWriter, ReadIterator, Source,
SureNode,
},
progress::{log_init, Progress},
show::show_tree,
store::{parse_store, Store, StoreTags, StoreVersion, TempLoader, Version},
suretree::AttMap,
};
mod errors;
mod escape;
mod hashes;
pub mod node;
mod progress;
mod show;
mod store;
mod surefs;
mod suretree;
// Some common operations, abstracted here.
/// Perform an update scan, using the given store.
///
/// If 'update' is true, use the hashes from a previous run, otherwise perform a fresh scan.
/// Depending on the [`Store`] type, the tags may be kept, or ignored.
///
/// [`Store`]: trait.Store.html
///
/// A simple example:
///
/// ```rust
/// # use std::error::Error;
/// #
/// # fn try_main() -> Result<(), Box<Error>> {
/// let mut tags = rsure::StoreTags::new();
/// tags.insert("name".into(), "sample".into());
/// let store = rsure::parse_store("2sure.dat.gz")?;
/// rsure::update(".", &*store, false, &tags)?;
/// # Ok(())
/// # }
/// #
/// # fn main() {
/// # try_main().unwrap();
/// # }
/// ```
pub fn update<P: AsRef<Path>>(
    dir: P,
    store: &dyn Store,
    is_update: bool,
    tags: &StoreTags,
) -> Result<()> {
    let dir = dir.as_ref();
    // Running tally of files/bytes that still need hashing; filled in by the
    // inspect() callbacks below and later handed to compute_parallel.
    let mut estimate = Estimate { files: 0, bytes: 0 };
    let tmp = if is_update {
        // In update mode, first tmp file is just the scan.
        let scan_temp = {
            let mut tmp = store.make_temp()?;
            let src = fs::scan_fs(dir)?;
            node::save_to(&mut tmp, src)?;
            tmp
        }
        .into_loader()?;
        // Combine the fresh scan with the latest stored version so that
        // unchanged files can reuse their existing hashes.
        let latest = store.load_iter(Version::Latest)?;
        let tmp = {
            let mut tmp = store.make_temp()?;
            let loader = Loader(&*scan_temp);
            let combiner = HashCombiner::new(latest, loader.iter()?)?.inspect(|node| {
                // Count only file nodes that still lack a usable hash.
                if let Ok(n @ SureNode::File { .. }) = node {
                    if n.needs_hash() {
                        estimate.files += 1;
                        estimate.bytes += n.size();
                    }
                }
            });
            node::save_to(&mut tmp, combiner)?;
            tmp
        };
        tmp
    } else {
        // Fresh scan: every unhashed file node counts toward the estimate.
        let mut tmp = store.make_temp()?;
        let src = fs::scan_fs(dir)?.inspect(|node| {
            if let Ok(n @ SureNode::File { .. }) = node {
                if n.needs_hash() {
                    estimate.files += 1;
                    estimate.bytes += n.size();
                }
            }
        });
        node::save_to(&mut tmp, src)?;
        tmp
    }
    .into_loader()?;
    // TODO: If this is an update, pull in hashes from the old version.
    // Update any missing hashes.
    let loader = Loader(&*tmp);
    let hu = HashUpdater::new(loader, store);
    // TODO: This will panic on non-unicode directories.
    let hm = hu.compute_parallel(dir.to_str().unwrap(), &estimate)?;
    // Merge the freshly computed hashes into a new store version and commit.
    let mut tmp2 = store.make_new(tags)?;
    hm.merge(&mut NodeWriter::new(&mut tmp2)?)?;
    tmp2.commit()?;
    /*
    let dir = dir.as_ref();
    let mut new_tree = scan_fs(dir)?;
    if is_update {
        let old_tree = store.load(Version::Latest)?;
        new_tree.update_from(&old_tree);
    }
    let estimate = new_tree.hash_estimate();
    let mut progress = Progress::new(estimate.files, estimate.bytes);
    new_tree.hash_update(dir, &mut progress);
    progress.flush();
    store.write_new(&new_tree, tags)?;
    */
    Ok(())
}
struct Loader<'a>(&'a dyn TempLoader);
impl<'a> Source for Loader<'a> {
fn iter(&self) -> Result<Box<dyn Iterator<Item = Result<SureNode>> + Send>> {
let rd = File::open(self.0.path_ref())?;
Ok(Box::new(load_from(rd)?))
}
}<|fim▁end|>
|
//! still true.
//!
|
<|file_name|>sdi_rc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Resource object code
#
# Created: Sun May 12 18:04:51 2013
# by: The Resource Compiler for PyQt (Qt v5.0.2)
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore
qt_resource_data = b"\
\x00\x00\x03\x54\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x02\xe6\x49\x44\x41\x54\x58\xc3\xd5\
\x97\xcd\x4e\x13\x61\x14\x86\xeb\x35\x94\x95\x7b\x71\xe1\xd2\xc4\
\xe0\x05\xb8\xe2\x0e\x5c\xb8\xf4\x02\x5c\xb1\x30\xea\x05\x18\x96\
\x26\x62\x58\xb8\xb0\x91\x58\x20\xd1\x9d\xbf\x89\xa4\x14\xb1\x52\
\xa4\x48\x45\x94\xfe\xd0\x02\x43\xff\xa6\x9d\x19\xa6\x65\x80\xe3\
\x79\x7b\xfa\x85\x51\x4a\x82\xc9\x21\x86\x49\xde\x9c\x33\xa7\xf3\
\xcd\xfb\x9c\xf3\x4d\x9b\x4e\x84\x88\x22\xff\x53\x91\x73\x01\xc0\
\xc7\xd5\x90\x6e\xff\xa5\xfb\xac\xc7\x3d\x3d\x64\x0d\xa9\x02\xf0\
\x31\x32\x3c\x3c\xbc\x6a\x34\x3a\x3a\xba\x19\x56\x3c\x1e\xaf\x26\
\x93\xc9\x56\x3a\x9d\x76\x13\x89\x44\x6b\x60\x60\x20\xcd\x6b\x6e\
\x68\x02\xa4\x38\xd2\xe1\xe1\x71\x99\xba\xef\xb7\xc9\xb2\x2c\xda\
\xdf\xdf\x27\x86\xf1\x78\xcd\x18\xeb\x8a\x1a\x40\x3f\xf3\xb0\x1c\
\xc7\xa5\x4c\x66\xb9\x0b\x14\x04\x01\xc5\x62\xb1\x3a\xaf\x7b\x70\
\x1a\x88\x53\x01\x1c\x1c\x10\x77\x77\xb2\x6c\xdb\xa1\xf9\xf9\xcf\
\x64\x0e\xd7\x75\xe9\xf9\xc4\x44\x17\x42\x05\x00\x26\x7b\xc1\xc9\
\xaa\x37\x1c\x4a\xce\xcd\x53\xf8\x70\x5d\x0f\x8b\x17\x54\x00\x82\
\x10\x40\x67\x4f\x14\xce\xed\xa6\x47\x1f\x67\x66\xe9\xf5\x9b\xb7\
\x14\x9f\x9c\xa4\xa9\xa9\x69\x7a\xf7\xfe\x03\x45\xa3\xd1\x65\x5e\
\x7f\x41\x05\xc0\xef\x10\xed\xb6\x25\x86\x85\x9a\xe3\x05\x94\x5d\
\xcd\xd1\xe4\xf4\x2b\x7a\x32\xfe\x94\x9e\xc5\x5e\xd0\x4c\x62\x0e\
\x8b\x17\x55\x00\xda\x81\x18\xf5\x13\x20\x3c\xff\x90\x6a\xcd\x36\
\x15\x37\xab\x94\x2f\x6e\x53\x89\x63\x8d\xb7\x85\xd7\x7e\x51\x01\
\xf0\x79\xcc\xcd\x5d\x1e\xb5\xc7\x7b\xdb\xee\x9f\x3b\xbe\xe4\x88\
\x5d\xb8\xbd\xee\xe2\x94\xca\x33\xe0\x75\xe4\xc6\x75\x57\x62\xd8\
\x10\x39\xea\xe6\x33\x44\xd4\x01\xa7\x06\xe0\xf4\x3a\xad\x39\x22\
\x98\x98\x68\x72\x80\x98\x6b\x50\x53\x9d\x00\x00\x2a\x2d\xb9\x31\
\xe2\x4e\x53\x8c\x10\x0d\x04\xf2\x6d\xfb\x28\xb6\x7c\x45\x00\x9b\
\x3b\xdb\x6a\xfc\x69\x8e\x3c\x6c\x88\x1a\xae\x39\x13\x80\x3a\x8f\
\xb7\x54\x23\x2a\xd7\xc5\x04\x06\x06\x00\x35\x28\x9c\x17\xab\xbc\
\x25\xbb\xca\x13\xc0\x4d\x61\x0e\x15\x2a\x72\x6e\xcc\x7e\x5a\x02\
\x68\x6a\xdd\xad\xf1\x94\x27\x00\x53\xdc\x1c\x71\x6d\x5b\x40\x60\
\x9a\xab\x1c\x75\x9e\xeb\x81\x41\x15\x47\x11\xc0\x6a\x89\x31\x0c\
\xd6\x77\x04\x20\x0c\x64\x26\x62\xb6\x69\x75\x8b\xa8\xaa\x09\x50\
\xb6\xc5\xbc\xd0\x03\xf8\xbe\x29\x63\x87\x29\x60\x0c\x18\x84\x1c\
\x00\x5b\x4d\x45\x00\x74\x03\x53\x98\xad\x94\xc5\x1c\xe7\x46\xe6\
\x1c\x00\xc8\x71\x5d\xa9\xa1\x08\x80\xfd\xfc\x56\x12\x73\x33\x01\
\x08\x35\x18\x42\xe8\xda\x7c\x8e\x29\xa8\x4e\x00\x5b\x00\x03\xc8\
\x98\x67\x36\x04\x00\x32\xe6\x85\xde\xf8\x17\x0b\xfc\x2c\xd8\x8a\
\x00\x18\x67\x3a\x4f\xb4\x54\x14\x23\x98\x02\x00\x02\x0c\x3e\xfb\
\xc5\x53\x28\xf0\x43\xb8\x66\x49\xf7\x6b\xf9\x52\x87\xd7\xbe\x54\
\x01\xc8\x55\x8f\xba\x4e\xad\x4b\x0e\x90\xaf\x85\xde\xb7\xc2\x92\
\x3d\x4f\xa6\xb3\xde\xa3\xb1\x71\xeb\xda\xd0\xf5\x15\x98\xb3\x6e\
\xa9\x00\x6c\x34\xa4\x6b\x18\xff\xe0\x11\x7f\x5a\x17\x53\xd4\x13\
\x0b\x59\x6f\xe4\xee\xbd\xe2\xa5\xc1\xcb\x4b\x7c\x6d\x8c\x75\x87\
\x35\xa8\xfa\xb7\x1c\xdd\x65\xd9\x3c\x8f\x1f\x19\xfe\x9e\xcf\x1e\
\x37\xbd\xc9\xba\x78\x26\x6f\x46\x00\x68\xf2\xff\x81\x99\x94\x9e\
\xe9\x3f\xbf\x19\x01\x42\xd3\xf4\xfc\xbd\x9c\x9e\xa5\x7e\x03\x51\
\x6c\x25\xa1\x92\x95\x0a\x77\x00\x00\x00\x00\x49\x45\x4e\x44\xae\
\x42\x60\x82\
\x00\x00\x06\x6d\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x06\x34\x49\x44\x41\x54\x78\x5e\xad\x97\x5b\x6c\x54\xc7\
\x1d\xc6\x7f\x73\xce\xd9\x8b\xbd\xf6\xfa\x16\xa0\xbe\x00\x0e\xb2\
\x69\x63\x24\x42\x4a\x21\x22\xa1\x2d\x95\x62\xa5\x2f\xee\x4b\x68\
\x2b\x95\xa6\x55\xa5\xc6\x60\x55\xaa\xda\xb4\xaa\xfa\x56\x09\x55\
\xca\x03\x94\x27\xda\x07\x84\x14\x29\xad\xc4\x8b\xa5\x52\x83\x79\
\x08\xc5\x18\x39\x0e\x69\xd3\x84\x9a\x9b\x63\x6a\xec\xb2\x04\x1b\
\x3b\xbb\xf6\x7a\x8f\xbd\xbb\xde\xb3\x67\xa6\xc3\x68\x85\xe5\x72\
\x6c\x88\xc9\x27\x7d\xfa\x9f\x9d\x87\xfd\x7e\xf3\x9f\x99\x73\x11\
\x4a\x29\x82\x24\x84\x78\x05\x78\x9e\xc7\x6b\x48\x29\xf5\x77\xd6\
\x28\x27\x20\xb8\x43\xbb\x01\x68\x97\x52\xbe\xc6\x63\x64\x59\xd6\
\x07\x1a\xf6\xbb\x40\xb7\x06\x39\xff\x14\x00\x26\xfc\xb7\xed\xf5\
\xe2\x60\x5d\x44\x44\x6e\xce\x89\x8a\x2b\x57\xae\x50\x5d\x53\x8d\
\x40\x00\xa0\x50\x08\x65\x28\x41\x29\x66\xd3\x69\x5e\xa9\x17\x2f\
\xbc\xb4\x4e\x6c\x3b\xf1\x1f\xb9\x47\x83\x7c\x5b\x43\x4c\x3c\x4d\
\x07\xf6\xff\x60\x8b\xdd\x2c\x25\xf8\x4a\x32\x3c\x3c\x4c\x65\x65\
\x25\x2b\xc9\x75\x5d\x1e\xc0\x6e\xa9\xb0\x22\x1b\xa2\x2a\x72\x3f\
\xa7\xea\x81\xb5\x03\x08\x2d\x05\x48\xa1\x0d\xf4\x5d\xbc\x48\x2e\
\x97\xc3\x2f\x16\x51\x4a\x91\xcf\xe7\x59\x5c\x5c\xa4\x50\x28\x50\
\xd4\x63\xb5\xb5\xb5\x94\x01\x58\x80\xf8\x82\xf6\x80\x01\x00\x36\
\x44\x05\x1f\x0f\xbc\x4b\x3e\x3b\x8f\x85\x44\x95\x32\xe2\xb6\xc4\
\xb6\x04\x21\x21\x70\x3e\x53\x6c\x8c\x3b\x80\x44\x2a\x04\xf0\x9c\
\x10\x02\xe0\xcb\x40\x05\x50\x0f\x34\x60\xc4\x48\x69\x9f\x24\x02\
\x01\x4e\x9c\x38\x21\x00\x81\x05\xd2\x87\x96\x96\x67\x09\x65\x6d\
\x14\xe5\x28\xa5\xb4\x41\x08\x58\x57\x19\x25\xe2\xd8\x44\x42\x16\
\xc3\x13\x73\x5c\xbc\x3d\x41\xf7\x58\x8e\x5c\x24\xbe\xa9\xbd\x7d\
\xf7\xef\x2d\xcb\x5a\xdc\xb1\x63\x47\x59\x55\x55\x95\xd3\xd8\xd8\
\x18\x7e\xe0\x86\x86\x86\xd0\xa5\x4b\x97\xdc\xae\xae\xae\x08\xf0\
\xd6\xaa\x1d\x00\x13\x44\x55\x2c\xc2\x73\xd5\x31\xf2\x9e\x4f\xa1\
\x28\x91\x4a\x61\x09\x41\xd8\xb1\x88\x86\x6c\xe6\x72\x05\x12\xa2\
\x8e\x3f\x9f\xff\x2b\x0d\x4d\x1b\x01\x22\xc0\x66\x96\x84\xef\xfb\
\x78\x9e\x47\x75\x75\xb5\x9e\x50\x4b\xf4\xea\xd5\xab\x87\x84\x10\
\x28\xa5\xde\x5a\x11\xc0\xb2\x41\x00\xb6\x2d\x90\xda\xb6\x14\x38\
\x08\xa4\x12\x58\xc2\x8c\x1b\x8f\x4c\xb9\xec\x7b\xf5\x3b\xd4\x37\
\x36\x11\x7c\x2f\xc1\x84\x67\x32\x19\xca\xcb\xcb\xcd\x66\x3e\x76\
\xec\xd8\x26\xbd\x7f\x0e\x2e\x41\x2c\x01\xd0\xd9\xd9\xa9\x0e\x1d\
\x3a\xa4\x6c\x21\x08\x59\x10\xb6\x2d\x1c\xc7\xc6\x42\x50\xb4\xcd\
\x1a\x1b\x00\xc7\xb2\x88\x38\x96\xae\x02\x60\x59\x78\x10\xc0\xdc\
\xdc\x1c\x35\x35\x35\x06\x20\x1a\x8d\x72\xe4\xc8\x91\xcd\xc0\x03\
\x88\x1b\x1a\xa2\xc7\x62\xb9\xb0\x6d\x74\x30\x66\x8d\xcb\x23\x36\
\xb1\xa8\xa3\xc7\x2c\x32\x8b\x1e\x93\x99\x1c\x63\xa9\x79\xee\xcc\
\x2e\xe8\xdf\x45\x72\xf9\x3c\xab\xc8\x2c\x41\x36\x9b\x35\xa7\x66\
\xe9\xff\x6d\x0e\x1c\x38\xb0\x1e\xe8\x00\x58\x06\xa0\xb4\x74\x16\
\x8e\x0d\xe1\x90\xc0\x53\x8a\xb1\xa4\xcb\x8d\x8c\x83\xd3\xb2\x97\
\xa6\x7d\xaf\xb3\xb5\xe3\x17\xac\xdb\xfb\x3a\x0d\x2f\xb4\x73\xfb\
\xce\x24\xfd\xfd\xfd\x24\x93\x49\x94\x52\xe6\xfa\xf8\xf1\xe3\xe8\
\xba\xac\x33\xe7\xce\x9d\xe3\xe8\xd1\xa3\x1c\x3e\x7c\x98\xde\xde\
\x5e\x12\x89\x84\x04\x2c\xa1\x15\xdc\x01\xed\xff\xce\xe6\xf8\xe7\
\x94\x4f\x6b\xc7\xcf\xf8\xe6\x2f\xdf\x26\xf6\xf5\x37\x99\x7c\xa6\
\x83\x6b\xfe\x2e\xae\xf1\x2d\x64\x6b\x17\xad\x7b\x7f\x4e\x5e\x56\
\x73\xfa\x6f\x67\xd1\x77\x4d\xee\xdc\x9d\xe2\x1b\xaf\x76\x72\xfd\
\xfa\x75\x03\xa0\x67\x6b\xd6\x3f\x16\x8b\x99\xeb\x78\x3c\x8e\xe3\
\x38\x25\x38\x04\xc0\x23\x00\x96\x25\x98\xca\x41\x3a\xde\xca\xfe\
\xdf\xbd\x4d\xd5\xae\xd7\x28\x84\x62\x08\xdb\x42\x59\x82\x6c\x41\
\x72\x7f\x66\x91\x4f\xee\x66\x18\xb8\xea\x72\xfa\x1f\x61\x64\xd5\
\x5e\xae\x8f\xdc\x67\x32\xd7\xc6\x85\x0f\xee\x9b\x00\xed\x87\xa1\
\xcd\xcd\xcd\xb4\xb5\xb5\x19\x37\x35\x35\xa1\xa1\x14\x20\x83\x1f\
\x46\x16\xdc\x71\x15\xdf\xff\xe9\x6f\xa8\x6c\xd8\x48\xe2\xec\x3b\
\x4c\x8f\x5e\xc3\x89\x94\xb1\xb5\x79\x07\x9b\x5b\xb6\xf3\x49\x79\
\x25\x63\x09\x97\xcf\x66\xf2\xdc\x9d\xce\x32\xa1\xed\x88\x0d\x4c\
\x27\xe7\xd8\xb7\x2b\xca\xfa\x25\x00\x33\x7b\x3d\x6b\xea\xea\xea\
\x00\xcc\x75\x2a\x95\x32\x00\x4a\x2b\x10\xa0\xb9\x5a\x70\xe1\x9d\
\x63\x28\x2c\xca\xe6\xc6\xd9\x10\x8f\x52\x94\x92\x7b\xc3\x7d\x24\
\x65\x05\xdb\xda\x7f\x4c\x4d\xdb\xcb\x7c\x3c\x9c\x66\xd2\x5f\xc0\
\xcd\x78\x2c\xcc\x6b\x2f\x78\x20\x00\xb5\x74\x3a\x42\xa1\x90\x09\
\x2d\xdd\xea\x1f\x8e\x01\x2a\xf8\x3e\x60\xc1\xc6\xb8\xa0\x50\x1c\
\x23\x1c\x8b\x53\xb7\xa5\x96\x92\x78\x76\x7d\x05\xe9\xac\xc7\x68\
\xff\x9f\x98\xae\xbc\x4c\xcb\xf6\x83\xb8\x0b\x61\xbc\x82\xa4\x58\
\x94\x78\xda\x21\xc7\x42\x2d\xaa\x80\xe3\x69\xa0\x96\xd5\x15\x01\
\x00\xd6\xc7\x43\x84\xca\x23\xfc\xbf\x6a\x63\x21\x9e\xa9\x0c\x73\
\xe1\xdf\x83\xec\xd9\xf9\x13\xca\xa3\x0e\xb9\x32\x47\x03\x28\x03\
\x61\x6b\x00\x16\x4b\x21\xa5\x1c\x25\x30\x2a\x15\xa4\x5c\x05\x40\
\x58\xa5\x2a\xcc\xf5\x23\xfa\x70\x6c\x86\xf1\x59\x8f\xef\xfd\xfa\
\x8f\xdc\xca\xd4\xe0\x44\x5c\xa2\x11\x1b\xcf\x93\x14\x3d\x07\xd3\
\x01\xa5\x90\x52\xf2\x50\x6a\x59\x01\x56\x05\x10\x08\x4c\x0d\x04\
\x18\x9d\x76\xf9\xd5\x5f\x86\x18\xbd\xb7\x80\x3d\x93\x67\xd3\xba\
\x32\xf2\x79\x5f\xbb\x68\xea\xce\xaf\xd4\x70\xf9\xdd\xe0\x25\x00\
\x9e\x78\x09\x4c\xb8\x10\x3c\xa2\xd6\x2f\x55\xf2\x87\x1f\x3e\xcf\
\xf5\x4f\x33\x44\x1b\xb7\xb1\xf3\xc5\x97\x59\x12\x5c\x4e\x60\x8e\
\xdb\x53\x01\x28\xc0\x12\x25\x00\x6d\xd4\x52\x7d\xb1\xb5\x96\xdd\
\x5b\xe2\x74\xbf\x97\xa5\x6a\xf7\x57\xf9\xd1\x1b\x6f\x10\xa0\xb5\
\x03\x98\xb5\x37\xd5\xd8\x08\x01\xd2\xcb\x53\x70\x53\x78\xf3\x33\
\x14\xb3\x69\x0a\x19\x1f\x25\xfd\xd5\x82\xd6\x08\xf0\xf0\x29\xe7\
\xe3\xe7\x33\x14\xe6\x75\xa8\x0e\xd6\x00\xcb\xf7\x89\x10\xc1\x33\
\x7d\xfa\xd7\x72\x8c\xb2\x13\x37\x03\xc7\x01\xb2\x1e\xfe\xad\x94\
\xcc\x6f\xf7\x44\x54\x03\xd8\x5f\x70\x07\x08\x92\x09\xfd\xd7\x3d\
\x3f\xfd\x7e\x42\xa6\xcf\xdf\xf6\xef\x02\xee\x76\x3b\xfc\x92\x06\
\xa8\xe3\x73\xca\x75\x5d\x1f\x70\x57\xed\x00\x40\x32\xab\x0a\x1f\
\x7e\x2a\xd3\xbd\xb7\xfc\xd4\xcd\x69\x39\x05\xf4\x03\x97\x74\x68\
\xbf\x10\xa2\xd3\xb6\xed\xaf\x7d\x9e\x25\x58\x58\x58\xf0\x07\x06\
\x06\xd2\x27\x4f\x9e\x9c\x06\xba\x83\x00\x3e\x1a\x49\xca\xad\xe3\
\xb3\x2a\xd7\x3b\xe2\xa7\x6e\x4c\xcb\xd1\x52\xe8\x59\x1d\x74\x8b\
\x00\x3d\x09\xc0\xd0\xd0\x90\xdb\xd3\xd3\x93\xd2\x4e\xcf\xce\xce\
\x9e\x2e\xbd\x1d\xdf\x08\x02\xe8\xee\xea\x29\x00\x8c\x04\x84\x06\
\x85\xaf\x08\x30\x35\x35\x55\xd0\x2f\x22\xa9\x53\xa7\x4e\x25\xc7\
\xc7\xc7\x2f\x03\x67\x81\x7e\x1d\xec\xae\xb8\x09\x4b\xdf\x76\xda\
\x4f\x26\x85\x01\x40\x08\x40\x61\x5a\xfc\xde\xe0\x60\xba\xbb\xbb\
\x3b\xa5\xdf\x8a\xcc\x24\xd0\x5e\xed\x73\xcd\x61\xed\x9a\x77\x33\
\x6e\x11\x60\x70\xf0\xfd\x74\x5f\x5f\x5f\xfa\xcc\x99\x33\xa6\xc5\
\xa5\xd0\x8f\x78\x02\x89\xb5\x9e\x63\x21\x44\x18\x78\x13\xd8\x4f\
\x69\x73\x06\xb4\xf8\xb1\xfa\x1f\xbd\xfa\x2a\x5f\xf2\xd8\x15\x9d\
\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x04\xa3\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x04\x35\x49\x44\x41\x54\x58\xc3\xe5\
\x97\xcd\x8f\x54\x45\x14\xc5\x7f\xb7\xea\xd6\x7b\xaf\xdb\x6e\xc7\
\xf9\x40\x9d\x89\x46\x4d\x34\x99\x44\x8d\x1a\x48\x98\xc4\x8c\x1f\
\x1b\xfe\x02\x4c\x5c\xf1\x07\x18\x16\x2e\x4d\x5c\x6b\x58\xc3\x8e\
\xc4\x8d\x1b\x17\xce\x82\x68\x74\x41\x5c\x18\x0d\xe2\xc4\xc6\x00\
\x3d\x60\x50\x51\x19\x60\x02\xa2\x0e\x0c\x83\xd3\xfd\x5e\xf7\x94\
\x8b\xaa\xee\xf9\x60\xe6\x0d\x84\x51\x16\x56\x52\xa9\xce\x7b\xb7\
\xeb\x9e\x3a\xf7\xd4\xa9\x7a\xea\xbd\xe7\x7e\x36\xe5\x3e\xb7\x3e\
\x80\x5d\xbb\x76\xbd\x03\xec\xfd\x8f\xf2\x4e\x35\x1a\x8d\x03\xeb\
\x19\xd8\xbb\xef\xbd\xa3\x3b\x1f\x1f\x76\x00\x9c\x3c\x3a\xcf\xcc\
\x97\x37\x58\x9c\xef\xdc\x53\xa6\xda\xa0\xf2\xdc\x6b\x03\xbc\xb8\
\x67\x10\x80\x8b\x7f\x16\x7c\xf8\xee\x1e\x80\xdb\x00\x70\xfc\xec\
\x1c\xdf\x3f\x30\x04\x78\x2e\xfd\xb8\xc0\xfe\xb7\xce\x6f\xcb\x72\
\x0f\x1d\x79\x9a\x0b\x23\x96\xd3\x9f\x1f\x64\xfc\xd5\x7d\x9b\x6b\
\x40\x45\xb0\x16\x40\x78\x70\x2c\x23\xcb\xb2\x6d\x01\x30\x30\x96\
\x61\x8d\x50\x1b\x7c\x14\x23\x25\x22\x14\x2b\xd8\x18\x91\xd5\x95\
\x73\xe7\xce\x83\x2a\xb8\x04\xd2\x14\xb2\x0c\xd2\x2c\x8c\x49\x0a\
\x49\x12\xde\x77\x3a\x90\xe7\x90\xb7\xa1\xd5\x82\x76\x2b\x8e\x6d\
\x28\x72\xb2\xfa\x38\xd6\x0a\xe3\xaf\xbc\x49\x6b\xf1\xfa\xe6\x00\
\xac\x15\xac\x15\x04\xb0\x46\xd8\xbd\x7b\xe7\x16\x6b\xeb\x86\xae\
\x80\x5a\xa8\x56\x81\xea\x6d\x51\x8d\xaf\x04\xb5\x82\xf7\xa0\xa6\
\x84\x01\x67\x05\x35\x82\x08\xa8\x0a\x95\x2c\xc3\x23\x20\x1e\x08\
\xc0\xf0\x1e\x2f\x02\xde\x23\x12\x26\x15\x7c\x88\x23\xc4\x21\x1e\
\x3c\x21\x5e\x40\x4d\x58\x18\x40\xd7\x4a\x89\x06\xac\xa0\xda\x63\
\x00\x9a\x33\xbf\x05\x8a\x53\x07\x69\x02\x95\x04\xb2\x34\xf6\x04\
\x12\x07\x4e\xa1\xe8\x40\x5e\x40\x2b\x8f\xbd\x05\x4b\x39\xb4\x73\
\xc8\x0b\x54\x87\x71\x3d\x00\x2a\xe5\x25\x70\x31\x40\xd5\x30\x39\
\xf9\xd2\xd6\x0a\xf3\x3e\xd0\xaf\x16\xaa\x1b\x8b\xf6\xd8\x27\x61\
\x61\xbd\x1c\x25\x25\x20\x00\xf0\x81\x8d\x34\x4d\xa3\x3a\xc3\xb3\
\x98\x11\x89\x6c\x07\xda\x63\x09\x56\x98\x5f\x29\x46\xfc\x61\xcd\
\x72\x7f\x61\x1d\x2d\xd1\x80\x3a\x09\x54\x49\x18\x4f\x34\x2f\xe0\
\x9d\x85\xc4\x21\x89\xc3\x67\x09\x92\x69\xd8\x11\x89\xe2\x13\x87\
\x58\x8b\xef\x76\x91\xbc\x80\xbc\x03\xed\x02\xdf\x6a\x23\xed\x02\
\xf2\x02\x9f\x77\x50\x1d\x45\xd5\x20\x78\x3a\xeb\x54\x78\x9b\x06\
\x9c\x33\x78\x0f\x03\x8f\x24\xbc\xfe\xf2\xf3\x77\x68\xe8\x36\x68\
\xa4\xbe\xf1\xeb\xc6\xfc\xdf\xb1\x04\x52\x5e\x82\x44\x4d\x5f\x84\
\x8f\x0d\xa5\x38\xe7\xb6\xc5\x88\x9e\x18\x4b\xb9\x76\xb3\x03\x08\
\x9d\x52\x11\xaa\x90\xb8\x50\xef\x5a\xc5\x30\x7d\xb1\xcb\x40\xc5\
\xb0\x0e\xf4\x26\xad\x57\xf9\x55\x2e\xe1\xe1\xc6\xd2\x32\xf5\xcc\
\x70\x7d\xc9\x84\x2d\xe9\x4a\x19\x10\x9c\x1a\xc0\x73\xe5\x66\x97\
\x2b\x37\xbb\xac\x51\x57\x3f\xd7\xaa\x64\x7e\xc5\x27\xa2\x29\xac\
\x05\x15\xc3\x9c\x0b\xb5\x77\xa6\x6c\x17\xa8\xc1\xa9\x20\xc8\x1a\
\x35\xaf\x9b\x35\x1a\x8f\x59\x31\x9e\xfe\x7b\xe9\xef\x14\x00\xf1\
\x82\xef\x9b\x58\x30\x2b\x57\x56\x02\x55\x21\xd1\x90\xfc\xe7\x53\
\xdf\xf2\xeb\x99\x13\x2c\x2d\xde\xb8\xa7\xfa\x57\x6a\x03\x3c\xf5\
\xec\x4e\x9e\x79\x61\x02\x0f\xa8\x33\x5b\x31\x10\x03\x7c\x87\xf7\
\xf7\xbf\xc1\xc2\xc2\x02\xb7\x6e\xdd\xa2\x28\x0a\x44\x04\x6b\x2d\
\xd6\x5a\x54\x15\x55\xc5\x39\x87\xaa\x62\xad\xc5\x98\xf0\xdf\xe5\
\xe5\x65\xf2\x3c\xef\xf7\x23\xcd\xf9\xb8\xf2\x2d\x18\x70\x56\x50\
\x17\x18\xdc\x31\x3a\xb6\x72\x4f\x38\x7e\x9c\xe9\xe9\x69\x8c\x31\
\x78\xef\x99\x98\x98\x60\x72\x72\xf2\x8e\x59\xd8\x31\x3a\xd6\xdf\
\x86\xae\xd4\x09\x55\x70\x36\xac\xa2\x56\xaf\xf7\x6b\x39\x33\x33\
\xc3\xd0\xd0\x10\xd6\x5a\xbc\xf7\x34\x9b\xcd\xbb\x02\x50\xab\xd7\
\x70\xd1\x88\xb4\xd4\x88\x14\x9c\x0b\x27\x5c\xa0\x2a\x00\xa8\x56\
\xab\x64\x59\xd6\xa7\xb8\x37\xde\x69\x73\x1a\xa9\x17\x41\x4b\xad\
\x38\x1e\xc7\xbd\x23\xb4\xd7\x8c\x31\x88\x44\xdf\x8f\x3a\xb8\xab\
\x9b\xaf\x35\xa8\x0d\xf3\xf6\x18\x2e\x3d\x8e\x83\x29\x6d\xe3\xd5\
\xdb\x12\xa9\xf7\xe5\x56\x6c\xad\xf4\x91\x0e\x8e\x0c\xc3\xf2\xef\
\xdb\x02\xe0\xa1\x91\x61\xd4\xc2\xb5\x2b\x97\x59\x9c\xbf\xbe\x05\
\x03\x36\xf8\xc0\x60\xad\x02\x0b\xdb\xc3\xc0\x50\xad\xc2\xec\xc5\
\x4b\x9c\xfd\xee\x1b\xce\x9f\x9c\x9e\x03\xa6\x36\x04\x60\x24\x5e\
\x4a\x05\x12\x0b\xed\x91\x27\xa9\x3d\x0c\x6f\x1f\x38\xc8\x66\xc7\
\x81\x27\x3a\xf1\x2a\xe7\x35\x1e\x32\x81\x14\x28\xba\x70\xf9\xea\
\x55\xce\x34\x8e\xd1\xfc\xfa\x8b\xb9\xd9\x1f\x4e\x1d\x02\x0e\x6f\
\x08\xe0\xb3\x8f\x3e\xe0\xa7\xd3\x27\x57\x99\xe9\xda\xa3\x86\x55\
\xe6\xbb\x1e\x04\x1b\x3c\x5f\x1d\x6f\x7c\x77\xee\x8f\xd9\x5f\x0e\
\x01\x87\x1b\x8d\xc6\x5f\x1b\x01\x98\x9a\xfe\xf4\xe3\x7f\xf5\x73\
\x6c\x7d\xf2\x35\x00\xe2\xb7\xda\x81\xff\xdd\xd7\xf1\x3f\x4d\xf0\
\x4b\xb9\xe8\x46\x89\xaf\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\
\x60\x82\
\x00\x00\x08\x19\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x07\xab\x49\x44\x41\x54\x58\xc3\xad\
\x57\x5b\x50\x93\x67\x1a\xf6\xca\xce\xec\xcc\xf6\x62\x2f\xbc\xd9\
\xe9\xce\xec\x6e\xbd\xda\xd9\x9b\xb5\xce\xba\x3b\x7b\xb0\xad\xcc\
\x7a\xb1\xce\xce\x3a\xb3\x76\x54\x70\x75\xdb\xe2\x81\xd6\xb6\x54\
\x04\xbb\xa5\x20\x6d\xc1\x82\x06\x08\x07\x51\x42\x80\x80\x80\x02\
\x21\x81\x10\x92\x40\x48\x10\x73\x24\x21\x67\x72\x80\x04\x42\x20\
\x9c\x09\x47\xb5\x54\x78\xf6\xfb\x7e\x13\x16\x30\x58\x8b\x7d\x67\
\x9e\xf9\x2f\x92\xfc\xcf\xfb\x3e\xcf\xfb\xbe\xdf\x97\x5d\x00\x76\
\xfd\x98\x20\xf1\x0b\x82\x14\x02\x03\xc1\x75\x82\x03\xcf\xfd\xfe\
\x8f\x48\xbc\x9b\x20\xe1\x57\xaf\xef\xb5\x2a\x8c\xd6\x65\xdb\x02\
\x60\x19\x1e\x5b\x09\x27\xf1\x33\xfa\x19\x81\x22\xfc\xdc\x3e\x76\
\x48\x7e\x8a\xa0\xb9\xb6\x59\x1c\x32\xcf\xad\x42\x39\xfe\x1d\x44\
\xf6\x51\xd8\xc7\xe6\xe8\x87\x86\x3d\x7b\xf6\x58\x53\x52\xae\x2c\
\xca\x3a\x3a\x10\x4e\xe2\xe5\x49\xc3\xc4\x31\x04\xb7\x3e\x49\xf9\
\x2c\x60\x9b\x5d\x59\x53\x4d\x03\x4d\xb6\x11\x34\xeb\xfb\x20\x31\
\x79\x60\x19\x9d\xc5\xbb\xef\xbe\x3f\xc5\xab\xbe\x83\xf1\x89\x29\
\x4c\x4f\xcf\xae\x92\xef\xd7\xbc\x74\x02\x11\x9f\x0f\xbe\x1d\xe3\
\xb2\x04\x43\x4f\xb4\x33\x40\x8b\x7b\x06\xcd\x3d\x2e\x34\xeb\xec\
\xa8\x57\xf6\x20\x87\x53\x85\x32\x5e\x35\x43\xbc\xb0\xf4\x90\x81\
\xc1\x60\x5c\x26\xbf\x4b\x7c\xe1\x04\x48\x1c\x24\x38\x41\xfd\xdd\
\xea\x73\x27\xf1\xb9\x27\x04\x48\x87\x97\xc1\xd7\xbb\x20\x22\x55\
\x37\xdc\x37\xa2\xb8\x4e\x88\x2c\x56\x3e\xcc\x56\xdb\x3a\x71\x04\
\x2c\x16\x6b\x2c\xfc\xce\xe7\x27\x10\x91\x36\x93\x95\x3f\x46\x7d\
\xa5\xfe\x12\xc4\x6f\xf4\x59\x31\xb6\x02\x7e\xef\x20\x5a\x7b\x9c\
\xe0\x3f\x30\xa1\x4c\x28\x43\x46\x0e\x1b\xb2\x0e\xf9\x26\xd2\xf9\
\xc5\x65\xcc\x2d\x2c\x21\x34\xbf\x88\xbd\x7b\xf7\x5a\xc9\x3b\x7e\
\xba\x6d\x02\x24\x7e\x43\x90\x46\x3d\x35\x13\x69\x75\xb3\x80\xd2\
\x3f\x0f\xcb\xc4\xe2\x9a\x50\xa1\x5a\xb4\x6c\xf1\x59\xa0\xb6\xa0\
\xa6\x5d\x8d\x2f\xb2\x73\x71\xb7\x9e\xff\x0c\x31\x25\x9d\x09\xcd\
\x63\x62\x6a\x06\x83\x43\x81\x27\xe4\xdd\xbc\x2d\xd3\xb0\x3b\x92\
\x03\x33\x26\xd4\x53\xb5\xd3\xfb\x58\x4f\x88\xc5\x03\x21\x88\x2c\
\x43\x50\xba\x46\xd0\xed\x09\x42\xe5\x9b\x42\x9b\x73\xfc\xa9\xcf\
\x5a\x1b\xee\x2a\x74\xc8\xbc\xc9\x45\x09\xa7\x6c\x93\xcf\x9b\x88\
\x27\xa7\x11\x18\x1d\xc3\x80\x6f\x08\xa2\xd6\xd6\x25\xc2\x51\xdb\
\x28\x12\x87\xc6\x1f\xaf\x82\x2f\x62\x94\x4d\x89\x24\x90\x22\xea\
\x52\x2d\x9a\x42\xab\xe8\x18\x79\x04\xa1\xc5\xcf\x10\x53\x74\xf6\
\x0d\xa3\xd3\xe1\x87\xd4\x3c\x80\x16\xbd\x03\x0d\x5d\x06\x14\xd5\
\x0a\x90\x91\x95\x0d\x2f\x79\xf1\xc6\xaa\xa9\xd4\xb3\x73\x0b\x4c\
\xc5\x94\xd8\xdd\xef\x85\xc9\x62\x05\xb7\xbc\x12\xa5\xe5\x95\x4b\
\x13\xf3\xcb\xab\x23\x0f\x01\x37\xd9\x11\xe6\xd9\x15\x84\x97\x15\
\x13\x06\xcb\x3c\xd0\x68\xf2\xa3\xdd\xee\x5f\x27\x96\x3b\x86\x20\
\xb3\x78\xd7\x7d\xe6\x08\xa4\xf8\x3c\x33\x1b\x2a\x8d\x36\xaa\xdc\
\x53\x33\x21\x8c\x8e\x8d\x33\x15\xd3\x26\xe4\x37\x09\xf1\xc1\xc5\
\x8f\x51\x73\xaf\x01\xbe\x65\x60\xfc\x11\xa0\x23\x13\x23\xf2\xce\
\xa1\xbe\x5d\xb9\xb8\x51\x01\x83\x81\x74\x74\x4d\xa7\x1e\x0a\x67\
\x80\xa9\xb8\xdd\xea\x83\xd8\xe8\x42\x93\xca\xcc\xf8\x7c\xe5\xcb\
\x2c\x88\xda\x24\x51\x89\xa7\x67\xe7\x18\x1b\x86\x86\x47\x60\x77\
\x38\x49\x82\x3a\x24\x7c\xf8\x21\xae\xb3\x0b\xe1\x99\x5c\x80\x6f\
\x09\xd0\x90\xde\xe1\x0f\x2c\x81\xab\x1f\xc4\x7d\xef\x04\xdd\x07\
\x1d\x61\xeb\xff\x9f\xc0\x1d\xb9\x16\x1d\xf6\x21\x48\xcc\xfd\x4f\
\x7d\xee\xd4\x22\x9d\x55\x84\xaa\x9a\xba\x4d\x3e\x47\xe4\x8e\xf8\
\x3c\x3c\x12\x84\xd3\xdd\x0f\xbd\xc1\x88\xc2\xe2\x62\x9c\x7e\x2f\
\x1e\x3d\x03\x01\xf4\x2f\x02\x83\x84\xbc\xc5\xff\x2d\xee\x3a\x43\
\x28\x51\x91\xf7\xf6\x05\xf1\x4e\xdc\xbf\x7d\x84\x33\x69\xe3\x20\
\x18\xf4\x33\xab\xe0\xc9\x54\x68\x35\x38\xd1\xd8\xdd\x0b\x9e\x58\
\x89\xac\x5c\xf6\x33\x3e\x47\xaa\x9e\x9c\x9e\x65\xe4\xee\xf7\x0e\
\xa2\xd7\x6c\x41\x43\x03\x1f\x27\x62\xe3\x20\xe9\xd6\xc0\x45\xcf\
\x01\x52\x90\x24\xb8\x86\xb2\x9e\x00\x6e\xb4\xdb\x50\xd1\x1b\x44\
\x85\xce\x8b\x4a\x7e\x0b\x6d\xbe\x9b\x5b\x27\xd1\xa0\x99\xf8\x16\
\x65\x22\x05\xee\x29\xf4\x28\x13\xc8\x90\x78\x35\x0b\x1a\xad\x3e\
\xaa\xdc\x63\x13\x93\xf0\x0d\x0d\xc3\x66\xef\x83\xb4\x5d\x8e\xc4\
\x4b\x97\x90\xc3\xca\xc3\xd4\x63\xc0\x4e\x7a\x49\x31\x4e\xfa\x89\
\x94\x7f\x5b\x3b\x84\x7c\x85\x13\x25\x6a\x1f\x4a\xd5\x03\xe8\xf2\
\x30\xa3\x28\x22\xf8\xf9\x33\x09\x74\x8f\x2e\xa1\xa8\xbe\x15\xa5\
\x7c\x09\xb2\x4a\x2a\xf0\xcf\xe3\x71\x51\xe5\xf6\x07\x46\xd1\xe7\
\xf2\x40\xab\x37\x20\xfd\x6a\x06\x92\xbf\x48\x83\xcd\x37\x02\x27\
\xa9\xda\x40\x1a\x4c\xe0\x7b\x88\x52\x9d\x1f\x45\xdd\xfd\x0c\x71\
\x41\x97\x1b\xc5\xdd\x1e\x88\x9c\x41\xfc\xf9\xcd\xb7\x5d\x84\xeb\
\x6c\xb4\x43\xd0\x28\xf7\x4e\x23\xa7\xfc\x1e\xb2\x4b\xab\xf1\x51\
\xea\x57\x48\xfe\x6f\xea\xfa\x58\x51\xb9\x47\x82\xe3\xf0\x0c\xf8\
\x60\x34\x99\x51\xc9\xab\xc2\xfb\x67\xcf\x41\xfe\x40\x03\x3f\xe9\
\x6e\xb2\x8d\x19\xb9\x6f\x69\x06\x19\xd2\x9b\x2a\x2f\x72\xe5\x0e\
\xe4\x75\xf6\xa1\xf0\xbe\x1b\x1c\x95\x1b\xf9\x9c\xca\x29\xc2\x53\
\xb8\xdd\x29\xdc\x2b\x76\x04\x90\x51\xc8\xc5\x95\x6b\x79\x38\x11\
\x9f\x80\x9b\xb7\x6e\x33\x63\x15\x91\xdb\x6a\x73\x40\x22\x6d\xc7\
\x85\x84\x0f\x50\x74\xbb\x0c\xf3\x2b\x80\x9f\x34\x58\xf7\x24\x20\
\x1c\x7c\x84\x4a\xd3\x18\x38\xfa\x61\x86\x9c\x56\xfd\x55\xb3\x1e\
\xac\x0e\x3b\xb8\x3a\x1f\xd9\x21\x1e\x7a\x2f\xe0\x13\xbc\xba\x5d\
\x02\x26\xbe\xc1\x83\x94\x6f\xd8\x38\x9f\x9c\x8a\x03\x7f\x3d\x04\
\x63\xaf\x99\xe9\x6e\x2a\xb7\x46\xd7\x83\xa4\xcb\xc9\x48\xff\x3a\
\x8b\x8c\xd5\x3c\x53\xb5\x71\xf6\xa9\xdc\x35\xf6\x69\x5c\x97\x59\
\x19\xd9\xbf\x6e\x21\xa7\xa0\xd4\x82\x74\xbe\x1a\x57\x9b\x34\x60\
\xc9\xcc\x10\xbb\x82\xf8\xe5\xaf\x5f\xa7\x67\xc0\x3b\xe1\x75\x1f\
\x35\xcc\x35\xdd\x66\x7c\x94\x96\x85\xb8\x73\x17\xf1\x97\x43\x31\
\x4c\xd5\x74\x99\xf0\xaa\xaa\x71\xfa\xf4\x19\x68\xcc\x0e\x8c\x92\
\x2d\x36\x14\x1e\xab\x5a\xc7\x0c\x78\xe6\x71\x70\x0d\x23\x4c\xa3\
\x65\x8a\x0c\x8c\xec\xb4\xfa\x9c\xb6\x5e\x94\x74\x39\xd0\x66\xf7\
\xaf\x1e\x3d\x11\x4b\x47\x2e\x6f\xc3\x79\x13\x35\x2c\x5c\x99\x1a\
\xf1\x97\x3e\xc7\xd1\xd8\x33\xf8\x38\x31\x09\x86\x5e\x13\x1a\x9b\
\x04\xf8\xdd\x1b\xfb\x51\x4f\xd4\xf1\x90\x99\xee\x9a\x00\xaa\xad\
\x93\x60\x2b\x5d\x0c\x39\xf5\xbc\xf0\xbe\x67\xbd\xea\xcc\x16\x3d\
\x4a\x55\x1e\x08\x6d\x01\x94\xd4\xf1\x43\xe1\x65\x53\x40\xf0\xca\
\xf7\x25\x60\x2b\x6e\x6a\xc7\xa9\x84\x44\xc4\x1c\x39\x8a\xdc\x7c\
\x36\x5a\x5a\xc5\x38\x14\x13\x83\x2f\x39\x35\xc8\x14\x6a\x98\xe6\
\xa2\xd5\xd2\x27\xf5\x9a\x7a\x4c\x13\xa1\x49\x64\xb7\x99\x90\xdb\
\x6e\x46\xb9\xda\x8d\x06\xa5\x76\x39\x2c\x39\x3d\xf9\x4e\x13\xec\
\xd9\x72\xd4\x47\x0d\x3b\xab\x46\x88\x63\xff\x39\x8f\xdf\xee\xfb\
\x3d\x1a\xf9\x02\x9c\xbf\x90\x80\x93\xf1\x17\x70\xa3\xad\x07\x19\
\xc4\x4f\x4a\x14\xe9\x6e\xba\x58\xa8\xef\x2c\xfa\x94\x98\x50\x28\
\xb7\x40\xe9\x0e\x3c\xf9\x57\xec\x29\x2a\x77\x2d\xc1\x67\x04\xfb\
\xb6\xb9\xe4\x44\x8d\xbe\xcc\xb2\x5a\xfc\xe3\xe4\x19\x1c\x3c\xf4\
\x37\xb0\x72\xf3\xb0\xef\xc0\x1f\x50\x20\xd1\x21\x89\x27\x65\x2a\
\xa6\x4b\x85\x3e\xbf\x21\xd5\x46\xe4\x2e\x90\x5b\x21\xb0\x0c\xae\
\xe5\xdc\xe2\xd2\x11\x13\x13\xe4\x87\x6f\x3c\xaf\x3c\xe7\x96\x15\
\x35\x9c\x69\x45\xe5\xf8\xfb\xb1\x58\x1c\x3f\x19\x87\x37\xf6\xef\
\xc7\x8d\x3a\x11\x92\xab\xa4\x0c\x21\xed\x70\xea\x35\x55\x21\x8b\
\x34\x5b\xc9\x03\x37\x2a\x34\x6e\xd4\x49\x3a\x17\xc3\x72\x73\x08\
\x8e\x6d\x95\xfb\x87\x24\xe0\x4a\x65\x73\x70\xe4\xf8\x29\x1c\x3e\
\x7c\x98\x8c\x63\x2e\x32\x05\x2a\x5c\x22\xd5\xd3\x5d\x7e\x4d\xdc\
\x0b\x36\xe9\x74\x76\xa7\x1d\x77\x8c\xe4\x88\xb6\xf9\x9e\x84\xb7\
\x1a\x95\xfb\x22\xbd\x49\xfd\x80\x0b\x6d\xf4\x04\x32\x4a\x78\x4c\
\x0f\x9c\x4b\x49\xc3\xb5\xa6\x2e\x7c\xc2\x6d\x65\x36\x59\xf1\x83\
\x01\x5c\x97\x9a\xc1\x51\x7b\x20\xf3\x04\xd7\xce\x25\x26\x05\x36\
\xc8\xfd\xc7\x9d\xc8\x1d\xd5\x82\xdc\x1a\x01\xce\x5e\x4e\x45\x81\
\x58\x85\x78\xf6\x5d\x5c\xa9\x55\x90\xaa\xfb\xc0\x96\xdb\x50\xad\
\x75\xe3\xae\x54\x41\x2f\x10\xca\x0d\x72\xbf\xba\xd3\x6a\xa3\x05\
\xb7\xa2\x51\xf8\x1d\xaf\x43\x8d\x4f\xb9\x2d\x88\xcb\xe6\xe1\x9a\
\x48\x8f\xaa\x1e\x2f\x9a\x35\xe6\xc7\x7f\x7a\xf3\x2d\x57\x78\xac\
\xa8\xdc\xaf\xbd\xac\xdc\xd1\xe2\x08\xdd\x05\x5c\x75\x1f\xde\xcb\
\xaf\x45\xb9\x76\x00\x32\x67\x60\xf5\xc2\xa7\x97\xa9\xdc\xf7\x08\
\xd2\xa9\xdc\x3b\xf8\x03\xf3\xc2\xf1\x13\x82\xca\x1c\xee\x9d\x50\
\x0b\x39\x94\xb8\x0d\xc2\xc8\x16\xa3\x17\x87\xc3\x2f\x22\xf7\x0e\
\xff\xda\x6d\x8a\xdd\x61\x99\xd5\x1b\xb6\xd8\x6b\xbb\x5e\x32\xbe\
\x2f\x89\xff\x01\x66\xb9\x5f\xfc\x11\x80\x3d\xcf\x00\x00\x00\x00\
\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x2b\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x04\xbd\x49\x44\x41\x54\x58\xc3\xed\
\x57\x6b\x4c\x93\x57\x18\x3e\x23\x71\xc9\x32\xe9\x16\x97\xa8\x54\
\x65\x38\x9d\x02\x15\xf6\x03\x87\x32\x93\x01\x66\x2c\x5b\x70\xc4\
\x30\xff\x60\xa2\x2e\x1a\x3a\x1d\x4e\x03\xba\x31\x89\x5b\xb3\x80\
\xd9\x0c\x84\x02\x19\x58\x1c\x14\x8b\x85\xb2\x82\x95\x5e\xe4\x66\
\x0b\x8e\x31\xf8\xc3\x46\xcb\x2d\x81\x15\xdc\xa8\xc2\x1c\x1b\xb7\
\x6a\x69\x91\xf2\xee\xbc\x87\xaf\x0c\xdc\xb8\x0d\x61\xd9\xb2\x93\
\x3c\xed\x97\xf3\x7d\xfd\xde\xe7\xbc\xef\xf3\x5e\x4a\x00\x80\xfc\
\x93\x20\xff\x0a\x02\x74\x09\x28\x44\x14\xd9\x14\x71\x14\x01\x2b\
\x46\x80\xae\xdd\x64\xdd\xc6\x66\x22\x4c\xf8\x95\xc4\x8b\x47\xc8\
\xa1\xd3\xf7\xc8\x8e\x97\x3b\x38\x32\x61\x2b\x41\x20\x85\x9c\xbe\
\x30\x48\x2e\xdd\x80\x19\x40\x32\xab\x79\x4d\xf4\xbe\xfb\x72\x13\
\x68\x64\x06\x91\x04\x5e\xa3\x51\xf4\x06\xee\x85\x47\xf5\xd0\xbd\
\x83\xcb\x4d\x20\x9b\x9d\xf6\x40\x74\x2f\xbd\x16\x32\x3d\x20\x89\
\x3f\x48\xa5\x2c\x1b\x01\x8c\x31\x79\xc1\xbb\x9d\x88\x4b\xc6\xd7\
\xc6\x26\x0e\xa0\x10\xb9\xfd\x42\xfe\xc5\x2b\x36\x46\x8c\x12\x5c\
\x4e\x02\x93\xa7\xa7\xa7\x0d\xcc\xd3\x39\xb9\x98\x63\x36\x14\x0a\
\xd2\xe4\xa3\x2b\x41\x20\x8c\x29\x9e\x2a\xdf\x37\x47\xeb\xdc\x7b\
\xb5\xcc\x89\x9e\x40\x44\x96\x54\x83\x2b\x2c\x0b\x36\x46\x48\x08\
\x13\xf5\x64\x2a\x7b\x2e\x54\x03\x01\xf8\x03\x37\xbf\xc0\x0e\x34\
\x2a\x54\xdf\x62\x88\x52\xd5\x2c\x58\x03\x74\x1d\x16\x08\x04\x7a\
\x45\x55\xf5\xc8\xa0\x6d\x74\xc2\xd4\x73\xf7\x21\xbe\x73\x51\x95\
\x90\xae\x8f\xd0\x13\xcf\xe5\x94\x83\x87\xb4\x02\x9e\xcc\x2e\x03\
\xd4\x06\xdd\xaf\x99\xcb\xb0\xaf\xaf\xaf\x3e\xbf\xd2\x60\xb5\xdb\
\xed\x80\xf8\x79\xe4\x3e\xc4\x5e\xab\xb4\xb9\x88\x2f\x86\x80\x27\
\xd3\xc0\x67\xf9\x8e\x19\xf5\x60\xd7\x5e\x33\xba\x76\xda\x73\xee\
\x68\xd8\xc7\xc7\x47\x9f\xab\xab\xb0\x0e\x0f\x0d\xc1\x10\x87\xb2\
\xf6\x2e\xe7\x96\x37\xf7\x77\x73\x61\xd8\xbd\xe8\x5e\x80\x2f\x66\
\x9a\xa0\x86\xdf\xa9\x36\x42\xf7\xf0\x03\xd8\x19\x9f\xd4\xcf\xa5\
\xe7\x1a\x8a\x98\x2d\x7e\xfe\x6d\x97\x54\x1a\x6b\x5f\x5f\x1f\xb8\
\xd0\xd1\x73\x07\x62\x72\x15\x56\x4e\xc4\x87\x97\xd4\x8c\x30\x14\
\xe9\x15\xb7\x1e\x38\x1c\x0e\x40\xa4\xd6\x19\x31\x9e\x85\x9b\x05\
\x7e\x6d\xa9\x25\x1a\x5b\x97\xd9\x0c\xe6\x2e\x0a\xf3\x24\x14\xdf\
\x36\x8e\x7b\xbd\x1e\xd1\xcd\x42\xc8\x09\x6f\xa9\x04\x3c\xd1\xbd\
\x56\xab\x15\x10\x77\x7f\x1b\x84\xf3\x92\x5c\xbb\x52\xa9\x84\xfa\
\xfa\x7a\x30\x99\x4c\x0c\x75\xdf\x35\xc1\x51\xb1\x64\x18\xc9\x51\
\x44\x3e\xb6\x76\xcc\xb4\x40\x4f\x93\x5f\x7e\xd3\xd6\xdf\xdf\x0f\
\x32\x99\x0c\x44\x22\x11\xa8\x54\x2a\x90\x4a\xa5\xa0\xd1\x68\x20\
\x4b\x5b\x39\xbe\xe9\x95\xe0\x1f\xb8\x53\xaf\x79\x2c\xf3\x00\x97\
\x8e\x22\x9e\xc7\x86\xe6\x53\x29\x19\xf6\x82\x82\x02\xe6\xe2\xa0\
\xa0\x20\xe0\xf1\x78\x60\xb1\x58\x40\x5b\x5e\x01\xfb\xcf\x26\x0c\
\x2d\xa6\x53\xce\x67\x94\xcf\x09\x4c\x83\xe2\x5b\x7b\xe6\xc2\x60\
\x9a\xb2\x14\x14\x0a\x05\x88\xc5\x62\xc8\xcc\xcc\x84\xa2\xa2\x22\
\x50\xab\xd5\xd0\xd9\xd9\xc9\x60\xec\xfe\xc9\xb9\xc9\xdb\xa7\x75\
\x2e\xb7\xcf\x4b\x80\xae\xb7\xd8\x29\x70\x0e\xc0\x6a\x97\xac\x78\
\x88\xca\x7f\x82\xe2\x29\x89\x0e\x3e\x97\x2b\x21\x5b\x96\x0f\x07\
\x63\xe3\x47\x84\x1f\x26\xd8\x92\x72\x64\x8e\x6f\x1a\xbf\x07\xa3\
\xd1\x08\x2d\xad\x2d\xf0\xcb\xc0\x20\x1c\x38\xf1\xbe\x05\xb3\x62\
\xc1\x04\x5c\x69\x84\x85\x85\x84\x46\xdc\x26\xe7\x32\xac\x2c\xcf\
\x33\xb5\x13\xec\x3b\xe3\xba\xd3\x33\xaf\x82\xe5\xfe\x7a\x89\x06\
\x9e\xde\xfc\x62\x1b\xf7\x3c\x92\x8d\x7b\x66\xab\x4f\x5b\xca\x35\
\xed\x58\x43\x43\x3d\x34\x34\x34\x80\xa5\xb7\x17\x32\x14\xc5\xc3\
\xf3\xe9\xc0\x65\x3c\x92\xe5\x28\x9e\x36\x5d\xe5\x9c\x2a\x32\x78\
\x7d\xf4\x83\x2e\x5a\x6c\x12\x31\x0c\x1b\x25\xea\x71\xf7\x2f\xcb\
\x27\xef\x05\x87\x5f\xfe\xd3\xe4\x44\x0b\x4c\x68\xf4\xc9\x3e\x75\
\x95\x1e\x0c\x06\x03\xb4\xb7\xb7\xc3\xd7\xc6\x96\x31\xae\x81\x09\
\x66\xf1\x36\x6d\x38\x68\x3c\x49\x3a\x3a\x65\xf8\x62\x81\x83\x44\
\xbd\x57\x43\xb6\x0a\x5e\x9b\x2a\xc3\x94\x5c\xb0\x42\x0f\xab\x24\
\xb4\x04\x9f\x4a\xaa\x9b\x43\x37\x31\x28\xd4\x4f\xf2\x0a\xc7\x74\
\x3a\x1d\xd4\xd6\xd6\x82\xc9\x7c\xdb\xb9\x61\x9b\xf7\x5f\xea\x62\
\xb2\xe5\x7e\x9c\x75\x1f\x0d\xf3\xb2\xd4\x4e\xf2\xf6\xb1\xeb\x2e\
\xb6\xae\x94\xc3\x90\x6c\x97\x55\xc1\x4b\x57\xab\x80\x9c\x4d\x6e\
\x5a\xd0\x1c\x49\xbd\xb1\xe7\x88\xb0\xef\xca\x57\xc5\x50\x5a\x5a\
\x0a\x1d\x3f\xf6\x4c\x04\x06\x87\x74\x3c\xaa\x0b\xc2\x84\x46\x8d\
\x07\xc8\x6f\x02\xd9\xf9\xaa\x7e\x9a\xf1\x30\x46\x8e\x36\x20\xaf\
\xbc\x4a\x78\x43\x69\x00\x92\x28\x1d\x98\xcd\x95\xb3\x79\xc3\x7d\
\x3d\xbf\xf9\x44\x6a\xa6\x5d\x2e\x97\x43\x53\x4b\x2b\x44\x1c\x7b\
\xf7\xce\xf4\x14\x25\xae\xf1\x8a\xf5\x77\x9c\xf5\x70\x02\xc2\xd9\
\x0f\x89\xd1\x81\x03\x4f\x8e\xf7\xdc\xd2\x69\xe7\xf3\xdf\x75\xfc\
\x6f\x14\x2e\x36\xd2\xef\xd8\x17\x69\x49\xbe\x2c\x9d\xc8\xd3\x96\
\x3b\xa7\x0f\x31\x8c\x25\xc6\xdf\x9f\xba\x77\x5f\x71\x35\xa0\x41\
\x6c\xb5\x08\x8c\xf9\x94\xf1\xe0\xf0\x33\x4b\x9a\x7c\x68\x13\x5a\
\xbd\xce\xa3\xd9\x6b\x4f\x48\xf7\x0c\x0f\xb0\x0f\xfe\xf3\x87\xc8\
\xf9\x2f\xee\xb9\x49\x6e\x00\xf6\x7b\x3e\xed\xf7\x08\x1e\x2a\x3e\
\x5d\xe5\x58\xaa\xf1\x47\x5a\xf5\xb6\x59\x0b\x11\x1d\xb3\x43\xc9\
\x91\x38\x09\x39\xf9\xa9\x96\x21\xfa\x5c\x1a\x0d\xcf\xb3\xff\xff\
\x37\xfc\x4f\x13\xf8\x1d\xe7\x87\x19\xb9\x44\xc3\x01\xcf\x00\x00\
\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
\x00\x00\x05\x3a\
\x89\
\x50\x4e\x47\x0d\x0a\x1a\x0a\x00\x00\x00\x0d\x49\x48\x44\x52\x00\
\x00\x00\x20\x00\x00\x00\x20\x08\x06\x00\x00\x00\x73\x7a\x7a\xf4\
\x00\x00\x00\x04\x67\x41\x4d\x41\x00\x00\xd6\xd8\xd4\x4f\x58\x32\
\x00\x00\x00\x19\x74\x45\x58\x74\x53\x6f\x66\x74\x77\x61\x72\x65\
\x00\x41\x64\x6f\x62\x65\x20\x49\x6d\x61\x67\x65\x52\x65\x61\x64\
\x79\x71\xc9\x65\x3c\x00\x00\x04\xcc\x49\x44\x41\x54\x58\xc3\xb5\
\x97\x5d\x4c\x5b\x65\x1c\xc6\x77\x6f\xbc\xd9\xe5\x12\x49\x20\x71\
\xd7\x26\xe3\x4e\x13\xb8\x70\xd1\x85\x44\xbd\x50\xe3\x10\x18\xe5\
\x2b\x2e\x26\x4a\x04\x27\x86\xaa\x8b\x99\xe0\xd0\xa2\x6c\x19\x86\<|fim▁hole|>\xf7\x3c\xef\x47\x0f\x87\x00\x1c\xca\x46\xcf\xbd\xfa\xe9\xbb\x4c\
\x5a\x26\x61\x0f\x6a\x60\xca\xd9\xe9\x79\xd9\x9a\x3f\x5d\x50\xf2\
\xa5\xc1\xe9\x8f\xa7\x57\xc3\x40\x30\x02\x84\xa2\x19\xad\xc7\x32\
\x8a\x27\x81\x58\x22\x73\xbf\x79\x6b\xda\x4b\x10\x72\x02\x1c\x7b\
\xe7\xac\xda\x1c\xd8\xc8\x98\x12\x40\x84\x99\x85\xe3\x19\x91\x31\
\x29\x1a\x4b\x61\x25\x94\x44\x38\x9a\x42\x73\x87\xc6\xbe\x13\xc4\
\xff\x02\x90\x12\x93\x79\x24\xf1\xc8\x58\x92\xcf\x1f\x84\x5d\x8c\
\xc2\xe5\x09\x22\x12\x4b\xa3\xf4\xc3\xef\x4d\x34\x75\x59\x01\xb0\
\xeb\xd8\x36\xd5\x90\x9e\x3a\xfc\xcc\xb9\xe7\x5f\x2e\x11\x3f\x56\
\x9e\x45\x45\x55\x0d\x2a\x99\xde\xaf\xad\xc3\x9d\xb1\x89\xc7\x00\
\xac\xb6\x25\xfc\xb9\xe8\x87\x6b\x15\x58\xf6\x04\x10\x08\xc6\xd2\
\xaf\x9c\xbe\x70\x9f\x41\x1c\xd9\x15\x80\x5d\x87\x99\x1a\x8a\x8a\
\x8a\xcc\x92\x5a\x5b\x5b\xdd\xa4\xaf\x55\xad\xfe\xaf\x54\xdf\xa6\
\x06\x06\x06\x31\x39\x35\x85\xd9\xb9\x39\xe8\x26\x26\x50\x50\x50\
\x80\x21\xcd\x6f\x7c\xde\x49\xa6\xf9\x05\xcc\x98\x5c\x1c\xc0\xe1\
\x4f\x41\xf4\x85\xf0\x43\xaf\xce\xcd\x00\x6a\xf6\x02\x50\x43\x66\
\xd8\xe5\x8a\xc7\xe3\xf0\x7a\xbd\x48\xa7\xd3\x98\x9c\x9c\x44\x65\
\x65\x35\x66\x67\x8d\xbc\x81\x07\x66\x1b\x74\xd3\x16\x0e\x40\x32\
\x2d\x78\xf0\xdd\x8d\x51\x8f\xac\x00\xe1\x70\x18\x46\xa3\x91\x8f\
\x53\xa9\x14\x7e\xea\xed\x45\xe3\x27\x9f\x61\x86\x41\x38\x96\xdc\
\x50\x77\x75\xe3\x4c\x43\x23\xce\x35\x9d\xc7\xed\x91\x71\x5c\xbc\
\x3e\x2c\x2f\xc0\xc6\xc6\x06\xf4\x7a\xfd\x63\x40\x7d\x7d\xfd\x50\
\x32\x88\xd0\x46\x1c\x66\x9b\x0b\x82\xc1\x88\xa9\x19\x13\xac\x0e\
\x11\x97\xba\x64\x6e\x80\x00\xa6\xd8\x3a\xd8\x7e\x45\x22\x11\x94\
\x2b\x2a\x30\xae\x13\x40\xe7\x04\x6d\x57\xda\xaa\x34\xbe\x7c\x53\
\xe6\x35\x40\x66\x3a\x9d\x0e\xc3\xc3\xc3\xe8\x65\xf5\xf7\xf7\xf7\
\x43\xab\xd5\xa2\xaa\xba\x06\x63\x77\xf5\x90\x0e\x2a\x77\x90\xed\
\x04\xb6\x0e\xda\xbb\x65\x06\xa0\x79\xb7\xdb\xed\x18\x1a\x1a\x42\
\x67\x67\x27\x7a\x7a\x7a\x38\x50\x49\x69\x19\x6e\x69\xf5\x10\xd7\
\x00\x6f\x08\xb0\xf9\x00\x67\x00\xb8\xd0\x25\x33\xc0\xd6\xd6\x16\
\xdf\x09\x81\x40\x00\xa2\x28\xc2\xef\xf7\x63\x6d\x6d\x0d\xa7\x14\
\x95\xd0\xfc\xae\xe7\xa9\xc9\x7c\xc1\x0b\x98\x3d\x40\x9b\xdc\x00\
\xdb\x41\x36\x37\x37\xf9\x76\xa4\x56\x14\x15\xd5\xe8\xfb\x55\xe0\
\xa9\x1d\x81\x47\x00\xe7\x3b\x0f\x00\x80\xcc\x25\x80\x24\x33\x4f\
\x24\x12\x28\x2b\xaf\xe2\x00\x7f\xb8\x00\x8b\x98\x01\xa0\x36\x5a\
\xd5\x07\x30\x05\xff\x98\x27\x93\x3c\x3d\x4d\x49\xc9\xa9\x4a\x0e\
\xa0\xb7\xb3\x03\x89\x3d\xc5\xf8\x17\x30\xb1\x00\x7c\x71\xf5\x00\
\x00\xa4\xea\xc9\x98\x14\x8b\xc5\x50\xa6\xa8\x82\x7a\x48\xc0\x98\
\x19\xb8\x6b\x05\xe6\x9c\x99\xfb\xe7\x57\x64\x04\x90\xd2\x53\x6a\
\x02\x88\x46\xa3\xdc\x3c\x14\x0a\xa1\xb8\xb4\x02\xd7\x06\x05\xdc\
\x66\x87\xe4\xa0\x01\x1c\x64\xc4\x04\x28\x3b\x64\x06\x48\x3d\x9c\
\x73\x12\x99\xd3\xb9\x40\x20\xc5\x65\x55\xb8\xd8\x2d\xa0\x7f\x3a\
\x63\xae\x7d\x90\x69\xe0\xa3\x76\x99\x00\xfe\x5d\x3d\xa5\x26\xad\
\xae\xae\x72\x88\xb7\x4a\x2a\x70\xb9\x57\xc0\x3d\x1b\xb8\x7e\x9e\
\x01\xee\xcc\x03\x67\x2e\xed\x13\x40\xaa\x9d\x44\x8b\x8e\x92\xd3\
\x71\x4c\xdf\x01\x2b\x2b\x2b\x58\x5f\x5f\xe7\x10\x27\x59\x03\xdf\
\x74\x09\x50\x4f\x00\xbf\xcc\x65\x1a\xb8\x32\x06\x34\xec\xa7\x01\
\xc9\x58\xda\xeb\x64\x4e\x69\x29\x39\x1d\x44\x04\x40\xf5\xd3\xcf\
\xde\x7c\x5b\x81\x96\xeb\x02\x4f\x7e\x75\x1c\xb8\x71\x0f\xf8\x71\
\x2c\x9e\x7e\xbd\x4e\x6d\xa6\x37\xaa\xac\x00\x9e\x64\x2c\x6d\x37\
\x32\x25\x00\xd1\x23\xf2\xe4\x12\xcc\x1b\x27\x15\x68\xef\x11\xa0\
\xbc\x66\x5b\x7f\x4f\x35\xe2\x3c\x71\x9a\xbf\x8e\x69\xf7\xfc\x4a\
\x26\x01\x90\xa9\x24\x69\xb5\x53\x42\x32\x0f\x06\x83\x70\xb9\x5c\
\xdc\x90\x5e\x4a\xe8\xb3\xc7\xe3\x81\xdb\xed\xc6\xf1\x13\xaf\x25\
\x9f\x7d\xa1\x9c\x4c\x3b\x98\x8a\x99\x8e\x3e\xc9\x78\x47\x00\x95\
\x4a\xc5\x01\xa4\x15\x2e\xcd\x37\x19\x52\x52\x3a\xf7\x29\xb5\xc3\
\xe1\xe0\x22\xe3\xc5\xc5\x45\x0e\xf5\xe2\xf1\x97\x5c\xf4\x1e\xb9\
\x93\xe9\xae\x00\x2d\x2d\x2d\x6e\xe9\x60\xa1\xd4\xd2\x97\x0d\x8d\
\x97\x97\x97\xe1\xf3\xf9\x60\xb3\xd9\xf8\x7d\x69\x69\x89\x43\x10\
\x00\x8d\x0b\x0b\x0b\xcd\xb2\x00\xd0\xa2\x92\x52\x93\x11\x8d\xe9\
\x4e\xdf\x78\x54\x3b\x35\x60\xb5\x5a\x79\xf5\xd4\x0a\xfd\xce\x60\
\x30\x24\xf2\xf2\xf2\xee\xb3\x67\x1c\xd9\x17\x40\x53\x53\x93\x5b\
\x9a\x67\x4a\x4f\x22\x13\xaa\x9a\xc6\x16\x8b\x99\x37\x40\x9f\x47\
\x47\x47\x23\x6d\x6d\x6d\xde\xfc\xfc\x7c\x13\xfb\xdb\x41\xa6\xb2\
\xbd\x9a\xff\x27\x40\x73\x73\x33\x9f\x02\x4a\x47\x10\x54\x3f\x55\
\x3f\x3f\x3f\xcf\xeb\xd6\x68\x34\x91\xba\xba\x3a\xe7\xc3\xb4\x5d\
\x4c\x1f\x30\x1d\xcd\xc6\x78\x47\x00\xa5\x52\xe9\x76\x3a\x9d\xbc\
\x62\x4a\x4a\x6f\x3e\x94\xb4\xbe\xbe\xde\x99\x93\x93\x23\x99\x16\
\x67\x53\x75\x56\x00\x8d\x8d\x8d\x6e\x8b\xc5\x82\x81\x81\x81\x48\
\x6d\x6d\xad\x33\x37\x37\x57\x56\xd3\xdd\x00\xf8\x7f\x46\x4c\xc2\
\x41\x99\x6e\xd7\xdf\x43\x39\x56\x18\x85\x70\xc8\x04\x00\x00\x00\
\x00\x49\x45\x4e\x44\xae\x42\x60\x82\
"
qt_resource_name = b"\
\x00\x06\
\x07\x03\x7d\xc3\
\x00\x69\
\x00\x6d\x00\x61\x00\x67\x00\x65\x00\x73\
\x00\x07\
\x04\xca\x57\xa7\
\x00\x6e\
\x00\x65\x00\x77\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x09\
\x0a\xa8\xba\x47\
\x00\x70\
\x00\x61\x00\x73\x00\x74\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x08\xc8\x58\x67\
\x00\x73\
\x00\x61\x00\x76\x00\x65\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x06\xc1\x59\x87\
\x00\x6f\
\x00\x70\x00\x65\x00\x6e\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x07\
\x0a\xc7\x57\x87\
\x00\x63\
\x00\x75\x00\x74\x00\x2e\x00\x70\x00\x6e\x00\x67\
\x00\x08\
\x06\x7c\x5a\x07\
\x00\x63\
\x00\x6f\x00\x70\x00\x79\x00\x2e\x00\x70\x00\x6e\x00\x67\
"
qt_resource_struct = b"\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\
\x00\x00\x00\x00\x00\x02\x00\x00\x00\x06\x00\x00\x00\x02\
\x00\x00\x00\x12\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\
\x00\x00\x00\x7e\x00\x00\x00\x00\x00\x01\x00\x00\x1b\xbc\
\x00\x00\x00\x54\x00\x00\x00\x00\x00\x01\x00\x00\x0e\x70\
\x00\x00\x00\x3e\x00\x00\x00\x00\x00\x01\x00\x00\x09\xc9\
\x00\x00\x00\x26\x00\x00\x00\x00\x00\x01\x00\x00\x03\x58\
\x00\x00\x00\x6a\x00\x00\x00\x00\x00\x01\x00\x00\x16\x8d\
"
def qInitResources():
    # Register the pyrcc-generated resource blobs (data/name/struct byte
    # strings above) with Qt's resource system so embedded images become
    # loadable via ":/..." resource paths.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
def qCleanupResources():
    # Unregister the embedded resources registered by qInitResources().
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)
qInitResources()<|fim▁end|>
|
\x39\x17\xdc\x1a\x16\x98\x80\x40\x6c\xa6\x43\xca\x20\x2b\x83\x1e\
\x28\xcc\xda\xd1\x96\xd2\xd2\x4a\x7b\xfa\x01\xa5\xd0\xef\x16\x1e\
\xdf\xff\xdb\x1d\xc7\xcc\x04\x2a\x87\x93\x3c\x39\x6f\x21\x9c\xe7\
|
<|file_name|>scrapeMps.js<|end_file_name|><|fim▁begin|>/* eslint-disable */
// not run locally - here for reference
function pageFunction(context) {
  // called on every page the crawler visits, use it to extract data from it
  // NOTE(repair): this block was scrambled by dataset FIM markers; the
  // result-object fields below have been restored to their original order.
  var $ = context.jQuery;
  var result = {
    constituency: $('#commons-constituency').text(),
    fullName: $('#commons-biography-header h1').text(),
    party: $('#commons-party').text(),
    email: $('#ctl00_ctl00_FormContent_SiteSpecificPlaceholder_PageContent_addParliamentaryAddress_rptAddresses_ctl00_hypEmailAddress').text(),
    parlTel: $('#ctl00_ctl00_FormContent_SiteSpecificPlaceholder_PageContent_addParliamentaryAddress_rptAddresses_ctl00_pnlTelephone').text(),
    conAddress: $('#ctl00_ctl00_FormContent_SiteSpecificPlaceholder_PageContent_addConstituencyAddress_rptAddresses_ctl00_pnlAddress').text(),
    // .slice(5) drops the leading label characters from the telephone text
    conTel: $('#ctl00_ctl00_FormContent_SiteSpecificPlaceholder_PageContent_addConstituencyAddress_rptAddresses_ctl00_pnlTelephone').text().slice(5)
  };
  // Collect social-media links keyed by their lowercased label
  // (the trailing character of the <span> text -- presumably a colon --
  // is stripped by slice(0,-1); verify against the live page markup).
  $('.social-media li').each(function() {
    const type = $(this).find('span').text().slice(0,-1).toLowerCase();
    console.log(type);
    const link = $(this).find('a').attr('href');
    console.log(link);
    result[type] = link;
  });
  return result;
}
|
<|file_name|>pyarena.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
/// Opaque FFI stand-in for CPython's `PyArena` allocator (per the file
/// name; confirm against the bound C API). As an empty enum it cannot be
/// instantiated from Rust, so values exist only behind raw pointers.
pub enum PyArena {}
|
<|file_name|>tensor_types.d.ts<|end_file_name|><|fim▁begin|>/**
* @license<|fim▁hole|> * Copyright 2018 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import { Tensor, Variable } from './tensor';
/** @docalias {[name: string]: Tensor} */
export declare type NamedTensorMap = {
[name: string]: Tensor;
};
/** A tensor paired with the string name it is registered under. */
export interface NamedTensor {
    name: string;
    tensor: Tensor;
}
/** Mapping from variable name to the corresponding `Variable`. */
export declare type NamedVariableMap = {
    [name: string]: Variable;
};
/** Callback that receives an array of tensors to be saved (named for
 *  gradient computation -- confirm usage at the call sites). */
export declare type GradSaveFunc = (save: Tensor[]) => void;
/**
* @docalias void|number|string|TypedArray|Tensor|Tensor[]|{[key:
* string]:Tensor|number|string}
*/
export declare type TensorContainer = void | Tensor | string | number | boolean | TensorContainerObject | TensorContainerArray | Float32Array | Int32Array | Uint8Array;
export interface TensorContainerObject {
[x: string]: TensorContainer;
}
export interface TensorContainerArray extends Array<TensorContainer> {
}<|fim▁end|>
| |
<|file_name|>roi_gcibs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
'''
'roi_gcibs.py' compares two groups informed by an a priori bootstrap analysis.
'''
import os
import sys
import argparse
import tempfile, shutil
import json
import pprint
import copy
from collections import defaultdict
from _common import systemMisc as misc
from _common import crun
import error
import message
import stage
import fnndsc as base
class FNNDSC_roigcibs(base.FNNDSC):
'''
This class is a specialization of the FNNDSC base and geared to dyslexia
curvature analysis.
'''
#
# Class member variables -- if declared here are shared
# across all instances of this class
#
_dictErr = {
'subjectSpecFail' : {
'action' : 'examining command line arguments, ',
'error' : 'it seems that no subjects were specified.',
'exitCode' : 10},
'noFreeSurferEnv' : {
'action' : 'examining environment, ',
'error' : 'it seems that the FreeSurfer environment has not been sourced.',
'exitCode' : 11},
'noStagePostConditions' : {
'action' : 'querying a stage for its exitCode, ',
'error' : 'it seems that the stage has not been specified.',
'exitCode' : 12},
'subjectDirnotExist': {
'action' : 'examining the <subjectDirectories>, ',
'error' : 'the directory does not exist.',
'exitCode' : 13},
'Load' : {
'action' : 'attempting to pickle load object, ',
'error' : 'a PickleError occured.',
'exitCode' : 14},
'outDirNotCreate': {
'action' : 'attempting to create the <outDir>, ',
'error' : 'a system error was encountered. Do you have create permission?',
'exitCode' : 15},
'workingDirNotExist': {
'action' : 'attempting to access the <workingDir>, ',
'error' : 'a system error was encountered. Does the directory exist?',
'exitCode' : 16},
}
    def l_pval(self):
        """Return the list of p-value threshold strings (from 'pval' arg)."""
        return self._l_pval
    def l_roi(self):
        """Return the list of ROIs under analysis."""
        return self._l_ROI
    def l_hemisphere(self):
        """Return the list of hemispheres (from 'hemi' arg)."""
        return self._l_hemi
    def l_surface(self):
        """Return the list of surfaces (from 'surface' arg)."""
        return self._l_surface
    def l_statFunc(self):
        """Return the list of statistical functions (from 'statFunc' arg)."""
        return self._l_statFunc
    def l_group(self):
        """Return the list of groups (from 'group' arg)."""
        return self._l_group
    def l_curvFunc(self):
        """Return the list of curvature functions (from 'curvFunc' arg)."""
        return self._l_curvFunc
    def pval(self):
        """Return the p-value currently being processed by the loops."""
        return self._str_pval
def topDir(self, *args):
if len(args):
self._topDir = args[0]
else:
return self._topDir
def dirSpec(self):
"""
Return the dirSpec based on internal pipeline._str_* variables
"""
return '%s/%s/%s/%s/%s/%s/%s' % (
self.outDir(),
self._str_annotation,
self._str_group,
self._str_pval,
self._str_statFunc,
self._str_surface,
self._str_hemi
)
def dirSpecPartial(self):
"""
Return the dirSpec based on internal pipeline._str_* variables w/o
the leading directories.
"""
return '%s/%s/%s/%s' % ( self._str_pval,
self._str_statFunc,
self._str_surface,
self._str_hemi)
def namespec(self, *args):
'''
Return the namespec based on internal pipeline._str_* variables.
'''
str_sep = "-"
if len(args): str_sep = args[0]
return '%s%s%s%s%s%s%s%s%s%s%s' % (
self._str_annotation, str_sep,
self._str_group, str_sep,
self._str_pval, str_sep,
self._str_statFunc, str_sep,
self._str_surface, str_sep,
self._str_hemi
)
def schedulerStdOutDir(self, *args):
if len(args):
self._str_schedulerStdOutDir = args[0]
else:
return self._str_schedulerStdOutDir
def schedulerStdErrDir(self, *args):
if len(args):
self._str_schedulerStdErrDir = args[0]
else:
return self._str_schedulerStdErrDir
def roi(self):
return self._str_roi
def surface(self):
return self._str_surface
def hemi(self):
return self._str_hemi
def statFunc(self):
return self._str_statFunc
def curvFunc(self):
return self._str_curvFunc
def outDir(self, *args):
if len(args):
self._outDir = args[0]
else:
return self._outDir
def workingDir(self, *args):
if len(args):
self._workingDir = args[0]
else:
return self._workingDir
def clobber(self, *args):
if len(args):
self._b_clobber = args[0]
else:
return self._b_clobber
def group(self):
return self._str_group
def __init__(self, **kwargs):
"""
Basic constructor. Checks on named input args, checks that files
exist and creates directories.
"""
base.FNNDSC.__init__(self, **kwargs)
self._lw = 120
self._rw = 20
self._l_ROI = []
self._l_pval = []
self._l_group = []
self._l_surface = []
self._l_statFunc = []
self._l_curvFunc = []
self._l_hemi = []
self._l_annot = []
self._outDir = ''
self._workingDir = ''
self._stageslist = '12'
self._f_lowerBoundHard = 0.0
self._f_lowerBoundSoft = 0.0
self._f_upperBoundSoft = 0.0
# Internal tracking vars
self._str_pval = ''
self._str_group = ''
self._str_roi = ''
self._str_hemi = ''
self._str_surface = ''
self._str_statFunc = ''
self._str_curvFunc = ''
self._str_annotation = ''
self._topDir = ''
self._d_bootstrapOccurrence = Tree()
self._d_bootstrapThreshold = Tree()
self._d_bootstrapFiltered = Tree()
# Scheduler std out/err dirs
self._str_schedulerStdOutDir = '~/scratch'
self._str_schedulerStdErrDir = '~/scratch'
self._b_clobber = False
for key, value in kwargs.iteritems():
if key == 'outDir': self._outDir = value
if key == 'workingDir': self._workingDir = value
if key == 'stages': self._stageslist = value
if key == 'curvFunc': self._l_curvFunc = value.split(':')
if key == 'pval': self._l_pval = value.split(',')
if key == 'group': self._l_group = value.split(',')
if key == 'surface': self._l_surface = value.split(',')
if key == 'statFunc': self._l_statFunc = value.split(',')
if key == 'hemi': self._l_hemi = value.split(',')
if key == 'annot': self._l_annot = value.split(',')
if key == 'lowerBoundSoft': self._f_lowerBoundSoft = float(value)
if key == 'lowerBoundHard': self._f_lowerBoundHard = float(value)
if key == 'upperBoundSoft': self._f_upperBoundSoft = float(value)
if key == 'schedulerStdOutDir': self._str_schedulerStdOutDir = value
if key == 'schedulerStdErrDir': self._str_schedulerStdErrDir = value
if not os.path.isdir(self._workingDir): errorFatal(self, 'workingDirNotExist')
def initialize(self):
"""
This method provides some "post-constructor" initialization. It is
typically called after the constructor and after other class flags
have been set (or reset).
"""
# Set the stages
self._pipeline.stages_canRun(False)
lst_stages = list(self._stageslist)
for index in lst_stages:
stage = self._pipeline.stage_get(int(index))
stage.canRun(True)
    def run(self):
        """
        The main 'engine' of the class.

        Delegates to base.FNNDSC.run() to drive the pipeline configured
        by initialize().
        """
        base.FNNDSC.run(self)
def innerLoop(self, func_callBack, *args, **callBackArgs):
'''
A loop function that calls func_callBack(**callBackArgs)
at the innermost loop the nested data dictionary structure.
The loop order:
annotation, group, pval, statFunc, surface, hemi
Note that internal tracking object variables, _str_gid ... _str_ctype
are automatically updated by this method.
The **callBackArgs is a generic dictionary holder that is interpreted
by both this loop controller and also passed down to the callback
function.
In the context of the loop controller, loop conditions can
be changed by passing appropriately name args in the
**callBackArgs structure.
'''
ret = True
_str_log = ''
# Selected loop lists may be overridden by the caller; a 'log' entry
# supplies a progress message. All entries (including these) are also
# forwarded verbatim to the callback below.
for key, val in callBackArgs.iteritems():
if key == 'hemi': self._l_hemi = val
if key == 'surface': self._l_surface = val
if key == 'curv': self._l_curvFunc = val
if key == 'group': self._l_group = val
if key == 'log': _str_log = val
if len(_str_log): self._log(_str_log)
# The loop targets are *instance attributes* (self._str_*), so the
# callback reads the current loop state directly off the object rather
# than receiving it as arguments.
for self._str_annotation in self._l_annot:
for self._str_group in self._l_group:
for self._str_pval in self._l_pval:
for self._str_statFunc in self._l_statFunc:
for self._str_surface in self._l_surface:
for self._str_hemi in self._l_hemi:
for self._str_curvFunc in self._l_curvFunc:
# NOTE(review): only the return value of the
# final callback invocation is kept in 'ret'.
ret = func_callBack(**callBackArgs)
if len(_str_log): self._log('[ ok ]\n', rw=self._rw)
return ret
def outputDirTree_build(self, **kwargs):
'''Build the tree structure containing output images

Creates (mkdir -p, so idempotent) the directory
<dirSpec>/<curvFunc> for the current innerLoop state. Intended to be
used as an innerLoop callback; kwargs are accepted but unused.
'''
OSshell('mkdir -p %s/%s' % (
self.dirSpec(),
self._str_curvFunc
))
def tcl_append(self, str_prefix, str_suffix, str_hemi):
    """
    Return the tcl text appended to each generated tcl file; the text
    saves tiff snapshots of the different brain aspects (lateral,
    inferior, superior, the two rotated side views, and medial).

    :param str_prefix: directory into which the tiff snapshots are saved
    :param str_suffix: suffix used in each tiff file name
    :param str_hemi: hemisphere selector -- "lh" or "rh"; this flips the
        frontal/distal labelling of the two rotated side views
    :return: the tcl command string
    :raises ValueError: on an unrecognized hemisphere. (The original
        code fell through with its view labels unassigned and crashed
        with an UnboundLocalError.)
    """
    if str_hemi == "lh":
        frontal_or_distal = "frontal"
        distal_or_frontal = "distal"
    elif str_hemi == "rh":
        frontal_or_distal = "distal"
        distal_or_frontal = "frontal"
    else:
        raise ValueError("str_hemi must be 'lh' or 'rh', got %r" % (str_hemi,))
    # The medial label in the template is literal; the original's unused
    # medial_or_lateral / lateral_or_medial locals have been removed.
    str_content = '''
# Initial lateral view
read_binary_curv
redraw
save_tiff %s/lateral-%s.tiff
# inferior view
rotate_brain_x 90
redraw
save_tiff %s/inferior-%s.tiff
# superior view
rotate_brain_x -180
redraw
save_tiff %s/superior-%s.tiff
# reset
rotate_brain_x 90
# %s view
rotate_brain_y 90
redraw
save_tiff %s/%s-%s.tiff
# medial view
rotate_brain_y 90
redraw
save_tiff %s/medial-%s.tiff
# %s view
rotate_brain_y 90
redraw
save_tiff %s/%s-%s.tiff
exit 0
''' % ( str_prefix, str_suffix,
        str_prefix, str_suffix,
        str_prefix, str_suffix,
        distal_or_frontal,
        str_prefix, distal_or_frontal, str_suffix,
        str_prefix, str_suffix,
        frontal_or_distal,
        str_prefix, frontal_or_distal, str_suffix)
    return str_content
def static_vars(**kwargs):
    """
    Decorator factory: attach every keyword argument to the decorated
    function as a function attribute (poor-man's static variables).
    """
    def decorate(func):
        for name, value in kwargs.items():
            setattr(func, name, value)
        return func
    return decorate
def labelScript_process(self, **kwargs):
    """
    Write (and execute) the tcl files that display filtered ROIs.

    For the current loop state (group/annotation/hemi/surface/curvFunc)
    this method:
      * dumps the filtered occurrence dictionary to a JSON file;
      * writes a tcl script that loads each surviving label and colours
        it blue / green / red according to the soft occurrence bounds;
      * appends the snapshot commands from tcl_append() and runs the
        script through tksurfer via a helper shell script.

    :param kwargs: unused; present for innerLoop callback compatibility
    :return: {"return": "ok"}
    """
    spec = self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc].keys()[0]
    innerDict = self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec]
    str_dirSpec = '%s/%s' % (self.dirSpec(), self._str_curvFunc)
    os.chdir(str_dirSpec)
    str_fileStem = "%s-%s" % (self.namespec("-"), self._str_curvFunc)
    str_TCLfileName = '%s.tcl' % (str_fileStem)
    str_JSONfileName = '%s.json' % (str_fileStem)
    self._log("\n")
    str_currentDir = os.getcwd()
    l_currentDir = str_currentDir.split('/')
    # Log only the trailing (experiment-relevant) components of the cwd.
    l_workingDir = l_currentDir[-8:-1]
    l_workingDir.append(l_currentDir[-1])
    index = 0
    self._log("Current dir: %s\n" % '/'.join(l_workingDir))
    self._log('Creating tcl file: %s...\n' % str_TCLfileName)
    with open(str_JSONfileName, 'w') as JSONfile:
        json.dump(innerDict, JSONfile, indent=4, sort_keys=True)
    self._log('Creating JSON file: %s...\n' % str_JSONfileName)
    for key, val in innerDict.iteritems():
        # Colour coding by occurrence value:
        #   val <= lowerBoundSoft                      -> blue
        #   lowerBoundSoft < val < upperBoundSoft      -> green
        #   val >= upperBoundSoft                      -> red
        if val <= self._f_lowerBoundSoft:
            misc.file_writeOnce(str_TCLfileName,
                'labl_load %s ; labl_set_color %d 0 0 %d\n' %
                (key, index, 2*int(val)), mode='a')
        if val > self._f_lowerBoundSoft and val < self._f_upperBoundSoft:
            # BUGFIX: the original passed four arguments to this
            # three-placeholder format string, raising a TypeError.
            misc.file_writeOnce(str_TCLfileName,
                'labl_load %s ; labl_set_color %d 0 %d 0\n' %
                (key, index, 2*int(val)), mode='a')
        if val >= self._f_upperBoundSoft:
            misc.file_writeOnce(str_TCLfileName,
                'labl_load %s ; labl_set_color %d %d 0 0\n' %
                (key, index, 2*int(val)), mode='a')
        index += 1
    misc.file_writeOnce(str_TCLfileName, self.tcl_append(str_dirSpec, str_fileStem, self._str_hemi), mode='a')
    str_scriptDir = '/neuro/users/rudolphpienaar/projects/dyslexia-curv-analysis-2/sh'
    str_subjectDir = '/neuro/users/rudolphpienaar/projects/dyslexia-curv-analysis-2/results/6-exp-dyslexia-run'
    str_execCmd = 'cd %s; %s/%s -S %s -D %s -h %s' % \
        (
            os.getcwd(),
            str_scriptDir,
            "./tksurfer-run.bash",
            str_subjectDir,
            os.getcwd(),
            self._str_hemi,
        )
    self._log("Shell command = %s\n" % str_execCmd)
    self._log('Executing tcl file...\n')
    OSshell(str_execCmd)
    return {"return": "ok"}
def bootstrap_filteredDictionaryBuild(self, **kwargs):
'''Filters group compared results.

Copies the comparison-group occurrence dictionary into
_d_bootstrapFiltered under the key '<group>-filtered', then removes
every ROI whose occurrence is below the intra-group threshold
(_d_bootstrapThreshold) or at/below the hard lower bound.

NOTE(review): dict.copy() is shallow -- the per-<spec> inner
dictionary remains shared with _d_bootstrapOccurrence, so the pop()
below also mutates the occurrence data. Confirm this is intended.

:param kwargs: unused; innerLoop callback signature
:return: {"return": <the filtered dictionary tree>}
'''
spec = self._d_bootstrapOccurrence[self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc].keys()[0]
self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc] = self._d_bootstrapOccurrence[self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc].copy()
for key in self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec].keys():
if self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] < \
self._d_bootstrapThreshold['%s-threshold' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] or \
self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] <= self._f_lowerBoundHard:
self._d_bootstrapFiltered['%s-filtered' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec].pop(key, None)
return {"return": self._d_bootstrapFiltered}
def bootstrap_thresholdDictionaryBuild(self, **kwargs):
'''Sum the intra-group occurrences for a lower confidence bound

Builds the '<group>-threshold' dictionary from the two single-group
occurrence dictionaries. Per ROI, the threshold is either the sum
(default) or the max of the two intra-group occurrence counts, as
selected by the 'threshold' kwarg.

:param kwargs: threshold='sum'|'max'
:return: {"return": <the threshold dictionary tree>}
'''
str_thresholdOperation = "sum"
for kwarg,val in kwargs.iteritems():
if kwarg == 'threshold': str_thresholdOperation = val
# The comparison group name is two characters, e.g. '12' -> '1' and '2'
str_g1 = self._str_group[0]
str_g2 = self._str_group[1]
spec = self._d_bootstrapOccurrence[str_g1][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc].keys()[0]
# Deep copy so the threshold tree does not alias the occurrence tree.
self._d_bootstrapThreshold['%s-threshold' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc] = copy.deepcopy(self._d_bootstrapOccurrence[self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc])
for key in self._d_bootstrapOccurrence[str_g1][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec].keys():
if str_thresholdOperation == "sum":
self._d_bootstrapThreshold['%s-threshold' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] = \
self._d_bootstrapOccurrence[str_g1][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] + \
self._d_bootstrapOccurrence[str_g2][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key]
if str_thresholdOperation == "max":
if self._d_bootstrapOccurrence[str_g1][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] >= self._d_bootstrapOccurrence[str_g2][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key]:
self._d_bootstrapThreshold['%s-threshold' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] = \
self._d_bootstrapOccurrence[str_g1][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key]
else:
self._d_bootstrapThreshold['%s-threshold' % self._str_group][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key] = \
self._d_bootstrapOccurrence[str_g2][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec][key]
return {"return": self._d_bootstrapThreshold}
def bootstrap_occurrenceDictionariesBuild(self, **kwargs):
"""Build the occurrence dictionaries:
This method captures the bootstrap occurrence dictionaries for the
comparison group as well as the two intra-group variance occurrences.

For each of the three subgroups (<g1>, <g2>, <group>) it changes into
the corresponding bootstrap results directory, pipes the relevant
occurrence files through a filter shell script, and parses the
resulting JSON into _d_bootstrapOccurrence.

:param kwargs: unused; innerLoop callback signature
:return: {"return": <the occurrence dictionary tree>}
"""
str_g1 = self._str_group[0]
str_g2 = self._str_group[1]
str_g3 = self._str_group
l_subgroup = [str_g1, str_g2, str_g3]
for str_subgroup in l_subgroup:
os.chdir(self._workingDir)
# Single groups live under e.g. bootstrap-1000-<curv>/ and are
# compared against '12'; the comparison group itself lives under
# bootstrap-6-<curv>/.
if str_subgroup in [str_g1, str_g2]:
str_g = "%s000" % str_subgroup
str_compGroup = '12'
else:
str_g = '6'
str_compGroup = self._str_group
str_bsDir = "bootstrap-%s-%s/%s/%s" % (str_g,
self._str_curvFunc,
self._str_annotation,
str_compGroup)
os.chdir(str_bsDir)
if self._str_curvFunc == "thickness":
str_cfilt = "thickness"
else:
str_cfilt = "curv"
self._log('Parsing occurrence data for %2d.%s.%s.%s.%s\n' % (int(str_subgroup), self._str_annotation, self._str_hemi, self._str_surface, self._str_curvFunc))
# NOTE: 'occurence.txt' (sic) matches the on-disk filename spelling;
# do not "correct" it here.
OSshell('find . -iname occurence.txt | grep %s | grep %s | grep %s | ../../../../sh/ocfilt.sh -t 0 | python -m json.tool ' % \
(str_cfilt, self._str_hemi, self._str_surface))
self._d_bootstrapOccurrence[str_subgroup][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc] = json.loads(OSshell.stdout())
spec = self._d_bootstrapOccurrence[str_subgroup][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc].keys()[0]
# Drop the spurious empty-string key the shell filter can produce.
self._d_bootstrapOccurrence[str_subgroup][self._str_annotation][self._str_hemi][self._str_surface][self._str_curvFunc][spec].pop("", None)
return {"return": self._d_bootstrapOccurrence}
def synopsis(ab_shortOnly = False):
"""Return the program's usage text.

:param ab_shortOnly: when True, return only the short SYNOPSIS
string; otherwise return the SYNOPSIS plus the full DESCRIPTION.
"""
scriptName = os.path.basename(sys.argv[0])
shortSynopsis = '''
SYNOPSIS
%s \\
[--stages <stages>] \\
[-o|--outDir <outputRootDir>] \\
[-w|--workingDir <workingDir>] \\
[-v|--verbosity <verboseLevel>] \\
[-s|--stages <stages>] \\
[-p|--pval <pvalCutoffList>] \\
[-g|--group <groupList>] \\
[-S|--surface <surfaceList>] \\
[-f|--statFunc <statFuncList>] \\
[-c|--curvFunc <curvFuncList>] \\
[-a|--annot <annotList>] \\
[-m|--hemi <hemiList>] \\
[--schedulerStdOutDir <dir>] \\
[--schedulerStdErrDir <dir>] \\
''' % scriptName
description = '''
DESCRIPTION
`%s' performs a group comparison informed by an a priori bootstrap
analysis. The bootstrap analysis provides a bound on the feature
variance within a group, and the bottom bound of the threshold
of significance becomes the sum of the underlying group variances.
ARGS
--stages <stages>
The stages to execute. This is specified in a string, such as '1234'
which would imply stages 1, 2, 3, and 4.
The special keyword 'all' can be used to turn on all stages.
--pval <pvalCutoffList>
The pval cutoffs to consider. In practice, this is always 'le1,le5'
--group <groupList>
The group list to process.
--surface <surfaceList>
The surface list to process. In practice, 'smoothwm,pial'.
--statFunc <statFuncList>
The statistical functional data to analyze. Typically
'ptile-raw,ptile-convex'.
--curvFunc <curvFuncList>
The curvature bootstrap analysis to use. Typically
'K,BE,S', 'H,K1,K2', 'thickness'.
--hemi <hemiList>
The hemispheres to process. In practice, this is always 'lh,rh'.
--annot <annotList>
The annotation list to process.
--threshold <thresholdOperation>
The operation to apply in thresholding variances from the underlying
bootstrap variances. Either "sum" or "max".
--lowerBound <lowerBound>
The lower bound for filtered (and thresholded) comparisons.
--workingDir <workingDir>
The working directory for the script.
--output <outputRootDir>
The top level directory name to contain results. The fully qualified
output dir is <workingDir>/<outputDir>
--clobber
A boolean flag. If true, will not delete existing output directories
but simply add more results down existing trees, clobbering existing
files if they already exist. If not specified then existing output
trees are deleted. This assures that a given run contains only data
from that run.
Note that running the same experiment multiple times with "--clobber"
will *grow* resultant ROI label files!
For a distributed experiment, delete *all* existing roigcibs trees
*before* running the experiment!
EXAMPLES
''' % (scriptName)
if ab_shortOnly:
return shortSynopsis
else:
return shortSynopsis + description
def f_stageShellExitCode(**kwargs):
    '''
    A simple function that returns a conditional based on the
    exitCode of the passed stage object. It assumes global access
    to the <pipeline> object.

    **kwargs:
        obj=<stage>
        The stage to query for exitStatus.

    Returns True when the stage has never been called, or when its
    last exit code was zero (success); False otherwise.
    '''
    # Compat fix: kwargs.iteritems() is Python-2 only; dict.get() behaves
    # identically on Python 2 and 3 and replaces the extraction loop.
    stage = kwargs.get('obj')
    if not stage: error.fatal(pipeline, "noStagePostConditions")
    if not stage.callCount(): return True
    return not stage.exitCode()
#
# entry point
#
if __name__ == "__main__":
# always show the help if no arguments were specified
if len( sys.argv ) == 1:
print synopsis()
sys.exit( 1 )
verbosity = 0
parser = argparse.ArgumentParser(description = synopsis(True))
parser.add_argument('--verbosity', '-v',
dest='verbosity',
action='store',
default=0,
help='verbosity level')
parser.add_argument('--output', '-o',
dest='outDir',
action='store',
default='roigcibs',
help='output root directory')
parser.add_argument('--workingDir', '-w',
dest='workingDir',
action='store',
default='./',
help='output working directory')
parser.add_argument('--clobber', '-C',
dest='clobber',
action='store_true',
default=False,
help='if specified, do not erase existing output dir if found.')
parser.add_argument('--stages', '-s',
dest='stages',
action='store',
default='01',
help='analysis stages')
parser.add_argument('--pval', '-p',
dest='pval',
action='store',
default='le1',
help='comma separated p-val cutoff threshold')
parser.add_argument('--group', '-g',
dest='group',
action='store',
default='13',
help='comma separated group list to process')
parser.add_argument('--surface', '-S',
dest='surface',
action='store',
default='smoothwm',
help='comma separated surface list to process')
parser.add_argument('--statFunc', '-f',
dest='statFunc',
action='store',
default='ptile-raw',
help='comma separated statistical function list to process')
parser.add_argument('--curvFunc', '-c',
dest='curvFunc',
action='store',
default='H,K,K1,K2,C,BE,S,thickness',
help='comma separated curvature function list to process')
parser.add_argument('--hemi', '-m',
dest='hemi',
action='store',
default='lh,rh',
help='comma separated hemisphere list to process')
parser.add_argument('--annot', '-a',
dest='annot',
action='store',
default='aparc.annot',
help='comma separated annotation list to process')
parser.add_argument('--threshold', '-t',
dest='threshold',
action='store',
default='max',
help='the threshold operation -- "max" or "sum"')
parser.add_argument('--lowerBoundHard', '-L',
dest='lowerBoundHard',
action='store',
default=0.0,
help='the hard lower bound for filtered occurrences.')
parser.add_argument('--lowerBoundSoft', '-l',
dest='lowerBoundSoft',
action='store',
default=80.0,
help='the soft lower bound for filtered occurrences.')
parser.add_argument('--upperBoundSoft', '-u',
dest='upperBoundSoft',
action='store',
default=94.0,
help='the soft upper bound for filtered occurrences.')
parser.add_argument('--schedulerStdOutDir',
dest='schedulerStdOutDir',
action='store',
default='~/scratch',
help='top level directory containing stdout from scheduled jobs')
parser.add_argument('--schedulerStdErrDir',
dest='schedulerStdErrDir',
action='store',
default='~/scratch',
help='top level directory containing stderr from scheduled jobs')
args = parser.parse_args()
# A generic "shell"
OSshell = crun.crun()
OSshell.echo(False)
OSshell.echoStdOut(False)
OSshell.detach(False)
OSshell.waitForChild(True)
Tree = lambda: defaultdict(Tree)
roigcibs = FNNDSC_roigcibs(
outDir = '%s' % (args.outDir),
workingDir = args.workingDir,
stages = args.stages,
pval = args.pval,
group = args.group,
surface = args.surface,
curvFunc = args.curvFunc,
statFunc = args.statFunc,
hemi = args.hemi,
annot = args.annot,
lowerBoundHard = args.lowerBoundHard,
lowerBoundSoft = args.lowerBoundSoft,
upperBoundSoft = args.upperBoundSoft,
schedulerStdOutDir = args.schedulerStdOutDir,
schedulerStdErrDir = args.schedulerStdErrDir,
logTo = '%s/roigcibs.log' % args.workingDir,
syslog = True,
logTee = True)
roigcibs.clobber(args.clobber)
roigcibs.verbosity(args.verbosity)
pipeline = roigcibs.pipeline()
pipeline.poststdout(True)
pipeline.poststderr(True)
os.chdir(roigcibs._workingDir)
roigcibs._workingDir = os.getcwd()
roigcibs.topDir(os.getcwd())
stage0 = stage.Stage(
name = 'roigcibs-0-init',
fatalConditions = True,
syslog = True,
logTo = '%s/roigcibs-0-init.log' % args.workingDir,<|fim▁hole|> if key == 'obj': stage = val
if key == 'pipe': pipeline = val
log = stage._log
os.chdir(pipeline._workingDir)
if os.path.isdir(pipeline.outDir()) and not pipeline.clobber():
log('Existing outDir tree found... deleting...\n')
shutil.rmtree(pipeline.outDir())
OSshell('mkdir -p %s' % pipeline.outDir())
os.chdir(pipeline.outDir())
pipeline.outDir(os.getcwd())
if OSshell.exitCode() != 0: error.fatal(pipeline, 'outDirNotCreate')
d_ret = pipeline.innerLoop(
pipeline.outputDirTree_build,
log = "Building output directory tree...\n"
)
stage.exitCode(0)
return True
stage0.def_stage(f_stage0callback, obj=stage0, pipe=roigcibs)
stage1 = stage.Stage(
name = 'roigcibs-1-filter',
fatalConditions = True,
syslog = True,
logTo = '%s/roigcibs-1-filter.log' % args.workingDir,
logTee = True,
)
def f_stage1callback(**kwargs):
for key, val in kwargs.iteritems():
if key == 'obj': stage = val
if key == 'pipe': pipeline = val
os.chdir(pipeline._workingDir)
d_ret = pipeline.innerLoop(
pipeline.bootstrap_occurrenceDictionariesBuild,
log = "Parsing bootstrap occurrences...\n"
)
d_bootstrapOccurrence = d_ret["return"]
# print(json.dumps(d_bootstrapOccurrence, indent=4, sort_keys=True))
d_ret = pipeline.innerLoop(
pipeline.bootstrap_thresholdDictionaryBuild,
threshold = args.threshold,
log = "Building threshold dictionaries...\n"
)
# print(json.dumps(pipeline._d_bootstrapThreshold, indent=4, sort_keys=True))
d_ret = pipeline.innerLoop(
pipeline.bootstrap_filteredDictionaryBuild,
log = "Building filtered dictionaries...\n"
)
# print(json.dumps(pipeline._d_bootstrapFiltered, indent=4, sort_keys=True))
stage.exitCode(0)
return True
stage1.def_stage(f_stage1callback, obj=stage1, pipe=roigcibs)
stage2 = stage.Stage(
name = 'roigcibs-2-labelReader',
fatalConditions = True,
syslog = True,
logTo = '%s/roigcibs-2-labelReader.log' % args.workingDir,
logTee = True
)
# stage4.def_preconditions(stage1.def_postconditions()[0], **stage1.def_postconditions()[1])
stage2.def_preconditions(lambda **x: True)
def f_stage2callback(**kwargs):
for key, val in kwargs.iteritems():
if key == 'obj': stage = val
if key == 'pipe': pipeline = val
os.chdir(pipeline._workingDir)
d_ret = pipeline.innerLoop(
pipeline.labelScript_process,
log = "Writing and processing FreeSurfer tcl label script files...\n"
)
stage.exitCode(0)
return True
stage2.def_stage(f_stage2callback, obj=stage2, pipe=roigcibs)
roigcibslog = roigcibs.log()
roigcibslog('INIT: %s\n' % ' '.join(sys.argv))
roigcibs.stage_add(stage0)
roigcibs.stage_add(stage1)
roigcibs.stage_add(stage2)
# roigcibs.stage_add(stage3)
# roigcibs.stage_add(stage4)
roigcibs.initialize()
roigcibs.run()<|fim▁end|>
|
logTee = True,
)
def f_stage0callback(**kwargs):
for key, val in kwargs.iteritems():
|
<|file_name|>decorators.py<|end_file_name|><|fim▁begin|>from django.http import HttpResponseRedirect
def anonymous_required(view, redirect_to= None):
return AnonymousRequired(view, redirect_to)
class AnonymousRequired(object):<|fim▁hole|> redirect_to = settings.LOGIN_REDIRECT_URL
self.view = view
self.redirect_to = redirect_to
def __call__(self, request, *args, **kwargs):
if request.user is not None and request.user.is_authenticated:
return HttpResponseRedirect(self.redirect_to)
return self.view(request, *args, **kwargs)<|fim▁end|>
|
def __init__(self, view, redirect_to):
if redirect_to is None:
from django.conf import settings
|
<|file_name|>runner.py<|end_file_name|><|fim▁begin|>import os
import subprocess
# This is an example for using Kataja to launch a visualisation from a python script that doesn't use kataja
# structures, but can output bracket trees. Kataja is launched as a separate process so it doesn't stop the
# main script.
def send_to_kataja(tree, image_file=''):
# return os.system(f'python Kataja.py -image_out test.pdf "{tree}"')
args = ['python', 'Kataja.py']
if image_file:
args.append('-image_out')
args.append(image_file)
args.append(tree)
if os.name == 'posix':
# return os.spawnv(os.P_NOWAIT, '', args)
return subprocess.Popen(args, preexec_fn=os.setpgrp, stdout=subprocess.DEVNULL)<|fim▁hole|>
# python Kataja.py -image_out test.pdf "[ [ A {word} ] [.T did [.V happen ] ] ]"
# tree = """[.{CP} [.{DP(0)} [.{D'} [.{D} which ] [.{NP} [.{N'} [.N wine ] ] ] ] ] [.{C'} [.C \epsilon [.{VP} [.{DP} [.{D'} [.D the ] [.{NP} [.{N'} [.N queen ] ] ] ] ] [.{V'} [.V prefers ] [.{DP} t(0) ] ] ] ] ] ]
# """
tree = """[.{FP} {Graham Greene_i} [.{F'} on_j [.{TP} t_i [.{T'} t_j [.{AuxP} t_j [.{PrtP} kirjoittanut_k [.{VP} t_i [.{V'} t_k [.{DP} tämän kirjan ] ] ] ] ] ] ] ] ]
"""
send_to_kataja(tree, 'test.pdf')
print(f"I just sent {tree} to kataja.")
print("thanks, I'm done now!")<|fim▁end|>
|
elif os.name == 'nt' and hasattr(os, 'P_DETACH'):
return os.spawnv(os.P_DETACH, 'python', args)
|
<|file_name|>Role.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
#-*- coding: utf-8 -*-
###########################################################
# © 2011 Daniel 'grindhold' Brendle and Team
#
# This file is part of Skarphed.
#
# Skarphed is free software: you can redistribute it and/or <|fim▁hole|># version 3 of the License, or (at your option) any later
# version.
#
# Skarphed is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied
# warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with Skarphed.
# If not, see http://www.gnu.org/licenses/.
###########################################################
import pygtk
pygtk.require("2.0")
import gtk
from GenericObject import ObjectPageAbstract
from GenericObject import PageFrame
from GenericObject import FrameLabel
from skarphedadmin.gui import IconStock
from skarphedadmin.glue.lng import _
class RolePage(ObjectPageAbstract):
"""GTK page for viewing and editing a single Role: a headline, an
information frame, and a permission list with toggle checkboxes."""
def __init__(self,parent,role):
ObjectPageAbstract.__init__(self,parent,role)
self.roleId = role.getLocalId()
role.fetchPermissions()
self.headline = gtk.Label()
self.pack_start(self.headline,False)
self.info = PageFrame(self,_("Information"), IconStock.ROLE)
self.infobox = gtk.VBox()
self.info.add(self.infobox)
self.pack_start(self.info,False)
self.perm = PageFrame(self,_("Permissions"), IconStock.PERMISSION)
self.permbox = gtk.Table(1,2,False)
self.permbox.set_row_spacings(10)
self.permbox.set_col_spacings(10)
self.permbox.set_border_width(10)
self.perm_permlabel = FrameLabel(self,_("Please choose the Permissions you want to assign to the user here:"), IconStock.PERMISSION)
self.perm_permlistview = gtk.TreeView()
# Model columns: 0 = granted flag (int), 1 = permission identifier,
# 2 = display name (currently always empty -- see render()).
self.perm_permlist = gtk.ListStore(int, str,str)
self.perm_permlistview.set_model(self.perm_permlist)
self.perm_permlist_col_checkbox = gtk.TreeViewColumn('')
self.perm_permlist_col_identifier = gtk.TreeViewColumn(_('Permission Identifier'))
self.perm_permlist_col_name = gtk.TreeViewColumn(_('Permission Name'))
self.perm_permlistview.append_column(self.perm_permlist_col_checkbox)
self.perm_permlistview.append_column(self.perm_permlist_col_identifier)
self.perm_permlistview.append_column(self.perm_permlist_col_name)
self.perm_permlist_renderer_checkbox= gtk.CellRendererToggle()
self.perm_permlist_renderer_identifier = gtk.CellRendererText()
self.perm_permlist_renderer_name = gtk.CellRendererText()
self.perm_permlist_col_checkbox.pack_start(self.perm_permlist_renderer_checkbox)
self.perm_permlist_col_identifier.pack_start(self.perm_permlist_renderer_identifier)
self.perm_permlist_col_name.pack_start(self.perm_permlist_renderer_name)
self.perm_permlist_col_checkbox.add_attribute(self.perm_permlist_renderer_checkbox,'active',0)
self.perm_permlist_col_identifier.add_attribute(self.perm_permlist_renderer_identifier,'text',1)
self.perm_permlist_col_name.add_attribute(self.perm_permlist_renderer_name,'text',2)
self.perm_permlist_renderer_checkbox.set_activatable(True)
self.perm_permlist_renderer_checkbox.connect("toggled",self.toggledRight)
self.permbox.attach(self.perm_permlabel,0,1,0,1)
self.permbox.attach(self.perm_permlistview,0,1,1,2)
self.perm.add(self.permbox)
self.pack_start(self.perm,False)
self.show_all()
self.render()
def render(self):
"""Refresh the headline and the permission list from the role."""
role = self.getMyObject()
if not role:
return
self.headline.set_markup(_("<b>Edit Role: "+role.getName()+"</b>"))
if role.permissiondata is not None:
self.perm_permlist.clear()
for permission in role.permissiondata:
self.perm_permlist.append((int(permission['granted']),str(permission['right']),''))
def toggledRight(self,renderer = None, path = None):
"""Checkbox handler: compute the flipped granted flag and assign or
remove the permission on the role accordingly.

NOTE(review): the ListStore row itself is not updated here; the
checkbox display presumably refreshes via a later render() -- confirm.
"""
rowiter = self.perm_permlist.get_iter(path)
perm = self.perm_permlist.get_value(rowiter,1)
val = 1-self.perm_permlist.get_value(rowiter,0)
role = self.getApplication().getLocalObjectById(self.roleId)
if val == 1:
role.assignPermission(perm)
else:
role.removePermission(perm)
|
# modify it under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either
|
<|file_name|>ModuleFile.cpp<|end_file_name|><|fim▁begin|>//===- ModuleFile.cpp - Module description --------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the ModuleFile class, which describes a module that<|fim▁hole|>//===----------------------------------------------------------------------===//
#include "clang/Serialization/ModuleFile.h"
#include "ASTReaderInternals.h"
#include "clang/Serialization/ContinuousRangeMap.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Support/Compiler.h"
#include "llvm/Support/raw_ostream.h"
using namespace clang;
using namespace serialization;
using namespace reader;
ModuleFile::~ModuleFile() {
// The lookup-table members are presumably stored type-erased; the
// static_casts recover the concrete on-disk hash-table types so the
// correct destructors run. Deleting a null pointer is a no-op.
delete static_cast<ASTIdentifierLookupTable *>(IdentifierLookupTable);
delete static_cast<HeaderFileInfoLookupTable *>(HeaderFileInfoTable);
delete static_cast<ASTSelectorLookupTable *>(SelectorLookupTable);
}
// Print "Name:" followed by one "local -> global" line per entry in Map
// to stderr; prints nothing at all when the map is empty.
template<typename Key, typename Offset, unsigned InitialCapacity>
static void
dumpLocalRemap(StringRef Name,
const ContinuousRangeMap<Key, Offset, InitialCapacity> &Map) {
if (Map.begin() == Map.end())
return;
using MapType = ContinuousRangeMap<Key, Offset, InitialCapacity>;
llvm::errs() << " " << Name << ":\n";
for (typename MapType::const_iterator I = Map.begin(), IEnd = Map.end();
I != IEnd; ++I) {
llvm::errs() << " " << I->first << " -> " << I->second << "\n";
}
}
// Debugger aid: print a human-readable summary of this module file --
// its imports, the base IDs/offsets of each ID space, and every
// local -> global remapping table -- to stderr.
LLVM_DUMP_METHOD void ModuleFile::dump() {
llvm::errs() << "\nModule: " << FileName << "\n";
if (!Imports.empty()) {
llvm::errs() << " Imports: ";
for (unsigned I = 0, N = Imports.size(); I != N; ++I) {
if (I)
llvm::errs() << ", ";
llvm::errs() << Imports[I]->FileName;
}
llvm::errs() << "\n";
}
// Remapping tables.
llvm::errs() << " Base source location offset: " << SLocEntryBaseOffset
<< '\n';
dumpLocalRemap("Source location offset local -> global map", SLocRemap);
llvm::errs() << " Base identifier ID: " << BaseIdentifierID << '\n'
<< " Number of identifiers: " << LocalNumIdentifiers << '\n';
dumpLocalRemap("Identifier ID local -> global map", IdentifierRemap);
llvm::errs() << " Base macro ID: " << BaseMacroID << '\n'
<< " Number of macros: " << LocalNumMacros << '\n';
dumpLocalRemap("Macro ID local -> global map", MacroRemap);
llvm::errs() << " Base submodule ID: " << BaseSubmoduleID << '\n'
<< " Number of submodules: " << LocalNumSubmodules << '\n';
dumpLocalRemap("Submodule ID local -> global map", SubmoduleRemap);
llvm::errs() << " Base selector ID: " << BaseSelectorID << '\n'
<< " Number of selectors: " << LocalNumSelectors << '\n';
dumpLocalRemap("Selector ID local -> global map", SelectorRemap);
llvm::errs() << " Base preprocessed entity ID: " << BasePreprocessedEntityID
<< '\n'
<< " Number of preprocessed entities: "
<< NumPreprocessedEntities << '\n';
dumpLocalRemap("Preprocessed entity ID local -> global map",
PreprocessedEntityRemap);
llvm::errs() << " Base type index: " << BaseTypeIndex << '\n'
<< " Number of types: " << LocalNumTypes << '\n';
dumpLocalRemap("Type index local -> global map", TypeRemap);
llvm::errs() << " Base decl ID: " << BaseDeclID << '\n'
<< " Number of decls: " << LocalNumDecls << '\n';
dumpLocalRemap("Decl ID local -> global map", DeclRemap);
}
|
// has been loaded from an AST file.
//
|
<|file_name|>PreviousPosition.java<|end_file_name|><|fim▁begin|>/** __ __
* _____ _/ /_/ /_ Computational Intelligence Library (CIlib)
* / ___/ / / / __ \ (c) CIRG @ UP
* / /__/ / / / /_/ / http://cilib.net
* \___/_/_/_/_.___/
*/
package net.sourceforge.cilib.problem.boundaryconstraint;
import net.sourceforge.cilib.entity.Entity;
import net.sourceforge.cilib.entity.Property;<|fim▁hole|>import net.sourceforge.cilib.type.types.Numeric;
import net.sourceforge.cilib.type.types.Types;
import net.sourceforge.cilib.type.types.container.Vector;
/**
* Once the entity has over shot the search space boundaries, re-initialise
* the Entity once again to be within the search space of the problem at a
* random position.
*
* @see Types#isInsideBounds(net.sourceforge.cilib.type.types.Type)
*/
public class PreviousPosition implements BoundaryConstraint {
// Magnitude guard: positions whose summed |2 * x_i| exceeds this value
// (or become NaN / +infinity) are reset to the previous solution.
private double bound = 1.0e290;
/**
 * {@inheritDoc}
 */
@Override
public PreviousPosition getClone() {
// NOTE(review): returns the same instance rather than a copy; safe only
// if the shared 'bound' field is intended -- confirm against the
// BoundaryConstraint cloning contract.
return this;
}
/**
 * {@inheritDoc}
 *
 * Sums |2 * x_i| over the entity's position vector; when the total
 * exceeds {@code bound}, or is NaN or positive infinity, the position is
 * reset to the entity's stored previous solution.
 */
@Override
public void enforce(Entity entity) {
double total = 0;
for (Numeric curElement : (Vector) entity.getPosition()) {
total += Math.abs(curElement.doubleValue() * 2);
}
if (total > bound || Double.isNaN(total) || total == Double.POSITIVE_INFINITY) {
entity.setPosition(entity.get(Property.PREVIOUS_SOLUTION));
}
}
/**
 * Set the reset threshold used by {@link #enforce(Entity)}.
 */
public void setBound(double bound) {
this.bound = bound;
}
}
| |
<|file_name|>CollapsibleToolbar.js<|end_file_name|><|fim▁begin|>//>>built
define(<|fim▁hole|>);<|fim▁end|>
|
"dojox/editor/plugins/nls/hr/CollapsibleToolbar", ({
"collapse": "Spusti traku s alatima editora",
"expand": "Proširi traku s alatima editora"
})
|
<|file_name|>healthToMongo.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2014 Mozilla Corporation
#
# Contributors:
# Anthony Verez [email protected]<|fim▁hole|>from datetime import datetime
from configlib import getConfig, OptionParser
from logging.handlers import SysLogHandler
from pymongo import MongoClient
import os
sys.path.append(os.path.join(os.path.dirname(os.path.abspath(__file__)), '../lib'))
from utilities.toUTC import toUTC
from elasticsearch_client import ElasticsearchClient
from query_models import SearchQuery, TermMatch
logger = logging.getLogger(sys.argv[0])
def loggerTimeStamp(self, record, datefmt=None):
return toUTC(datetime.now()).isoformat()
def initLogger():
logger.level = logging.INFO
formatter = logging.Formatter(
'%(asctime)s - %(name)s - %(levelname)s - %(message)s')
formatter.formatTime = loggerTimeStamp
if options.output == 'syslog':
logger.addHandler(
SysLogHandler(
address=(options.sysloghostname,
options.syslogport)))
else:
sh = logging.StreamHandler(sys.stderr)
sh.setFormatter(formatter)
logger.addHandler(sh)
def getFrontendStats(es):
search_query = SearchQuery(minutes=15)
search_query.add_must([
TermMatch('_type', 'mozdefhealth'),
TermMatch('category', 'mozdef'),
TermMatch('tags', 'latest'),
])
results = search_query.execute(es, indices=['events'])
return results['hits']
def writeFrontendStats(data, mongo):
# Empty everything before
mongo.healthfrontend.remove({})
for host in data:
for key in host['_source']['details'].keys():
# remove unwanted data
if '.' in key:
del host['_source']['details'][key]
mongo.healthfrontend.insert(host['_source'])
def writeEsClusterStats(data, mongo):
# Empty everything before
mongo.healthescluster.remove({})
mongo.healthescluster.insert(data)
def getEsNodesStats():
r = requests.get(options.esservers[0] + '/_nodes/stats/os,jvm,fs')
jsonobj = r.json()
results = []
for nodeid in jsonobj['nodes']:
# Skip non masters and data nodes since it won't have full stats
if ('attributes' in jsonobj['nodes'][nodeid] and
jsonobj['nodes'][nodeid]['attributes']['master'] == 'false' and
jsonobj['nodes'][nodeid]['attributes']['data'] == 'false'):
continue
results.append({
'hostname': jsonobj['nodes'][nodeid]['host'],
'disk_free': jsonobj['nodes'][nodeid]['fs']['total']['free_in_bytes'] / (1024 * 1024 * 1024),
'disk_total': jsonobj['nodes'][nodeid]['fs']['total']['total_in_bytes'] / (1024 * 1024 * 1024),
'mem_heap_per': jsonobj['nodes'][nodeid]['jvm']['mem']['heap_used_percent'],
'cpu_usage': jsonobj['nodes'][nodeid]['os']['cpu_percent'],
'load': jsonobj['nodes'][nodeid]['os']['load_average']
})
return results
def writeEsNodesStats(data, mongo):
# Empty everything before
mongo.healthesnodes.remove({})
for nodedata in data:
mongo.healthesnodes.insert(nodedata)
def getEsHotThreads():
r = requests.get(options.esservers[0] + '/_nodes/hot_threads')
results = []
for line in r.text.split('\n'):
if 'cpu usage' in line:
results.append(line)
return results
def writeEsHotThreads(data, mongo):
# Empty everything before
mongo.healtheshotthreads.remove({})
for line in data:
mongo.healtheshotthreads.insert({'line': line})
def main():
logger.debug('starting')
logger.debug(options)
try:
es = ElasticsearchClient((list('{0}'.format(s) for s in options.esservers)))
client = MongoClient(options.mongohost, options.mongoport)
# use meteor db
mongo = client.meteor
writeFrontendStats(getFrontendStats(es), mongo)
writeEsClusterStats(es.get_cluster_health(), mongo)
writeEsNodesStats(getEsNodesStats(), mongo)
writeEsHotThreads(getEsHotThreads(), mongo)
except Exception as e:
logger.error("Exception %r sending health to mongo" % e)
def initConfig():
# output our log to stdout or syslog
options.output = getConfig('output', 'stdout', options.configfile)
# syslog hostname
options.sysloghostname = getConfig('sysloghostname', 'localhost',
options.configfile)
# syslog port
options.syslogport = getConfig('syslogport', 514, options.configfile)
# elastic search server settings
options.esservers = list(getConfig('esservers', 'http://localhost:9200',
options.configfile).split(','))
options.mongohost = getConfig('mongohost', 'localhost', options.configfile)
options.mongoport = getConfig('mongoport', 3001, options.configfile)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option(
"-c",
dest='configfile',
default=sys.argv[0].replace('.py', '.conf'),
help="configuration file to use")
(options, args) = parser.parse_args()
initConfig()
initLogger()
main()<|fim▁end|>
|
import logging
import requests
import sys
|
<|file_name|>histogram.py<|end_file_name|><|fim▁begin|># Plot histogram
import os
import numpy as np
from plantcv.plantcv.threshold import binary as binary_threshold
from plantcv.plantcv import params
from plantcv.plantcv import fatal_error
from plantcv.plantcv._debug import _debug
import pandas as pd
from plotnine import ggplot, aes, geom_line, labels, scale_color_manual
def _hist_gray(gray_img, bins, lower_bound, upper_bound, mask=None):
    """Prepare ready-to-plot histogram data for a grayscale image.
    Inputs:
    gray_img = grayscale image to analyze
    bins = divide the data into n evenly spaced bins
    lower_bound = the lower bound of the bins (x-axis min value)
    upper_bound = the upper bound of the bins (x-axis max value)
    mask = binary mask, calculate histogram from masked area only (default=None)
    Returns:
    bin_labels = an array of histogram bin labels
    hist_percent = an array of histogram represented by percent values
    hist_gray_data = an array of histogram (original values)
    :param gray_img: numpy.ndarray
    :param bins: int
    :param lower_bound: int
    :param upper_bound: int
    :param mask: numpy.ndarray
    :return bin_labels: numpy.ndarray
    :return hist_percent: numpy.ndarray
    :return hist_gray_data: numpy.ndarray
    """
    params.device += 1
    # Remember the global debug mode so it can be restored after the
    # intermediate thresholding step below.
    debug = params.debug
    # Apply mask if one is supplied
    if mask is not None:
        min_val = np.min(gray_img)
        pixels = len(np.where(mask > 0)[0])
        # apply plant shaped mask to image
        # Debug output is suppressed for this internal threshold call only.
        params.debug = None
        mask1 = binary_threshold(mask, 0, 255, 'light')
        mask1 = (mask1 / 255)
        # Pixels outside the mask are pushed far below min_val so they fall
        # outside the (lower_bound, upper_bound) range and are not counted.
        masked = np.where(mask1 != 0, gray_img, min_val - 5000)
    else:
        pixels = gray_img.shape[0] * gray_img.shape[1]
        masked = gray_img
    params.debug = debug
    # Store histogram data
    hist_gray_data, hist_bins = np.histogram(masked, bins, (lower_bound, upper_bound))
    # make hist percentage for plotting
    hist_percent = (hist_gray_data / float(pixels)) * 100
    # use middle value of every bin as bin label
    bin_labels = np.array([np.average([hist_bins[i], hist_bins[i+1]]) for i in range(0, len(hist_bins) - 1)])
    return bin_labels, hist_percent, hist_gray_data
    # hist_data = pd.DataFrame({'pixel intensity': bin_labels, 'proportion of pixels (%)': hist_percent})
    # return hist_data
def histogram(img, mask=None, bins=100, lower_bound=None, upper_bound=None, title=None, hist_data=False):
    """Plot histograms of each input image channel
    Inputs:
    img = an RGB or grayscale image to analyze
    mask = binary mask, calculate histogram from masked area only (default=None)
    bins = divide the data into n evenly spaced bins (default=100)
    lower_bound = the lower bound of the bins (x-axis min value) (default=None)
    upper_bound = the upper bound of the bins (x-axis max value) (default=None)
    title = a custom title for the plot (default=None)
    hist_data = return the frequency distribution data if True (default=False)
    Returns:
    fig_hist = histogram figure
    hist_df = dataframe with histogram data, with columns "pixel intensity" and "proportion of pixels (%)"
    :param img: numpy.ndarray
    :param mask: numpy.ndarray
    :param bins: int
    :param lower_bound: int
    :param upper_bound: int
    :param title: str
    :param hist_data: bool
    :return fig_hist: plotnine.ggplot.ggplot
    :return hist_df: pandas.core.frame.DataFrame
    """
    if not isinstance(img, np.ndarray):
        fatal_error("Only image of type numpy.ndarray is supported input!")
    if len(img.shape) < 2:
        fatal_error("Input image should be at least a 2d array!")

    if mask is not None:
        masked = img[np.where(mask > 0)]
        img_min, img_max = np.nanmin(masked), np.nanmax(masked)
    else:
        img_min, img_max = np.nanmin(img), np.nanmax(img)

    # for lower / upper bound, if given, use the given value, otherwise, use the min / max of the image
    lower_bound = lower_bound if lower_bound is not None else img_min
    upper_bound = upper_bound if upper_bound is not None else img_max

    if len(img.shape) > 2:
        if img.shape[2] == 3:
            b_names = ['blue', 'green', 'red']
        else:
            b_names = [str(i) for i in range(img.shape[2])]

    if len(img.shape) == 2:
        bin_labels, hist_percent, hist_ = _hist_gray(img, bins=bins, lower_bound=lower_bound, upper_bound=upper_bound,
                                                     mask=mask)
        hist_df = pd.DataFrame(
            {'pixel intensity': bin_labels, 'proportion of pixels (%)': hist_percent, 'hist_count': hist_,
             'color channel': ['0' for _ in range(len(hist_percent))]})
    else:
        # Assumption: RGB image
        # Initialize dataframe column arrays
        px_int = np.array([])
        prop = np.array([])
        hist_count = np.array([])
        channel = []
        for (b, b_name) in enumerate(b_names):
            bin_labels, hist_percent, hist_ = _hist_gray(img[:, :, b], bins=bins, lower_bound=lower_bound,
                                                         upper_bound=upper_bound, mask=mask)
            # Append histogram data for each channel
            px_int = np.append(px_int, bin_labels)
            prop = np.append(prop, hist_percent)
            hist_count = np.append(hist_count, hist_)
            channel = channel + [b_name for _ in range(len(hist_percent))]
        # Create dataframe (this block was displaced by an extraction
        # artifact; without it hist_df was undefined before use below)
        hist_df = pd.DataFrame(
            {'pixel intensity': px_int, 'proportion of pixels (%)': prop, 'hist_count': hist_count,
             'color channel': channel})

    fig_hist = (ggplot(data=hist_df,
                       mapping=aes(x='pixel intensity', y='proportion of pixels (%)', color='color channel'))
                + geom_line())

    if title is not None:
        fig_hist = fig_hist + labels.ggtitle(title)
    if len(img.shape) > 2 and img.shape[2] == 3:
        fig_hist = fig_hist + scale_color_manual(['blue', 'green', 'red'])

    # Plot or print the histogram
    _debug(visual=fig_hist, filename=os.path.join(params.debug_outdir, str(params.device) + '_hist.png'))

    if hist_data is True:
        return fig_hist, hist_df
    return fig_hist
<|file_name|>tool_bar.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
//! Create bars of buttons and other widgets
use libc::c_int;
use ffi;
use glib::{to_bool, to_gboolean};
use cast::{GTK_TOOLBAR, GTK_TOOLITEM};
use {IconSize, ReliefStyle, ToolbarStyle};
/// Toolbar — Create bars of buttons and other widgets
/*
* # Availables signals :
* * `focus-home-or-end` : Action
* * `orientation-changed` : Run First
* * `popup-context-menu` : Run Last
* * `style-changed` : Run First
*/
struct_Widget!(Toolbar);
impl Toolbar {
pub fn new() -> Option<Toolbar> {
let tmp_pointer = unsafe { ffi::gtk_toolbar_new() };
check_pointer!(tmp_pointer, Toolbar)
}
pub fn insert<T: ::ToolItemTrait>(&self,
item: &T,
pos: i32) -> () {
unsafe {
ffi::gtk_toolbar_insert(GTK_TOOLBAR(self.pointer), GTK_TOOLITEM(item.unwrap_widget()), pos as c_int)
}
}
pub fn item_index<T: ::ToolItemTrait>(&self, item: &T) -> i32 {
unsafe {
ffi::gtk_toolbar_get_item_index(GTK_TOOLBAR(self.pointer), GTK_TOOLITEM(item.unwrap_widget())) as i32
}
}
pub fn get_n_items(&self) -> i32 {
unsafe {
ffi::gtk_toolbar_get_n_items(GTK_TOOLBAR(self.pointer)) as i32
}
}
pub fn get_nth_item(&self, n: i32) -> Option<::ToolItem> {
unsafe {
let tmp_pointer = ffi::gtk_toolbar_get_nth_item(GTK_TOOLBAR(self.pointer), n as c_int) as *mut ffi::GtkWidget;
if tmp_pointer.is_null() {
None
} else {
Some(::FFIWidget::wrap_widget(tmp_pointer))
}
}<|fim▁hole|> ffi::gtk_toolbar_get_drop_index(GTK_TOOLBAR(self.pointer), x as c_int, y as c_int) as i32
}
}
pub fn set_drop_highlight_item<T: ::ToolItemTrait>(&self, item: &T, index: i32) -> () {
unsafe {
ffi::gtk_toolbar_set_drop_highlight_item(GTK_TOOLBAR(self.pointer), GTK_TOOLITEM(item.unwrap_widget()), index as c_int);
}
}
pub fn set_show_arrow(&self, show_arrow: bool) -> () {
unsafe { ffi::gtk_toolbar_set_show_arrow(GTK_TOOLBAR(self.pointer), to_gboolean(show_arrow)); }
}
pub fn unset_icon_size(&self) -> () {
unsafe {
ffi::gtk_toolbar_unset_icon_size(GTK_TOOLBAR(self.pointer))
}
}
pub fn get_show_arrow(&self) -> bool {
unsafe { to_bool(ffi::gtk_toolbar_get_show_arrow(GTK_TOOLBAR(self.pointer))) }
}
pub fn get_style(&self) -> ToolbarStyle {
unsafe {
ffi::gtk_toolbar_get_style(GTK_TOOLBAR(self.pointer))
}
}
pub fn get_icon_size(&self) -> IconSize {
unsafe {
ffi::gtk_toolbar_get_icon_size(GTK_TOOLBAR(self.pointer))
}
}
pub fn get_relief_style(&self) -> ReliefStyle {
unsafe {
ffi::gtk_toolbar_get_relief_style(GTK_TOOLBAR(self.pointer))
}
}
pub fn set_style(&self, style: ToolbarStyle) -> () {
unsafe {
ffi::gtk_toolbar_set_style(GTK_TOOLBAR(self.pointer), style);
}
}
pub fn set_icon_size(&self, icon_size: IconSize) -> () {
unsafe {
ffi::gtk_toolbar_set_icon_size(GTK_TOOLBAR(self.pointer), icon_size);
}
}
pub fn unset_style(&self) -> () {
unsafe {
ffi::gtk_toolbar_unset_style(GTK_TOOLBAR(self.pointer));
}
}
}
impl_drop!(Toolbar);
impl_TraitWidget!(Toolbar);
impl ::ContainerTrait for Toolbar {}
impl ::ToolShellTrait for Toolbar {}
impl ::OrientableTrait for Toolbar {}<|fim▁end|>
|
}
pub fn get_drop_index(&self, x: i32, y: i32) -> i32 {
unsafe {
|
<|file_name|>print.py<|end_file_name|><|fim▁begin|>"""
Boolean geometry utilities.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
import sys
__author__ = 'Enrique Perez ([email protected])'
__credits__ = 'Art of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/02/05 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def _getAccessibleAttribute(attributeName):
	'Get the accessible attribute, or None when it is not registered.'
	return globalAccessibleAttributeDictionary.get(attributeName)
def continuous(valueString):
	'Print the value without a trailing newline and return it.'
	sys.stdout.write(str(valueString))
	return valueString

def line(valueString):
	'Print the value followed by a newline and return it.'
	print(valueString)
	return valueString

globalAccessibleAttributeDictionary = {'continuous' : continuous, 'line' : line}
<|file_name|>sf2_glm.hpp<|end_file_name|><|fim▁begin|>/** SF2 annotations for glm types ********************************************
* *
* Copyright (c) 2015 Florian Oetke *
* This file is distributed under the MIT License *
* See LICENSE file for details. *
\*****************************************************************************/
<|fim▁hole|>#pragma once
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include <sf2/sf2.hpp>
namespace glm {
inline void load(sf2::JsonDeserializer& s, vec2& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("w", v.x),
sf2::vmember("h", v.y));
}
inline void save(sf2::JsonSerializer& s, const vec2& v)
{
s.write_virtual(sf2::vmember("x", v.x), sf2::vmember("y", v.y));
}
inline void load(sf2::JsonDeserializer& s, vec3& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("r", v.x),
sf2::vmember("g", v.y),
sf2::vmember("b", v.z));
}
inline void save(sf2::JsonSerializer& s, const vec3& v)
{
s.write_virtual(sf2::vmember("x", v.x), sf2::vmember("y", v.y), sf2::vmember("z", v.z));
}
inline void load(sf2::JsonDeserializer& s, vec4& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("w", v.w),
sf2::vmember("r", v.x),
sf2::vmember("g", v.y),
sf2::vmember("b", v.z),
sf2::vmember("a", v.a));
}
inline void save(sf2::JsonSerializer& s, const vec4& v)
{
s.write_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("w", v.w));
}
inline void load(sf2::JsonDeserializer& s, ivec2& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("w", v.x),
sf2::vmember("h", v.y));
}
inline void save(sf2::JsonSerializer& s, const ivec2& v)
{
s.write_virtual(sf2::vmember("x", v.x), sf2::vmember("y", v.y));
}
inline void load(sf2::JsonDeserializer& s, ivec3& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("r", v.x),
sf2::vmember("g", v.y),
sf2::vmember("b", v.z));
}
inline void save(sf2::JsonSerializer& s, const ivec3& v)
{
s.write_virtual(sf2::vmember("x", v.x), sf2::vmember("y", v.y), sf2::vmember("z", v.z));
}
inline void load(sf2::JsonDeserializer& s, ivec4& v)
{
s.read_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("w", v.w),
sf2::vmember("r", v.x),
sf2::vmember("g", v.y),
sf2::vmember("b", v.z),
sf2::vmember("a", v.a));
}
inline void save(sf2::JsonSerializer& s, const ivec4& v)
{
s.write_virtual(sf2::vmember("x", v.x),
sf2::vmember("y", v.y),
sf2::vmember("z", v.z),
sf2::vmember("w", v.w));
}
	// Deserialise a quaternion that was stored as Euler angles
	// ("roll"/"pitch"/"yaw", radians — the format written by save() below).
	inline void load(sf2::JsonDeserializer& s, quat& v)
	{
		auto r = 0.f;
		auto p = 0.f;
		auto y = 0.f;
		s.read_virtual(sf2::vmember("roll", r), sf2::vmember("pitch", p), sf2::vmember("yaw", y));
		// glm::quat(vec3) builds the quaternion from Euler angles.
		v = quat(glm::vec3(r, p, y));
	}
	// Serialise a quaternion as Euler angles; round-trips with load() above.
	// NOTE(review): Euler decomposition is lossy near gimbal lock — confirm
	// this is acceptable for the rotations being persisted.
	inline void save(sf2::JsonSerializer& s, const quat& v)
	{
		auto r = roll(v);
		auto p = pitch(v);
		auto y = yaw(v);
		s.write_virtual(sf2::vmember("roll", r), sf2::vmember("pitch", p), sf2::vmember("yaw", y));
	}
} // namespace glm<|fim▁end|>
| |
<|file_name|>download.rs<|end_file_name|><|fim▁begin|>use chrono::naive::NaiveDateTime;
use chrono::Local;
use database::Database;
use rutracker::RutrackerForum;
use std::collections::HashMap;
use std::collections::HashSet;
type Result<T> = std::result::Result<T, failure::Error>;
pub struct Downloader<'a> {
    // Local torrent database handle.
    db: &'a Database,
    // Forum client; appears unused while get_list_for_download is stubbed.
    forum: &'a RutrackerForum,
    // Topic ids that must never be scheduled for download.
    ignored_id: Vec<usize>,
}
impl<'a> Downloader<'a> {
pub fn new(db: &'a Database, forum: &'a RutrackerForum, ignored_id: Vec<usize>) -> Self {
Self {<|fim▁hole|> db,
forum,
ignored_id,
}
}
    /// Build the map of topics that should be downloaded for `forum_id`.
    ///
    /// NOTE(review): the selection logic below was commented out upstream,
    /// so this currently always returns an empty map and `forum_id` /
    /// `download` are effectively unused — confirm before relying on it.
    pub fn get_list_for_download(
        &self,
        forum_id: usize,
        download: i16,
    ) -> Result<HashMap<usize, (String, usize)>> {
        /* let date = Local::now().naive_local();
        let num_days = |time: NaiveDateTime| date.signed_duration_since(time).num_days();
        let check_reg_time_and_status = |status: i16, time: NaiveDateTime| {
            ([2, 3, 8].contains(&status) && num_days(time) > 30)
                || ([0, 10].contains(&status) && num_days(time) > 90)
        };
        let keeper_list: HashMap<String, Vec<usize>> =
            self.db.get_by_filter(DBName::KeeperList, |_, _| true)?;
        let keeper_list: HashSet<usize> = keeper_list.into_iter().flat_map(|(_, v)| v).collect();
        let topic_id: HashMap<_, _> = self
            .db
            .pvc(forum_id, None::<&[usize]>)?
            .into_iter()
            .filter(|(_, v)| v.seeders <= download)
            .filter(|(_, v)| check_reg_time_and_status(v.tor_status, v.reg_time))
            .filter(|(id, _)| !self.ignored_id.contains(id) && !keeper_list.contains(id))
            .collect(); */
        Ok(HashMap::new())
    }
}<|fim▁end|>
| |
<|file_name|>test_dump.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
# Copyright (c) 2010-2015 openpyxl
import datetime
import decimal
from io import BytesIO
from openpyxl.xml.functions import tostring, xmlfile
from openpyxl.utils.indexed_list import IndexedList
from openpyxl.utils.datetime import CALENDAR_WINDOWS_1900
from openpyxl.styles import Style
from openpyxl.styles.styleable import StyleId
from openpyxl.tests.helper import compare_xml
import pytest
class DummyLocalData:
pass
class DummyWorkbook:
    """Minimal stand-in for openpyxl's Workbook used by DumpWorksheet tests."""
    def __init__(self):
        # Containers mirroring the workbook attributes DumpWorksheet reads.
        self.shared_strings = IndexedList()
        self.shared_styles = [Style()]
        self._cell_styles = IndexedList([StyleId(0, 0, 0, 0, 0, 0)])
        self._number_formats = IndexedList()
        self._local_data = DummyLocalData()
        self.encoding = "UTF-8"
        self.excel_base_date = CALENDAR_WINDOWS_1900
    def get_sheet_names(self):
        # The dummy workbook contains no other sheets.
        return []
@pytest.fixture
def DumpWorksheet():
from .. dump_worksheet import DumpWorksheet
return DumpWorksheet(DummyWorkbook(), title="TestWorksheet")
@pytest.mark.lxml_required
def test_write_header(DumpWorksheet):
ws = DumpWorksheet
doc = ws._write_header()
next(doc)
doc.close()
header = open(ws.filename)
xml = header.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
def test_append(DumpWorksheet):
    """append() serialises numbers, shared strings and generator rows."""
    ws = DumpWorksheet

    def _writer(doc):
        # Minimal sheetData writer coroutine: stream appended rows into doc.
        with xmlfile(doc) as xf:
            with xf.element('sheetData'):
                try:
                    while True:
                        body = (yield)
                        xf.write(body)
                except GeneratorExit:
                    pass

    doc = BytesIO()
    ws.writer = _writer(doc)
    next(ws.writer)

    ws.append([1, "s"])
    ws.append(['2', 3])
    ws.append(i for i in [1, 2])
    ws.writer.close()
    xml = doc.getvalue()
    # Row 2: '2' is a string, so it becomes shared-string index 1; 3 is
    # numeric and stored in-line. (This row's cells were displaced by an
    # extraction artifact in the original expected string.)
    expected = """
    <sheetData>
    <row r="1" spans="1:2">
      <c r="A1" t="n">
        <v>1</v>
      </c>
      <c r="B1" t="s">
        <v>0</v>
      </c>
    </row>
    <row r="2" spans="1:2">
      <c r="A2" t="s">
        <v>1</v>
      </c>
      <c r="B2" t="n">
        <v>3</v>
      </c>
    </row>
    <row r="3" spans="1:2">
      <c r="A3" t="n">
        <v>1</v>
      </c>
      <c r="B3" t="n">
        <v>2</v>
      </c>
    </row>
    </sheetData>
    """
    diff = compare_xml(xml, expected)
    assert diff is None, diff
def test_dirty_cell(DumpWorksheet):
ws = DumpWorksheet
def _writer(doc):
with xmlfile(doc) as xf:
with xf.element('sheetData'):
try:
while True:
body = (yield)
xf.write(body)
except GeneratorExit:
pass
doc = BytesIO()
ws.writer = _writer(doc)
next(ws.writer)
ws.append((datetime.date(2001, 1, 1), 1))
ws.writer.close()
xml = doc.getvalue()
expected = """
<sheetData>
<row r="1" spans="1:2">
<c r="A1" t="n" s="1"><v>36892</v></c>
<c r="B1" t="n"><v>1</v></c>
</row>
</sheetData>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.parametrize("row", ("string", dict()))
def test_invalid_append(DumpWorksheet, row):
ws = DumpWorksheet
with pytest.raises(TypeError):
ws.append(row)
@pytest.mark.lxml_required
def test_cell_comment(DumpWorksheet):
ws = DumpWorksheet
from openpyxl.comments import Comment
from .. dump_worksheet import WriteOnlyCell
cell = WriteOnlyCell(ws, 1)
comment = Comment('hello', 'me')
cell.comment = comment
ws.append([cell])
assert ws._comments == [comment]
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:1"><c r="A1" t="n"><v>1</v></c></row>
</sheetData>
<legacyDrawing r:id="commentsvml"></legacyDrawing>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_cannot_save_twice(DumpWorksheet):
from .. dump_worksheet import WorkbookAlreadySaved
ws = DumpWorksheet
ws.close()
with pytest.raises(WorkbookAlreadySaved):
ws.close()
with pytest.raises(WorkbookAlreadySaved):
ws.append([1])
@pytest.mark.lxml_required
def test_close(DumpWorksheet):
ws = DumpWorksheet
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_auto_filter(DumpWorksheet):
ws = DumpWorksheet
ws.auto_filter.ref = 'A1:F1'
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
<autoFilter ref="A1:F1"/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_frozen_panes(DumpWorksheet):
ws = DumpWorksheet
ws.freeze_panes = 'D4'
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<pane xSplit="3" ySplit="3" topLeftCell="D4" activePane="bottomRight" state="frozen"/>
<selection pane="topRight"/>
<selection pane="bottomLeft"/>
<selection pane="bottomRight" activeCell="A1" sqref="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData/>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff
@pytest.mark.lxml_required
def test_write_empty_row(DumpWorksheet):
ws = DumpWorksheet
ws.append(['1', '2', '3'])
ws.append([])
ws.close()
with open(ws.filename) as src:
xml = src.read()
expected = """
<worksheet xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main">
<sheetPr>
<outlinePr summaryRight="1" summaryBelow="1"/>
<pageSetUpPr/>
</sheetPr>
<sheetViews>
<sheetView workbookViewId="0">
<selection sqref="A1" activeCell="A1"/>
</sheetView>
</sheetViews>
<sheetFormatPr baseColWidth="10" defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:3">
<c r="A1" t="s">
<v>0</v>
</c>
<c r="B1" t="s">
<v>1</v>
</c>
<c r="C1" t="s">
<v>2</v>
</c>
</row>
<row r="2"/>
</sheetData>
</worksheet>
"""
diff = compare_xml(xml, expected)
assert diff is None, diff<|fim▁end|>
|
<v>1</v>
</c>
<c r="B2" t="n">
<v>3</v>
|
<|file_name|>test_complextype_user_osismodelbase.py<|end_file_name|><|fim▁begin|>from JumpScale import j
class test_complextype_user_osismodelbase(j.code.classGetJSRootModelBase()):
"""
group of users
"""
def __init__(self):
pass
self._P_id=0
self._P_organization=""
self._P_name=""
self._P_emails=list()
self._P_groups=list()
self._P_guid=""
self._P__meta=list()
self._P__meta=["osismodel","test_complextype","user",1] #@todo version not implemented now, just already foreseen
@property
def id(self):
return self._P_id
@id.setter
def id(self, value):
if not isinstance(value, int) and value is not None:
if isinstance(value, basestring) and j.basetype.integer.checkString(value):
value = j.basetype.integer.fromString(value)
else:
msg="property id input error, needs to be int, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_id=value
@id.deleter
def id(self):
del self._P_id
@property
def organization(self):
return self._P_organization
@organization.setter
def organization(self, value):
if not isinstance(value, str) and value is not None:
if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)<|fim▁hole|> self._P_organization=value
@organization.deleter
def organization(self):
del self._P_organization
@property
def name(self):
return self._P_name
@name.setter
def name(self, value):
if not isinstance(value, str) and value is not None:
if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)
else:
msg="property name input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_name=value
@name.deleter
def name(self):
del self._P_name
@property
def emails(self):
return self._P_emails
@emails.setter
def emails(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property emails input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_emails=value
@emails.deleter
def emails(self):
del self._P_emails
@property
def groups(self):
return self._P_groups
@groups.setter
def groups(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property groups input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_groups=value
@groups.deleter
def groups(self):
del self._P_groups
@property
def guid(self):
return self._P_guid
@guid.setter
def guid(self, value):
if not isinstance(value, str) and value is not None:
if isinstance(value, basestring) and j.basetype.string.checkString(value):
value = j.basetype.string.fromString(value)
else:
msg="property guid input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P_guid=value
@guid.deleter
def guid(self):
del self._P_guid
@property
def _meta(self):
return self._P__meta
@_meta.setter
def _meta(self, value):
if not isinstance(value, list) and value is not None:
if isinstance(value, basestring) and j.basetype.list.checkString(value):
value = j.basetype.list.fromString(value)
else:
msg="property _meta input error, needs to be list, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
self._P__meta=value
@_meta.deleter
def _meta(self):
del self._P__meta<|fim▁end|>
|
else:
msg="property organization input error, needs to be str, specfile: /opt/jumpscale/apps/osis/logic/test_complextype/model.spec, name model: user, value was:" + str(value)
raise TypeError(msg)
|
<|file_name|>update_search.py<|end_file_name|><|fim▁begin|>import os
import sys
# Put communityshare in sys
this_directory = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.abspath(os.path.join(this_directory, '..')))
from community_share import config, store, Base
from community_share.models.user import User, TypedLabel
from community_share.models.search import Label
grade_level_labels = set((
'K-5', '6-8', '9-12', 'College', 'Adult',
'K-3', '4-5', '6-8', '9-12', 'Preschool',
))
engagement_labels = set((
'Guest Speaker', 'Host Field Trip', 'Judge Student Competition',
'Participate in Career Day', 'Collaborate on a Class Project',
'Mentor Students', 'Brainstorm Curriculum Ideas with Educator',
'Hands-On Demonstration',
'Guest', 'Speaker', 'Field Trip Host', 'Student Competition Judge',
'Individual/Group Mentor', 'Share Curriculum Ideas', 'Curriculuum Development',
'Career Day Participant', 'Collaborator on a Class Project',
'Long-term', 'Individual Mentor', 'Short-term',
'Small Group Mentor', 'Classroom Materials Provider',
'Student Competition Judget',
))
if __name__ == '__main__':
    config.load_from_file()
    Base.metadata.create_all(store.engine)
    users = store.session.query(User).all()
    # Update the is_community_partner and is_educator flags in the user table.
    for user in users:
        is_educator = False
        search = user.educator_profile_search
        if (search and search.active):
            is_educator = (len(search.labels) > 0)
        is_community_partner = False
        search = user.community_partner_profile_search
        if (search and search.active):
            is_community_partner = (len(search.labels) > 0)
        user.is_community_partner = is_community_partner
        user.is_educator = is_educator
        store.session.add(user)
    store.session.commit()
    # Migrate Labels to TypedLabels, classifying each by name.
    labels = store.session.query(Label).all()
    for label in labels:
        if label.active:
            if label.name in grade_level_labels:
                typ='gradelevel'
            elif label.name in engagement_labels:
                typ='engagement'
            else:
                typ='expertise'
            check = store.session.query(TypedLabel).filter(TypedLabel.name==label.name, TypedLabel.typ==typ).first()
            if not check:
                # (The closing paren and session.add were displaced by an
                # extraction artifact; restored here.)
                new_label = TypedLabel(
                    name=label.name,
                    typ=typ,
                )
                store.session.add(new_label)
    store.session.commit()
    # Associate Labels with Users instead of with searches.
    for user in users:
        cp_search = user.community_partner_profile_search
        if cp_search:
            for label in cp_search.labels:
                typed_label = store.session.query(TypedLabel).filter(TypedLabel.name==label.name).first()
                user.labels.append(typed_label)
        ed_search = user.educator_profile_search
        if ed_search:
            for label in ed_search.labels:
                typed_label = store.session.query(TypedLabel).filter(TypedLabel.name==label.name).first()
                if typed_label.typ == 'gradelevel':
                    user.labels.append(typed_label)
    store.session.commit()
    # Make a search string for the Community partners.
    for user in users:
        user.update_search_text()
        store.session.add(user)
    store.session.commit()
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Backend loading
# Based on the Django cache framework
# https://github.com/django/django/blob/5d263dee304fdaf95e18d2f0619d6925984a7f02/django/core/cache/__init__.py
import sys
from importlib import import_module
import warnings
from django.utils import six
from django.utils.module_loading import import_string
from django.core.exceptions import ImproperlyConfigured
from django.conf import settings
class InvalidSearchBackendError(ImproperlyConfigured):
    """Raised when a configured search backend cannot be located or imported."""
def get_search_backend_config():
    """Return the WAGTAILSEARCH_BACKENDS mapping, guaranteeing a 'default' entry.

    A shallow copy of the configured mapping is returned, so inserting the
    fallback 'default' entry does not mutate the dict stored on the Django
    settings object (the previous in-place ``setdefault`` leaked the fallback
    back into ``settings.WAGTAILSEARCH_BACKENDS``).
    """
    search_backends = dict(getattr(settings, 'WAGTAILSEARCH_BACKENDS', {}))

    # Make sure the default backend is always defined
    search_backends.setdefault('default', {
        'BACKEND': 'wagtail.wagtailsearch.backends.db',
    })

    return search_backends
def import_backend(dotted_path):
    """Resolve a search-backend dotted path to a backend class.

    Two path formats are accepted:

    * module path (new), e.g. ``wagtail.wagtailsearch.backends.elasticsearch``
      -- the class is read from the module's ``SearchBackend`` attribute;
    * full class path (old), e.g.
      ``wagtail.wagtailsearch.backends.elasticsearch.ElasticsearchSearchBackend``.
    """
    try:
        # New-style: import the module and pull out its SearchBackend class.
        return import_module(dotted_path).SearchBackend
    except ImportError as new_style_error:
        try:
            # Old-style: the dotted path names the backend class itself.
            return import_string(dotted_path)
        except ImportError:
            # Surface the original (new-style) failure with its traceback.
            six.reraise(ImportError, new_style_error, sys.exc_info()[2])
def get_search_backend(backend='default', **kwargs):
backend = backend
search_backends = get_search_backend_config()
# Try to find the backend
try:
# Try to get the WAGTAILSEARCH_BACKENDS entry for the given backend name first<|fim▁hole|> except KeyError:
try:
# Trying to import the given backend, in case it's a dotted path
import_backend(backend)
except ImportError as e:
raise InvalidSearchBackendError("Could not find backend '{}': {}".format(
backend, e))
params = kwargs
else:
# Backend is a conf entry
params = conf.copy()
params.update(kwargs)
backend = params.pop('BACKEND')
backend = backend
# Try to import the backend
try:
backend_cls = import_backend(backend)
except ImportError as e:
raise InvalidSearchBackendError("Could not find backend '{}': {}".format(
backend, e))
# Create backend
return backend_cls(params)
def _backend_requires_auto_update(backend_name, params):
    """Return True when the named backend should receive automatic index updates."""
    if params.get('AUTO_UPDATE', True):
        return True

    # _WAGTAILSEARCH_FORCE_AUTO_UPDATE is only used by Wagtail tests. It allows
    # us to test AUTO_UPDATE behaviour against Elasticsearch without having to
    # have AUTO_UPDATE enabled for every test.
    forced_backends = getattr(settings, '_WAGTAILSEARCH_FORCE_AUTO_UPDATE', [])
    return backend_name in forced_backends
def get_search_backends_with_name(with_auto_update=False):
    """Yield ``(name, backend_instance)`` pairs for every configured backend.

    When ``with_auto_update`` is True, backends whose configuration disables
    automatic index updates are skipped.
    """
    for name, params in get_search_backend_config().items():
        if with_auto_update and not _backend_requires_auto_update(name, params):
            continue
        yield name, get_search_backend(name)
def get_search_backends(with_auto_update=False):
    """Yield each configured backend instance; kept for backwards compatibility."""
    for _name, backend in get_search_backends_with_name(with_auto_update=with_auto_update):
        yield backend
|
conf = search_backends[backend]
|
<|file_name|>ByteBufferAsCharBuffer.java<|end_file_name|><|fim▁begin|>/* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package java.nio;
import libcore.io.SizeOf;
/**
* This class wraps a byte buffer to be a char buffer.
* <p>
* Implementation notice:
* <ul>
* <li>After a byte buffer instance is wrapped, it becomes privately owned by
* the adapter. It must NOT be accessed outside the adapter any more.</li>
* <li>The byte buffer's position and limit are NOT linked with the adapter.
* The adapter extends Buffer, thus has its own position and limit.</li>
* </ul>
* </p>
*
*/
final class ByteBufferAsCharBuffer extends CharBuffer {
private final ByteBuffer byteBuffer;
    /**
     * Wraps the given byte buffer as a char view. The byte buffer is sliced
     * first, so the view starts at the buffer's current position, and the
     * slice inherits the source buffer's byte order.
     */
    static CharBuffer asCharBuffer(ByteBuffer byteBuffer) {
        ByteBuffer slice = byteBuffer.slice();
        slice.order(byteBuffer.order());
        return new ByteBufferAsCharBuffer(slice);
    }

    private ByteBufferAsCharBuffer(ByteBuffer byteBuffer) {
        // Capacity in chars is the byte capacity scaled down by the char width.
        super(byteBuffer.capacity() / SizeOf.CHAR);
        this.byteBuffer = byteBuffer;
        this.byteBuffer.clear();
        this.effectiveDirectAddress = byteBuffer.effectiveDirectAddress;
    }
    /** Returns a read-only view sharing this buffer's content, position, limit and mark. */
    @Override
    public CharBuffer asReadOnlyBuffer() {
        ByteBufferAsCharBuffer buf = new ByteBufferAsCharBuffer(byteBuffer.asReadOnlyBuffer());
        buf.limit = limit;
        buf.position = position;
        buf.mark = mark;
        buf.byteBuffer.order = byteBuffer.order;
        return buf;
    }

    @Override
    public CharBuffer compact() {
        if (byteBuffer.isReadOnly()) {
            throw new ReadOnlyBufferException();
        }
        // Project our char-based position/limit onto the byte buffer (scaled
        // by SizeOf.CHAR) before compacting, then reset the byte buffer.
        byteBuffer.limit(limit * SizeOf.CHAR);
        byteBuffer.position(position * SizeOf.CHAR);
        byteBuffer.compact();
        byteBuffer.clear();
        position = limit - position;
        limit = capacity;
        mark = UNSET_MARK;
        return this;
    }

    /** Returns an independent view with the same content, position, limit and mark. */
    @Override
    public CharBuffer duplicate() {
        ByteBuffer bb = byteBuffer.duplicate().order(byteBuffer.order());
        ByteBufferAsCharBuffer buf = new ByteBufferAsCharBuffer(bb);
        buf.limit = limit;
        buf.position = position;
        buf.mark = mark;
        return buf;
    }
    /** Relative read: returns the char at the current position and advances by one. */
    @Override
    public char get() {
        if (position == limit) {
            throw new BufferUnderflowException();
        }
        // Position is counted in chars; the backing buffer is addressed in bytes.
        return byteBuffer.getChar(position++ * SizeOf.CHAR);
    }

    /** Absolute read; does not move the position. */
    @Override
    public char get(int index) {
        checkIndex(index);
        return byteBuffer.getChar(index * SizeOf.CHAR);
    }

    /** Bulk read into dst, dispatching on the concrete backing-buffer type. */
    @Override
    public CharBuffer get(char[] dst, int dstOffset, int charCount) {
        byteBuffer.limit(limit * SizeOf.CHAR);
        byteBuffer.position(position * SizeOf.CHAR);
        if (byteBuffer instanceof DirectByteBuffer) {
            ((DirectByteBuffer) byteBuffer).get(dst, dstOffset, charCount);
        } else {
            ((ByteArrayBuffer) byteBuffer).get(dst, dstOffset, charCount);
        }
        this.position += charCount;
        return this;
    }
    // Direct-ness, read-only-ness and byte order are all delegated to the
    // wrapped byte buffer.
    @Override
    public boolean isDirect() {
        return byteBuffer.isDirect();
    }

    @Override
    public boolean isReadOnly() {
        return byteBuffer.isReadOnly();
    }

    @Override
    public ByteOrder order() {
        return byteBuffer.order();
    }

    // The backing store is a ByteBuffer, not a char[], so array-style access
    // is unsupported for this view.
    @Override char[] protectedArray() {
        throw new UnsupportedOperationException();
    }

    @Override int protectedArrayOffset() {
        throw new UnsupportedOperationException();
    }

    @Override boolean protectedHasArray() {
        return false;
    }
    /** Relative write of one char at the current position, advancing by one. */
    @Override
    public CharBuffer put(char c) {
        if (position == limit) {
            throw new BufferOverflowException();
        }
        byteBuffer.putChar(position++ * SizeOf.CHAR, c);
        return this;
    }

    /** Absolute write; does not move the position. */
    @Override
    public CharBuffer put(int index, char c) {
        checkIndex(index);
        byteBuffer.putChar(index * SizeOf.CHAR, c);
        return this;
    }
@Override
public CharBuffer put(char[] src, int srcOffset, int charCount) {
byteBuffer.limit(limit * SizeOf.CHAR);
byteBuffer.position(position * SizeOf.CHAR);
if (byteBuffer instanceof DirectByteBuffer) {
((DirectByteBuffer) byteBuffer).put(src, srcOffset, charCount);<|fim▁hole|> this.position += charCount;
return this;
}
    @Override
    public CharBuffer slice() {
        // Project the current char window onto the byte buffer, slice it,
        // then restore the byte buffer's bounds.
        byteBuffer.limit(limit * SizeOf.CHAR);
        byteBuffer.position(position * SizeOf.CHAR);
        ByteBuffer bb = byteBuffer.slice().order(byteBuffer.order());
        CharBuffer result = new ByteBufferAsCharBuffer(bb);
        byteBuffer.clear();
        return result;
    }

    /** Returns a view of the chars between start and end, relative to the position. */
    @Override public CharBuffer subSequence(int start, int end) {
        checkStartEndRemaining(start, end);
        CharBuffer result = duplicate();
        result.limit(position + end);
        result.position(position + start);
        return result;
    }
}<|fim▁end|>
|
} else {
((ByteArrayBuffer) byteBuffer).put(src, srcOffset, charCount);
}
|
<|file_name|>test_user_messages.py<|end_file_name|><|fim▁begin|>"""
Unit tests for user messages.
"""
import warnings
import ddt
from django.contrib.messages.middleware import MessageMiddleware
from django.test import RequestFactory, TestCase
from common.test.utils import normalize_repr
from openedx.core.djangolib.markup import HTML, Text
from common.djangoapps.student.tests.factories import UserFactory
from ..user_messages import PageLevelMessages, UserMessageType
TEST_MESSAGE = 'Test message'
@ddt.ddt
class UserMessagesTestCase(TestCase):
"""
Unit tests for page level user messages.
"""
    def setUp(self):
        """Create a student user and a request with messaging middleware applied."""
        super().setUp()
        self.student = UserFactory.create()
        self.request = RequestFactory().request()
        self.request.session = {}
        self.request.user = self.student
        # Run the messages middleware so register_user_message() has storage.
        MessageMiddleware().process_request(self.request)
    @ddt.data(
        ('Rock & Roll', '<div class="message-content">Rock & Roll</div>'),
        (Text('Rock & Roll'), '<div class="message-content">Rock & Roll</div>'),
        (HTML('<p>Hello, world!</p>'), '<div class="message-content"><p>Hello, world!</p></div>')
    )
    @ddt.unpack
    def test_message_escaping(self, message, expected_message_html):
        """
        Verifies that a user message is escaped correctly.

        Plain strings and Text() are escaped; HTML() is trusted and passed through.
        """
        PageLevelMessages.register_user_message(self.request, UserMessageType.INFO, message)
        messages = list(PageLevelMessages.user_messages(self.request))
        assert len(messages) == 1
        assert messages[0].message_html == expected_message_html
    @ddt.data(
        (UserMessageType.ERROR, 'alert-danger', 'fa fa-warning'),
        (UserMessageType.INFO, 'alert-info', 'fa fa-bullhorn'),
        (UserMessageType.SUCCESS, 'alert-success', 'fa fa-check-circle'),
        (UserMessageType.WARNING, 'alert-warning', 'fa fa-warning'),
    )
    @ddt.unpack
    def test_message_icon(self, message_type, expected_css_class, expected_icon_class):
        """
        Verifies that a user message returns the correct CSS and icon classes.
        """
        PageLevelMessages.register_user_message(self.request, message_type, TEST_MESSAGE)
        messages = list(PageLevelMessages.user_messages(self.request))
        assert len(messages) == 1
        assert messages[0].css_class == expected_css_class
        assert messages[0].icon_class == expected_icon_class
    @ddt.data(
        (normalize_repr(PageLevelMessages.register_error_message), UserMessageType.ERROR),
        (normalize_repr(PageLevelMessages.register_info_message), UserMessageType.INFO),
        (normalize_repr(PageLevelMessages.register_success_message), UserMessageType.SUCCESS),
        (normalize_repr(PageLevelMessages.register_warning_message), UserMessageType.WARNING),
    )
    @ddt.unpack
    def test_message_type(self, register_message_function, expected_message_type):
        """
        Verifies that each user message function registers the correct type.
        """
        register_message_function(self.request, TEST_MESSAGE)
        messages = list(PageLevelMessages.user_messages(self.request))
        assert len(messages) == 1
        assert messages[0].type == expected_message_type
    def global_message_count(self):
        """
        Count the number of times the global message appears in the user messages.

        Matches on the escaped form of the message used by the settings in
        the tests below.
        """
        expected_html = """<div class="message-content">I <3 HTML-escaping</div>"""
        messages = list(PageLevelMessages.user_messages(self.request))
        return len(list(msg for msg in messages if expected_html in msg.message_html))

    def test_global_message_off_by_default(self):
        """Verifies feature toggle: no global message when GLOBAL_NOTICE_ENABLED is False."""
        with self.settings(
            GLOBAL_NOTICE_ENABLED=False,
            GLOBAL_NOTICE_MESSAGE="I <3 HTML-escaping",
            GLOBAL_NOTICE_TYPE='WARNING'
        ):
            # Missing when feature disabled
            assert self.global_message_count() == 0
def test_global_message_persistent(self):
"""Verifies global message is always included, when enabled."""
with self.settings(
GLOBAL_NOTICE_ENABLED=True,
GLOBAL_NOTICE_MESSAGE="I <3 HTML-escaping",
GLOBAL_NOTICE_TYPE='WARNING'
):
# Present with no other setup
assert self.global_message_count() == 1
# Present when other messages are present<|fim▁hole|>
    def test_global_message_error_isolation(self):
        """Verifies that any setting errors don't break the page, or other messages."""
        with self.settings(
            GLOBAL_NOTICE_ENABLED=True,
            GLOBAL_NOTICE_MESSAGE=ThrowingMarkup(),  # force an error
            GLOBAL_NOTICE_TYPE='invalid'
        ):
            PageLevelMessages.register_user_message(self.request, UserMessageType.WARNING, "something else")

            # Doesn't throw, or even interfere with other messages,
            # when given invalid settings
            with warnings.catch_warnings(record=True) as w:
                warnings.simplefilter('always')
                messages = list(PageLevelMessages.user_messages(self.request))
            # The failure surfaces only as a warning; the other message survives.
            assert len(w) == 1
            assert str(w[0].message) == "Could not register global notice: Exception('Some random error')"
            assert len(messages) == 1
            assert "something else" in messages[0].message_html
class ThrowingMarkup:
    """Markup stand-in whose HTML conversion always fails.

    markupsafe invokes ``__html__`` when coercing an object to markup, so
    this class lets tests exercise error handling in that path.
    """

    def __html__(self):
        raise Exception("Some random error")
|
PageLevelMessages.register_user_message(self.request, UserMessageType.INFO, "something else")
assert self.global_message_count() == 1
|
<|file_name|>uuid.service.ts<|end_file_name|><|fim▁begin|>import { Injectable } from '@angular/core';
@Injectable()
export class UuidService {
constructor() { }
/* tslint:disable:no-bitwise */<|fim▁hole|>
for (i = 0; i < 32; i++) {
random = Math.random() * 16 | 0;
if (i === 8 || i === 12 || i === 16 || i === 20) {
uuid += '-';
}
uuid += (i === 12 ? 4 : (i === 16 ? (random & 3 | 8) : random)).toString(16);
}
return uuid;
}
/* tslint:enable:no-bitwise */
}<|fim▁end|>
|
get() {
let uuid = '';
let i;
let random;
|
<|file_name|>test_help_msg.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#<|fim▁hole|># WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironicclient.tests.functional import base
class IronicClientHelp(base.FunctionalTestBase):
    """Test for python-ironicclient help messages."""

    def test_ironic_help(self):
        """Check Ironic client main help message contents.

        Runs ``ironic help`` and verifies the caption plus every expected
        subcommand name appears in the output.
        """
        caption = ("Command-line interface to the "
                   "OpenStack Bare Metal Provisioning API.")
        # Subcommands that must be listed in the top-level help output.
        subcommands = {
            'bash-completion',
            'chassis-create',
            'chassis-delete',
            'chassis-list',
            'chassis-node-list',
            'chassis-show',
            'chassis-update',
            'driver-list',
            'driver-properties',
            'driver-show',
            'driver-vendor-passthru',
            'help',
            'node-create',
            'node-delete',
            'node-get-boot-device',
            'node-get-console',
            'node-get-supported-boot-devices',
            'node-list',
            'node-port-list',
            'node-set-boot-device',
            'node-set-console-mode',
            'node-set-maintenance',
            'node-set-power-state',
            'node-set-provision-state',
            'node-show',
            'node-show-states',
            'node-update',
            'node-validate',
            'node-vendor-passthru',
            'port-create',
            'port-delete',
            'port-list',
            'port-show',
            'port-update'
        }
        output = self._ironic('help', flags='', params='')
        self.assertIn(caption, output)
        for string in subcommands:
            self.assertIn(string, output)
|
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
|
<|file_name|>AppInstanceMetricsDAOImpl.java<|end_file_name|><|fim▁begin|>package org.cloudfoundry.autoscaler.data.couchdb.dao.impl;
import java.util.ArrayList;
import java.util.List;
import org.apache.log4j.Logger;
import org.cloudfoundry.autoscaler.data.couchdb.dao.AppInstanceMetricsDAO;
import org.cloudfoundry.autoscaler.data.couchdb.dao.base.TypedCouchDbRepositorySupport;
import org.cloudfoundry.autoscaler.data.couchdb.document.AppInstanceMetrics;
import org.ektorp.ComplexKey;
import org.ektorp.CouchDbConnector;
import org.ektorp.ViewQuery;
import org.ektorp.support.View;
public class AppInstanceMetricsDAOImpl extends CommonDAOImpl implements AppInstanceMetricsDAO {
@View(name = "byAll", map = "function(doc) { if (doc.type == 'AppInstanceMetrics' ) emit([doc.appId, doc.appType, doc.timestamp], doc._id)}")
private static class AppInstanceMetricsRepository_All extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {
public AppInstanceMetricsRepository_All(CouchDbConnector db) {
super(AppInstanceMetrics.class, db, "AppInstanceMetrics_byAll");
}
public List<AppInstanceMetrics> getAllRecords() {
return queryView("byAll");
}
}
@View(name = "by_appId", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId) { emit([doc.appId], doc._id) } }")
private static class AppInstanceMetricsRepository_ByAppId
extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {
public AppInstanceMetricsRepository_ByAppId(CouchDbConnector db) {
super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppId");
}
public List<AppInstanceMetrics> findByAppId(String appId) {
ComplexKey key = ComplexKey.of(appId);
return queryView("by_appId", key);
}
}
@View(name = "by_appId_between", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.appId && doc.timestamp) { emit([doc.appId, doc.timestamp], doc._id) } }")
private static class AppInstanceMetricsRepository_ByAppIdBetween
extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {
public AppInstanceMetricsRepository_ByAppIdBetween(CouchDbConnector db) {
super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByAppIdBetween");
}
public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
throws Exception {
ComplexKey startKey = ComplexKey.of(appId, startTimestamp);
ComplexKey endKey = ComplexKey.of(appId, endTimestamp);
ViewQuery q = createQuery("by_appId_between").includeDocs(true).startKey(startKey).endKey(endKey);
List<AppInstanceMetrics> returnvalue = null;
String[] input = beforeConnection("QUERY", new String[] { "by_appId_between", appId,
String.valueOf(startTimestamp), String.valueOf(endTimestamp) });
try {
returnvalue = db.queryView(q, AppInstanceMetrics.class);
} catch (Exception e) {
e.printStackTrace();
}
afterConnection(input);
return returnvalue;
}
}
@View(name = "by_serviceId_before", map = "function(doc) { if (doc.type=='AppInstanceMetrics' && doc.serviceId && doc.timestamp) { emit([ doc.serviceId, doc.timestamp], doc._id) } }")
private static class AppInstanceMetricsRepository_ByServiceId_Before
extends TypedCouchDbRepositorySupport<AppInstanceMetrics> {
public AppInstanceMetricsRepository_ByServiceId_Before(CouchDbConnector db) {
super(AppInstanceMetrics.class, db, "AppInstanceMetrics_ByServiceId");
}
public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
ComplexKey startKey = ComplexKey.of(serviceId, 0);
ComplexKey endKey = ComplexKey.of(serviceId, olderThan);
ViewQuery q = createQuery("by_serviceId_before").includeDocs(true).startKey(startKey).endKey(endKey);
List<AppInstanceMetrics> returnvalue = null;
String[] input = beforeConnection("QUERY",
new String[] { "by_serviceId_before", serviceId, String.valueOf(0), String.valueOf(olderThan) });
try {
returnvalue = db.queryView(q, AppInstanceMetrics.class);
} catch (Exception e) {
e.printStackTrace();
}
afterConnection(input);
return returnvalue;
}
}
    private static final Logger logger = Logger.getLogger(AppInstanceMetricsDAOImpl.class);

    // One repository per CouchDB view, all backed by the same connector.
    private AppInstanceMetricsRepository_All metricsRepoAll;
    private AppInstanceMetricsRepository_ByAppId metricsRepoByAppId;
    private AppInstanceMetricsRepository_ByAppIdBetween metricsRepoByAppIdBetween;
    private AppInstanceMetricsRepository_ByServiceId_Before metricsRepoByServiceIdBefore;

    /** Creates the DAO without initializing the view design documents. */
    public AppInstanceMetricsDAOImpl(CouchDbConnector db) {
        metricsRepoAll = new AppInstanceMetricsRepository_All(db);
        metricsRepoByAppId = new AppInstanceMetricsRepository_ByAppId(db);
        metricsRepoByAppIdBetween = new AppInstanceMetricsRepository_ByAppIdBetween(db);
        metricsRepoByServiceIdBefore = new AppInstanceMetricsRepository_ByServiceId_Before(db);
    }

    /**
     * Creates the DAO and, when initDesignDocument is true, initializes all
     * view design documents. Initialization failures are logged, not thrown.
     */
    public AppInstanceMetricsDAOImpl(CouchDbConnector db, boolean initDesignDocument) {
        this(db);
        if (initDesignDocument) {
            try {
                initAllRepos();
            } catch (Exception e) {
                logger.error(e.getMessage(), e);
            }
        }
    }
@Override
public List<AppInstanceMetrics> findAll() {<|fim▁hole|> }
    /** Returns all metrics documents for the given application id. */
    @Override
    public List<AppInstanceMetrics> findByAppId(String appId) {
        return this.metricsRepoByAppId.findByAppId(appId);
    }

    /** Returns metrics for appId within [startTimestamp, endTimestamp]. */
    @Override
    public List<AppInstanceMetrics> findByAppIdBetween(String appId, long startTimestamp, long endTimestamp)
            throws Exception {
        return this.metricsRepoByAppIdBetween.findByAppIdBetween(appId, startTimestamp, endTimestamp);
    }

    /** Returns metrics for serviceId with timestamps up to olderThan. */
    @Override
    public List<AppInstanceMetrics> findByServiceIdBefore(String serviceId, long olderThan) throws Exception {
        return this.metricsRepoByServiceIdBefore.findByServiceIdBefore(serviceId, olderThan);
    }

    /**
     * Returns metrics for appId from the given timestamp until "now".
     * NOTE(review): failures are logged and converted into a null return
     * rather than propagated, despite the declared throws clause.
     */
    @Override
    public List<AppInstanceMetrics> findByAppIdAfter(String appId, long timestamp) throws Exception {
        try {
            return findByAppIdBetween(appId, timestamp, System.currentTimeMillis());
        } catch (Exception e) {
            logger.error(e.getMessage(), e);
        }
        return null;
    }
@SuppressWarnings("unchecked")
@Override
public <T> TypedCouchDbRepositorySupport<T> getDefaultRepo() {
// TODO Auto-generated method stub
return (TypedCouchDbRepositorySupport<T>) this.metricsRepoAll;
}
@SuppressWarnings("unchecked")
@Override
public <T> List<TypedCouchDbRepositorySupport<T>> getAllRepos() {
// TODO Auto-generated method stub
List<TypedCouchDbRepositorySupport<T>> repoList = new ArrayList<TypedCouchDbRepositorySupport<T>>();
repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoAll);
repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppId);
repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByAppIdBetween);
repoList.add((TypedCouchDbRepositorySupport<T>) this.metricsRepoByServiceIdBefore);
return repoList;
}
}<|fim▁end|>
|
// TODO Auto-generated method stub
return this.metricsRepoAll.getAllRecords();
|
<|file_name|>zone0.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding=UTF-8
#
import copy
import time
import socket
import logging
import vdns.common
class Zone0:
"""
Base class for producing zone files
"""
    def __init__(self, dt):
        # dt: dict of zone data ('_domain', SOA fields, record lists, ...)
        # built elsewhere in vdns; every make_* method below reads from it.
        self.dt=dt
def fmttd(self, td):
"""
Format a timedelta value to something that's appropriate for
zones
"""
lst=((1, '', 'second', 'seconds'),
(60, 'M', 'minute', 'minutes'),
(3600, 'H', 'hour', 'hours'),
(86400, 'D', 'day', 'days'),
(86400*7, 'W', 'week', 'weeks'))
ts=int(td.total_seconds())
# Find the first value that doesn't give an exact result
ent=lst[0]
for i in lst:
if (ts % i[0]) != 0:
break
ent=i
ret1="%d%s" % (int(ts/ent[0]), ent[1])
# Now form the human readable string
rem=ts
ret2=[]
for i in reversed(lst):
t=int(rem / i[0])
rem=rem % i[0]
if t==0:
continue
if t==1:
unit=i[2]
else:
unit=i[3]
st='%s %s' % (t, unit)
ret2.append(st)
# Speadup
if rem==0:
break
ret2st=', '.join(ret2)
ret=(ret1, ret2st)
return(ret)
    def make_ptr_name(self, rec):
        """
        Format the name of a PTR record (i.e. reverse IPv4 or IPv6).

        rec must carry 'family' (4 or 6) and 'ip_str'.  If the resulting
        reverse name ends with this zone's domain, that suffix is stripped
        so the entry can be emitted relative to $ORIGIN.
        """
        if rec['family']==4:
            # a.b.c.d -> d.c.b.a.in-addr.arpa
            rev=rec['ip_str'].split('.')
            rev.reverse()
            rev='.'.join(rev)
            ret=rev + '.in-addr.arpa'
        elif rec['family']==6:
            # reverse_name() wants a prefix; a single host is a /128
            ip2=rec['ip_str'] + '/128'
            ret=vdns.common.reverse_name(ip2)
            # logging.error('Unhandled address family: %s' % (rec['family'], ))
            # ret=''
        else:
            logging.error('Unknown address family: %s' % (rec['family'], ))
            ret=''
        # Get rid of the suffix if we can
        domain=self.dt['_domain']
        if ret[-len(domain):]==domain:
            ret=ret[:-len(domain)-1]
        return(ret)
# def make_soa(self, incserial):
def make_soa(self):
"""!
NO @param incserial If True then increment the serial number
"""
dt=self.dt
dt2={
# 'serial': self.mkserial(dt, incserial),
'serial': dt['serial'],
'domain': dt['_domain'],
'contact': dt['contact'],
'ns0': dt['ns0'],
}
<|fim▁hole|> t=self.fmttd(dt[i])
dt2[i]=t[0]
dt2[i+'2']=t[1]
st="""\
$ORIGIN %(domain)s.
$TTL %(ttl)s ; %(ttl2)s
@ %(ttl)s IN SOA %(ns0)s. %(contact)s. (
%(serial)-10s ; serial
%(refresh)s ; refresh (%(refresh2)s)
%(retry)s ; retry (%(retry2)s)
%(expire)s ; expire (%(expire2)s)
%(minimum)s ; minimum (%(minimum2)s)
)
""" % dt2
return(st)
    def fmtrecord(self, name, ttl, rr, data):
        """
        Format a record line.
        This is a dumb function that concatenates data, translating ttl.
        Use mkrecord instead.
        @param name The hostname
        @param ttl The TTL as a datetime.timedelta, or None to omit it
        @param rr The type of the record
        @param data A freeform string
        @return The formed entry
        """
        if ttl==None:
            ttl2=''
        else:
            # fmttd() turns the timedelta into a compact value like '1H'
            t=self.fmttd(ttl)
            ttl2=' ' + t[0]

        ret="%-16s%s IN %s %s" % \
            (name, ttl2, rr, data)

        return(ret)
def split_txt(self, data):
"""
Split TXT data to chunks of max 255 bytes to comply with bind
@param data An unquoted string of arbitrary length
@return A quoted string to be used as TXT record
"""
limit=255
items=[]
data2=copy.deepcopy(data)
while len(data2)>limit:
items.append(data2[:limit])
data2=data2[limit:]
items.append(data2)
ret='"' + '" "'.join(items) + '"'
return(ret)
    def mkrecord(self, rr, rec):
        """
        Create one or more record lines for a record of type rr.

        @param rr The record type. One of: mx, ns, ds, a, aaaa, ptr,
                  cname/cnames, txt, dnssec, sshfp, dkim, srv
        @param rec Dict with the type-specific fields plus 'ttl' and,
                   usually, 'hostname'
        @return The formed entry (one line per data item, newline-terminated)
        """
        # If this is true then we will make sure that there is a dot
        # at the end of the name
        needsdot=False
        # Allow this to be changed by a type (i.e. PTR)
        hostname=None
        if rr=='mx':
            rrname='MX'
            data="%-4d %s" % (rec['priority'], rec['mx'])
            if rec['mx'].count('.')>=2:
                needsdot=True
        elif rr=='ns':
            rrname='NS'
            data=rec['ns']
            if rec['ns'].count('.')>=2:
                needsdot=True
        elif rr=='ds':
            # Emit both SHA-1 (digest type 1) and SHA-256 (type 2) digests.
            rrname='DS'
            data=[]
            data.append("%d %d %d %s" % (rec['keyid'], rec['algorithm'],
                1, rec['digest_sha1']))
            data.append("%d %d %d %s" % (rec['keyid'], rec['algorithm'],
                2, rec['digest_sha256']))
        elif rr=='a':
            rrname='A'
            data=rec['ip_str'].split('/')[0]
        elif rr=='aaaa':
            rrname='AAAA'
            data=rec['ip_str'].split('/')[0]
        elif rr=='ptr':
            # TODO: This is broken. We need to inverse the ip
            # and take care of ipv6 as well
            rrname='PTR'
            data="%s.%s." % (rec['hostname'], rec['domain'])
            hostname=self.make_ptr_name(rec)
            needsdot=True
        elif rr in ('cname', 'cnames'):
            rrname='CNAME'
            data=rec['hostname0']
            if rec['hostname0'].count('.')>=2:
                needsdot=True
        elif rr=='txt':
            rrname='TXT'
            data='"%s"' % (rec['txt'],)
        elif rr=='dnssec':
            rrname='DNSKEY'
            # Flags: 257 marks a key-signing key, 256 a zone-signing key.
            if rec['ksk']:
                flags=257
            else:
                flags=256
            # rec['hostname']=rec['domain']
            data='%s 3 %s %s' % (flags, rec['algorithm'], rec['key_pub'])
        elif rr=='sshfp':
            rrname='SSHFP'
            data='%(keytype)d %(hashtype)d %(fingerprint)s' % rec
        elif rr=='dkim':
            # DKIM keys are published as TXT at <selector>._domainkey[.host]
            rrname='TXT'
            hostname='%(selector)s._domainkey' % rec
            if 'hostname' in rec and rec['hostname']:
                hostname+='.'+rec['hostname']
            data0=[]
            data0.append('v=DKIM1')
            if rec['g']!=None: data0.append('g=' + rec['g'])
            data0.append('k=' + rec['k'])
            data0.append('s=email')
            if rec['t'] or not rec['subdomains']:
                if rec['t']:
                    if rec['subdomains']:
                        t='y'
                    else:
                        t='s:y'
                else:
                    t='s'
                data0.append('t='+t)
            if rec['h']!=None: data0.append('h=' + rec['h'])
            data0.append('p=' + rec['key_pub'])
            data=self.split_txt('; '.join(data0))
        elif rr=='srv':
            rrname='SRV'
            hostname='_%(service)s._%(protocol)s' % rec
            if rec['name']!='':
                hostname+='.' + rec['name']
            data='%(priority)s %(weight)s %(port)s %(target)s' % rec
            if rec['target'].count('.')>=1:
                needsdot=True
        else:
            vdns.common.abort("Unhandled RR type %s: %s" % (rr, rec))
        # Normalize: some branches (ds) produce several data lines.
        if type(data)!=list:
            data=[data]
        if needsdot:
            for i in range(len(data)):
                if data[i][-1]!='.':
                    data[i]+='.'
        if hostname==None:
            if 'hostname' in rec:
                hostname=rec['hostname']
            else:
                hostname=''
        if hostname=='.':
            hostname=''
        ttl=rec['ttl']
        #ret=self.fmtrecord(hostname, self.dt['ttl'], rrname, data)
        ret=''
        for d in data:
            ret+=self.fmtrecord(hostname, ttl, rrname, d)
            ret+='\n'
        return(ret)
    def mkrecord_a_aaaa(self, rec):
        """!
        Auto-determine A or AAAA from the address family and call mkrecord.
        @param rec The record. Its 'ip_str' must be an IPv4 or IPv6 address.
        @return The result of mkrecord()
        """
        if vdns.common.addr_family(rec['ip_str'])==4:
            ret=self.mkrecord('a', rec)
        else:
            ret=self.mkrecord('aaaa', rec)
        return(ret)
    def make_toplevel(self):
        """
        Create the top-level entries.
        These are the entries with empty hostname or hostname=='.'
        (NS, MX, DNSKEY, TXT, apex A/AAAA, then DKIM/SRV).
        """
        lst=['ns', 'mx', 'dnssec', 'txt']
        ret=''
        for typ in lst:
            if not typ in self.dt:
                continue
            recs=self.dt[typ]
            for rec in recs:
                if 'hostname' in rec and \
                   not (rec['hostname']=='' or rec['hostname']=='.'):
                    continue
                ret+=self.mkrecord(typ, rec)
        if 'hosts' in self.dt:
            for rec in self.dt['hosts']:
                if rec['hostname']!='':
                    continue
                ret+=self.mkrecord_a_aaaa(rec)
        # Add DKIM and SRV here (last) since they have a host part
        for x in ('dkim', 'srv'):
            if x in self.dt:
                for rec in self.dt[x]:
                    if rec['hostname']!='':
                        continue
                    ret+=self.mkrecord(x, rec)
        return(ret)

    def make_subzones(self):
        """
        Create entries that are considered subdomains (delegations).
        For now these are entries that have NS; their DS records and any
        needed glue A/AAAA records are emitted as well.
        """
        lst=['ns', 'ds']
        ret=''
        glue=''
        for sub in sorted(self.dt['subs']):
            ret+='\n'
            for typ in lst:
                recs=self.dt['subs'][sub][typ]
                for rec in recs:
                    ret+=self.mkrecord(typ, rec)
            recs=self.dt['subs'][sub]['glue']
            for rec in recs:
                glue+=self.mkrecord_a_aaaa(rec)
        if glue!='':
            ret+='\n; Glue records\n'
            ret+=glue
        return(ret)
    def make_hosts(self):
        """
        Make the host entries.
        Host entries are accompanied with relevant records like CNAMEs,
        TXTs, SSHFPs and DKIM, grouped next to the host's A/AAAA record.
        """
        done=[]  # List of entries already handled
        ret=''
        subdomaintypes=['ns']
        lst=['txt', 'sshfp']
        # Determine entries to be excluded
        # - since we added them previously
        for typ in subdomaintypes:
            if not typ in self.dt:
                continue
            recs=self.dt[typ]
            for rec in recs:
                t=rec['hostname']
                if not t in done:
                    done.append(t)
        # Examine all hosts
        # hosts2=dict([(h['ip'], h) for h in self.dt['hosts']])
        # ips=hosts2.keys()
        # ips.sort()
        for rec in self.dt['hosts']:
            # for ip in ips:
            # rec=hosts2[ip]
            hostname=rec['hostname']
            # Apex (empty-hostname) records were handled by make_toplevel()
            if hostname=='':
                continue
            #ip=rec['ip']
            ret+=self.mkrecord_a_aaaa(rec)
            if hostname in done:
                continue
            done.append(hostname)
            # Add additional info here
            for typ in lst:
                if not typ in self.dt:
                    continue
                recs2=self.dt[typ]
                for rec2 in recs2:
                    if rec2['hostname']!=hostname:
                        continue
                    # Blank the hostname so the extra record rides the
                    # host's line position in the zone file.
                    rec3=copy.deepcopy(rec2)
                    rec3['hostname']=''
                    ret+=self.mkrecord(typ, rec3)
            # CNAMEs are special. We look for cnames that are
            # pointing to this host
            if 'cnames' in self.dt:
                recs2=self.dt['cnames']
                for rec2 in recs2:
                    if rec2['hostname0']!=hostname:
                        continue
                    ret+=self.mkrecord('cnames', rec2)
                    done.append(rec2['hostname'])
            # Add DKIM here (last) as it has a hostname part
            for rec2 in self.dt['dkim']:
                if rec2['hostname']!=hostname:
                    continue
                ret+=self.mkrecord('dkim', rec2)
        # Now do the rest cnames (those not pointing at a known host)
        rests=['cnames', 'txt']
        for rr in rests:
            if rr in self.dt:
                ret+='\n'
                for rec in self.dt[rr]:
                    if rec['hostname']=='':
                        continue
                    if not rec['hostname'] in done:
                        ret+=self.mkrecord(rr, rec)
        return(ret)
    def make_reverse(self):
        """
        Make the reverse (PTR) entries for hosts flagged with 'reverse'.
        """
        ret=''
        # Create a dict and sort the keys. We list IPv4 before IPv6.
        # Keys are: X-Y where X is 4 or 6 depending on the family and
        # Y is the numerical representation of the address as returned by
        # inet_pton. All of this to be able to sort based on numerical
        # value instead of string representation
        hosts={}
        for x in self.dt['hosts']:
            # Skip entries that are not designated as reverse
            if not x['reverse']:
                continue
            family0=vdns.common.addr_family(x['ip'])
            if family0==4:
                family=socket.AF_INET
            else:
                family=socket.AF_INET6
            #k=str(family0) + '-' + str(socket.inet_pton(family, x['ip_str']))
            # Use bytestring to fix the sorting issue with python3
            # python3: bytes(family0) fails because bytes() expects an
            # iterable. Using a list does the trick
            k=bytes([family0]) + b'-' + \
                socket.inet_pton(family, x['ip_str'])
            hosts[k]=x
        for x in sorted(hosts):
            rec=hosts[x]
            ret+=self.mkrecord('ptr', rec)
        return(ret)

    def make_keys(self):
        """
        Make the DNSSEC key files.
        Returns a list of entries. Each entry is a tuple of:
        (type, fn, contents)
        Where type is 'key' or 'private' and fn follows the BIND naming
        convention K<domain>.+<algorithm>+<keyid>.{key,private}.
        """
        ret=[]
        for x in self.dt['dnssec']:
            fn0="K%s.+%03d+%d" % (x['domain'], x['algorithm'], x['keyid'])
            fn=fn0 + '.key'
            rec=('key', fn, x['st_key_pub'])
            ret.append(rec)
            fn=fn0 + '.private'
            rec=('private', fn, x['st_key_priv'])
            ret.append(rec)
        return(ret)
if __name__=="__main__":
pass
# vim: set ts=8 sts=4 sw=4 et formatoptions=r ai nocindent:<|fim▁end|>
|
times=('ttl', 'refresh', 'retry', 'expire', 'minimum')
for i in times:
|
<|file_name|>bdz.py<|end_file_name|><|fim▁begin|>from b_hash import b_hash
from b_hash import NoData
from jenkins import jenkins
from h3_hash import h3_hash
from jenkins import jenkins_fast, jenkins_wrapper
from graph import *
from collections import deque
from bitstring import BitArray
import math
class bdz(b_hash):
"""Class for perfect hash function generated by the BDZ algorithm. This algorithms uses uniform random hypergraph."""
def __init__(self):
b_hash.__init__(self)
self.known_keys = False #Keyset is not set
self.function_number = 3 #random 3-graph
self.iteration_limit = 5
self.ratio = 1.24 #ratio between keyset size and theconsumed memory
self.limit = -1
self.m = -1
self.g = None;
    def get_g(self):
        """Return the g array (part of the seed).

        Must not be called before generate_seed(), since the array is
        only built there; until then it is None.
        """
        return self.g
    def get_range(self):
        """Return the size of the biggest possible hash value (m).

        Returns -1 while the range is still unknown (it is computed when
        the key set is processed).
        """
        return self.m
    def get_ratio(self):
        """Return the ratio c between the key set size and the memory consumed."""
        return self.ratio
    def set_ratio(self,ratio):
        """Set the ratio c, and therefore the size of the PHF data structure."""
        self.ratio = ratio
    def set_limit(self, limit):
        """Set the memory bank size (in slots) for one hash function.

        Can be used instead of set_ratio().  BDZ computes
        `function_number` hash functions with non-overlapping outputs
        that index into the memory, so a caller who knows the available
        memory can set this to memory/function_number.  The ratio (and
        the other derived parameters) is recomputed from it when the key
        set is supplied, and a positive limit always takes precedence
        over the ratio.  Set a negative value to go back to using the
        ratio.
        """
        self.limit = limit;
    def get_iteration_limit(self):
        """Return the maximum number of attempts at creating the PHF.

        The BDZ graph construction can fail, so creation is retried up
        to this many times.
        """
        return self.iteration_limit
    def set_iteration_limit(self,iteration_limit):
        """Set the maximum number of attempts at creating the PHF."""
        self.iteration_limit = iteration_limit
    def get_order(self):
        """Return the number of uniform hash functions (the hypergraph order)."""
        return self.function_number
    def set_order(self,number):
        """Set the number of hash functions used to build the hypergraph.

        Must not be changed after the PHF has been generated.
        """
        self.function_number = number
def set_keys(self, key_set):
"""This is a perfect hash function. For the construction of the PHF, the set of keys has to be known. This function gives set of keys to the function, so generate_seed can build correct function"""
self.key_set = key_set
self.known_keys = True
if self.limit > 0 :
#The limit is set, recompute ratio for the given limit
self.ratio = (3.0*self.limit)/len(key_set)
    def is_key_set(self):
        """Return True once a key set has been supplied via set_keys()."""
        return self.known_keys
def _found_graph(self):
"""This is internal function. It generate random hypergraph according to the specification in the bdz class. It returns a queue of the edge and changes internal datastructure of BDZ class. Returned edges are ordered in such way, that they can be used for the construction of the PHF"""
#First step is to initialize seed
self.seed = dict()
#Second step is to generate the random hash functions
hashes = list()
for i in range(0,self.function_number):
x = jenkins_wrapper()
x.generate_seed()
# x = h3_hash()
# x.set_bitsize(16)
# x.set_input_size(len(self.key_set[0]))
# x.generate_seed()
hashes.append(x)
self.seed["hashes"] = hashes
#setting m
self.m = int(math.ceil(self.ratio * len(self.key_set)))
limit = int(math.ceil(float(self.m) /self.function_number))
self.m = 3*limit
#print("XXXXXXXXXXXXXXX",limit, self.m)
#Generation of hypergraph
hyper = graph()
hyper.set_order(self.function_number)
hyper.add_vertices(self.m)
#Generation of the edges of the hypergraph
for x in self.key_set:
values = list()
for i in self.seed["hashes"]:
#print("test",i.hash(x)%limit,limit*len(values))
vertex = (i.hash(x) % limit) + limit*len(values)
values.append(vertex)
#Add this edge into the hypergraph
e = hyper.add_edge(values)
# print(e.get_vertices())
#Add edge to the vertices
for v in values:
hyper.get_vertex(v).add_edge(e)
#Generate queue for the edge evaluation
queue_list = []
queue = deque()
#Boolean vector of the used edges
used = [False] * hyper.get_edge_number()
#First remove edges that have at least one vertex with degree 1
for i in range(0,hyper.get_edge_number()):
vert = hyper.get_edge(i).get_vertices()<|fim▁hole|> #print([hyper.get_vertex(x).get_degree() for x in vert])
Deg = [hyper.get_vertex(x).get_degree() == 1 for x in vert]
if sum(Deg) > 0 and used[i] == False:
#This edge has at least one vertex with degree 1
used[i] = True
queue_list.append(i)
queue.append(i)
#Removing edges that have unique vertex (on the stack)
#adding a new edges with unique vertex into stack
while(len(queue)>0):
edge = queue.popleft()
#remove edge from the graph (only from vertex and decrease degree)
for v in hyper.get_edge(edge).get_vertices():
hyper.get_vertex(v).get_edges().remove(hyper.get_edge(edge))
deg = hyper.get_vertex(v).get_degree() - 1
#print("KVIK",deg)
hyper.get_vertex(v).set_degree(deg)
#if degree decrease to 1, the remaining edge should be added
#into the queue
if(deg == 1):
#Found the edge position
e1 = hyper.get_vertex(v).get_edges()[0]
position = hyper.get_edge_position(e1)
#If it is not in the queue, put it there
if used[position] == False:
queue.append(position)
queue_list.append(position)
used[position] = True
self.hyper = hyper
return queue_list
def _found_g(self,v,ed,vi):
"""This function computes value of the g array for given vertex. It uses plus operation."""
s = [self.g[s1] for s1 in self.hyper.get_edge(ed).get_vertices()]
sum1 = sum(s)-s[vi];
self.g[v] = (vi-sum1)%len(s)
return True;
def _found_g2(self,v,ed,vi):
"""This function computes value of the g array for given vertex by the use of the xor function. Assumes two bit representation of the g array"""
s = [self.g[s1] for s1 in self.hyper.get_edge(ed).get_vertices()]
sum1 = s[0];
for index in range(1,len(self.hyper.get_edge(ed).get_vertices())):
sum1 = sum1^s[index]
sum1 = sum1^s[vi]
self.g[v] = (vi^sum1)&3 #3 is the 11 in binary, therefore it clear all the higher bits to zero
return True
def generate_seed(self):
"""This function generates the PHF function according to the BDZ algorithm"""
if not self.known_keys:
raise NoData("The key set is unknown")
size = 0
iteration = 0
while(size != len(self.key_set) and self.iteration_limit > iteration):
queue = self._found_graph()
size = len(queue)
iteration = iteration+1
if(len(queue) != len(self.key_set)):
return False
self.g = [3] * self.m
marked_vertices = [False] *self.m
while(len(queue) > 0):
ed = queue.pop()
worked = False
for vi in range(0,len(self.hyper.get_edge(ed).get_vertices())):
v = self.hyper.get_edge(ed).get_vertices()[vi]
if(marked_vertices[v] == False and worked == False):
worked = self._found_g2(v,ed,vi)
marked_vertices[v] = True
# print(self.g)
# print(self.g)
# print(len(queue))
# print(len(self.key_set))
def hash(self, key):
limit = int(self.m /self.function_number)
# print(limit)
hashes = [x.hash(key)%limit for x in self.seed["hashes"]]
h1 = [hashes[x]+x*limit for x in range(0,len(hashes))]
g_val = [self.g[x] for x in h1]
sum1 = g_val[0];
for index in range(1,len(g_val)):
sum1 = sum1^g_val[index]
h = sum1&3
if h>=len(hashes):
h = 0
return -1
# print("Nonexistent key")
#print(hashes,g_val)
#h = sum(g_val)%len(g_val)
return hashes[h]+(limit*h)<|fim▁end|>
| |
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 TiKV Project Authors. Licensed under Apache-2.0.
mod batch;
mod config;
mod fsm;
mod mailbox;
mod router;
#[cfg(feature = "test-runner")]
pub mod test_runner;<|fim▁hole|>pub use self::config::Config;
pub use self::fsm::{Fsm, Priority};
pub use self::mailbox::{BasicMailbox, Mailbox};
pub use self::router::Router;<|fim▁end|>
|
pub use self::batch::{create_system, BatchRouter, BatchSystem, HandlerBuilder, PollHandler};
|
<|file_name|>pyparser_geoparser_testing.py<|end_file_name|><|fim▁begin|>#parser_testing.py
import os, sys, re, StringIO
sys.path.append('/Users/Jason/Dropbox/JournalMap/scripts/GeoParsers')
#from jmap_geoparser_re import *
from jmap_geoparser import *
#def test_parsing():
test = "blah blah blah 45º 23' 12'', 123º 23' 56'' and blah blah blah 32º21'59''N, 115º 23' 14''W blah blah blah"
coords = coordinateParser.searchString(test)
for coord in coords:
assert coordinate(coord).calcDD(), "Coordinate Transform Error for "+str(coord)
test = "45.234º, 123.43º"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 45.234, 'longitude': 123.43}
test = "-45º 23' 12'', -123º 23' 56''"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -45.38667, 'longitude': 123.39889}
test = "32º21'59''N, 115º 23' 14''W"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 32.36639, 'longitude': -115.38722}
test = "12 43 56 North, 23 56 12 East"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 12.73222, 'longitude': 23.93667}
test = "52 15 10N, 0 01 54W"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.25278, 'longitude': -0.03167}
test = "52 35 31N, 1 28 05E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.59194, 'longitude': 1.46806}
test = "30° 47' N, 34° 46' E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 30.78333, 'longitude': 34.76667}
'''
test = "AT; 1 spm, CN 3-41, 21°00′ N, 112°30′ E"<|fim▁hole|>assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 27.73143, 'longitude': 34.26105}
test = '49°17’13”N, 13°40’18”E'
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 49.28694, 'longitude': 13.67167}
test = '45.9215º; -76.6219º'
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -45.9215, 'longitude': 76.6219}
test = "latitude 32°47′47″ S and longitude 26°50′56″ E"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': -32.79639, 'longitude': 26.84889}
test = "N15°46′ W87°00'"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 15.76667, 'longitude': -87.0}
test = "latitude of 35°13', longitude of 4°11'"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 35.21667, 'longitude': 4.18333}
test = "expects to find coordinates: 52 degrees, 42 minutes north, 124 degrees, 50 minutes west"
assert coordinate(coordinateParser.parseString(test)).calcDD() == {'latitude': 52.7, 'longitude': -124.83333}
# Should return an exception, but instead calculates latitude as 6º 10'
#test = "expects to find coordinates: 5°70'N, 73°46'W" # Minutes greater than 60
#test = "expects not to find: 4.5–5.0 "
'''<|fim▁end|>
|
for result, start, end in coordinateParser.scanString(test):
assert coordinate(result).calcDD() == {'latitude': 21.0, 'longitude': 112.5}
test = '27°43.886, 34°15.663'
|
<|file_name|>pkg_build.go<|end_file_name|><|fim▁begin|>package main
import (
"flag"
"fmt"
"os"
"path/filepath"
"github.com/linuxkit/linuxkit/src/cmd/linuxkit/pkglib"
)
func pkgBuild(args []string) {
flags := flag.NewFlagSet("pkg build", flag.ExitOnError)
flags.Usage = func() {
invoked := filepath.Base(os.Args[0])
fmt.Fprintf(os.Stderr, "USAGE: %s pkg build [options] path\n\n", invoked)
fmt.Fprintf(os.Stderr, "'path' specifies the path to the package source directory.\n")
fmt.Fprintf(os.Stderr, "\n")
flags.PrintDefaults()
}
force := flags.Bool("force", false, "Force rebuild")
p, err := pkglib.NewFromCLI(flags, args...)<|fim▁hole|> }
fmt.Printf("Building %q\n", p.Tag())
opts := []pkglib.BuildOpt{pkglib.WithBuildImage()}
if *force {
opts = append(opts, pkglib.WithBuildForce())
}
if err := p.Build(opts...); err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
}
}<|fim▁end|>
|
if err != nil {
fmt.Fprintf(os.Stderr, "%v\n", err)
os.Exit(1)
|
<|file_name|>test_stack_lock.py<|end_file_name|><|fim▁begin|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from unittest import mock
from heat.common import exception
from heat.common import service_utils
from heat.engine import stack_lock
from heat.objects import stack as stack_object
from heat.objects import stack_lock as stack_lock_object
from heat.tests import common
from heat.tests import utils
class StackLockTest(common.HeatTestCase):
    def setUp(self):
        """Create a dummy context/ids and stub Stack.get_by_id with a mock stack."""
        super(StackLockTest, self).setUp()
        self.context = utils.dummy_context()
        self.stack_id = "aae01f2d-52ae-47ac-8a0d-3fde3d220fea"
        self.engine_id = service_utils.generate_engine_id()
        stack = mock.MagicMock()
        stack.id = self.stack_id
        stack.name = "test_stack"
        stack.action = "CREATE"
        self.mock_get_by_id = self.patchobject(
            stack_object.Stack, 'get_by_id', return_value=stack)
    class TestThreadLockException(Exception):
        """Sentinel raised inside the lock context managers under test."""
        pass
    def test_successful_acquire_new_lock(self):
        """acquire() succeeds when StackLock.create finds no existing lock."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        slock.acquire()
        mock_create.assert_called_once_with(
            self.context, self.stack_id, self.engine_id)
    def test_failed_acquire_existing_lock_current_engine(self):
        """acquire() raises ActionInProgress when this engine already holds the lock."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value=self.engine_id)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.assertRaises(exception.ActionInProgress, slock.acquire)
        self.mock_get_by_id.assert_called_once_with(
            self.context,
            self.stack_id,
            show_deleted=True,
            eager_load=False)
        mock_create.assert_called_once_with(
            self.context, self.stack_id, self.engine_id)
    def test_successful_acquire_existing_lock_engine_dead(self):
        """acquire() steals the lock when the holding engine is dead."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value='fake-engine-id')
        mock_steal = self.patchobject(stack_lock_object.StackLock,
                                      'steal',
                                      return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.patchobject(service_utils, 'engine_alive', return_value=False)
        slock.acquire()
        mock_create.assert_called_once_with(
            self.context, self.stack_id, self.engine_id)
        mock_steal.assert_called_once_with(
            self.context, self.stack_id, 'fake-engine-id', self.engine_id)
    def test_failed_acquire_existing_lock_engine_alive(self):
        """acquire() raises ActionInProgress when another live engine holds the lock."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value='fake-engine-id')
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.patchobject(service_utils, 'engine_alive', return_value=True)
        self.assertRaises(exception.ActionInProgress, slock.acquire)
        self.mock_get_by_id.assert_called_once_with(
            self.context,
            self.stack_id,
            show_deleted=True,
            eager_load=False)
        mock_create.assert_called_once_with(
            self.context, self.stack_id, self.engine_id)
    def test_failed_acquire_existing_lock_engine_dead(self):
        """acquire() raises when the steal from a dead engine is lost to a third engine."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value='fake-engine-id')
        mock_steal = self.patchobject(stack_lock_object.StackLock,
                                      'steal',
                                      return_value='fake-engine-id2')
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.patchobject(service_utils, 'engine_alive', return_value=False)
        self.assertRaises(exception.ActionInProgress, slock.acquire)
        self.mock_get_by_id.assert_called_once_with(
            self.context,
            self.stack_id,
            show_deleted=True,
            eager_load=False)
        mock_create.assert_called_once_with(
            self.context, self.stack_id, self.engine_id)
        mock_steal.assert_called_once_with(
            self.context, self.stack_id, 'fake-engine-id', self.engine_id)
    def test_successful_acquire_with_retry(self):
        """acquire() succeeds when the second create/steal attempt works."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value='fake-engine-id')
        mock_steal = self.patchobject(stack_lock_object.StackLock,
                                      'steal',
                                      side_effect=[True, None])
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.patchobject(service_utils, 'engine_alive', return_value=False)
        slock.acquire()
        mock_create.assert_has_calls(
            [mock.call(self.context, self.stack_id, self.engine_id)] * 2)
        mock_steal.assert_has_calls(
            [mock.call(self.context, self.stack_id,
                       'fake-engine-id', self.engine_id)] * 2)
    def test_failed_acquire_one_retry_only(self):
        """acquire() retries the create/steal cycle only once before giving up."""
        mock_create = self.patchobject(stack_lock_object.StackLock,
                                       'create',
                                       return_value='fake-engine-id')
        mock_steal = self.patchobject(stack_lock_object.StackLock,
                                      'steal',
                                      return_value=True)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        self.patchobject(service_utils, 'engine_alive', return_value=False)
        self.assertRaises(exception.ActionInProgress, slock.acquire)
        self.mock_get_by_id.assert_called_with(
            self.context,
            self.stack_id,
            show_deleted=True,
            eager_load=False)
        mock_create.assert_has_calls(
            [mock.call(self.context, self.stack_id, self.engine_id)] * 2)
        mock_steal.assert_has_calls(
            [mock.call(self.context, self.stack_id,
                       'fake-engine-id', self.engine_id)] * 2)
    def test_context_mgr_exception(self):
        """The lock context manager releases the lock when the body raises."""
        stack_lock_object.StackLock.create = mock.Mock(return_value=None)
        stack_lock_object.StackLock.release = mock.Mock(return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        def check_lock():
            with slock:
                self.assertEqual(1,
                                 stack_lock_object.StackLock.create.call_count)
                raise self.TestThreadLockException
        self.assertRaises(self.TestThreadLockException, check_lock)
        self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
def test_context_mgr_noexception(self):
stack_lock_object.StackLock.create = mock.Mock(return_value=None)
stack_lock_object.StackLock.release = mock.Mock(return_value=None)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
with slock:
self.assertEqual(1,
stack_lock_object.StackLock.create.call_count)
self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
    def test_thread_lock_context_mgr_exception_acquire_success(self):
        """thread_lock() releases the lock if the body raises after acquiring."""
        stack_lock_object.StackLock.create = mock.Mock(return_value=None)
        stack_lock_object.StackLock.release = mock.Mock(return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        def check_thread_lock():
            with slock.thread_lock():
                self.assertEqual(1,
                                 stack_lock_object.StackLock.create.call_count)
                raise self.TestThreadLockException
        self.assertRaises(self.TestThreadLockException, check_thread_lock)
        self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
    def test_thread_lock_context_mgr_exception_acquire_fail(self):
        """thread_lock() does not release when the acquire itself failed."""
        stack_lock_object.StackLock.create = mock.Mock(
            return_value=self.engine_id)
        stack_lock_object.StackLock.release = mock.Mock()
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        def check_thread_lock():
            with slock.thread_lock():
                self.assertEqual(1,
                                 stack_lock_object.StackLock.create.call_count)
                raise exception.ActionInProgress
        self.assertRaises(exception.ActionInProgress, check_thread_lock)
        self.assertFalse(stack_lock_object.StackLock.release.called)
    def test_thread_lock_context_mgr_no_exception(self):
        """thread_lock() leaves the lock held (no release) on normal exit."""
        stack_lock_object.StackLock.create = mock.Mock(return_value=None)
        stack_lock_object.StackLock.release = mock.Mock(return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        with slock.thread_lock():
            self.assertEqual(1, stack_lock_object.StackLock.create.call_count)
        self.assertFalse(stack_lock_object.StackLock.release.called)
def test_try_thread_lock_context_mgr_exception(self):
stack_lock_object.StackLock.create = mock.Mock(return_value=None)
stack_lock_object.StackLock.release = mock.Mock(return_value=None)<|fim▁hole|> with slock.try_thread_lock():
self.assertEqual(1,
stack_lock_object.StackLock.create.call_count)
raise self.TestThreadLockException
self.assertRaises(self.TestThreadLockException, check_thread_lock)
self.assertEqual(1, stack_lock_object.StackLock.release.call_count)
    def test_try_thread_lock_context_mgr_no_exception(self):
        """try_thread_lock() leaves the lock held on normal exit."""
        stack_lock_object.StackLock.create = mock.Mock(return_value=None)
        stack_lock_object.StackLock.release = mock.Mock(return_value=None)
        slock = stack_lock.StackLock(self.context, self.stack_id,
                                     self.engine_id)
        with slock.try_thread_lock():
            self.assertEqual(1, stack_lock_object.StackLock.create.call_count)
        self.assertFalse(stack_lock_object.StackLock.release.called)
def test_try_thread_lock_context_mgr_existing_lock(self):
stack_lock_object.StackLock.create = mock.Mock(return_value=1234)
stack_lock_object.StackLock.release = mock.Mock(return_value=None)
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
def check_thread_lock():
with slock.try_thread_lock():
self.assertEqual(1,
stack_lock_object.StackLock.create.call_count)
raise self.TestThreadLockException
self.assertRaises(self.TestThreadLockException, check_thread_lock)
self.assertFalse(stack_lock_object.StackLock.release.called)<|fim▁end|>
|
slock = stack_lock.StackLock(self.context, self.stack_id,
self.engine_id)
def check_thread_lock():
|
<|file_name|>character.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core';
import { Character } from '../../../data/models/character';<|fim▁hole|> selector: 'lc-character',
templateUrl: 'character.component.html',
styleUrls: [ 'character.component.scss' ],
})
export class CharacterComponent {
@Input() public excludeLinks: LinkLocation[];
@Input() public character: Character;
}<|fim▁end|>
|
import { LinkLocation } from '../../directive/insertLinks/insertLinks.directive';
@Component({
|
<|file_name|>VehicleMakePage.java<|end_file_name|><|fim▁begin|>package uk.gov.dvsa.ui.pages.vehicleinformation;
import org.openqa.selenium.Keys;<|fim▁hole|>import org.openqa.selenium.support.FindBy;
import uk.gov.dvsa.domain.model.vehicle.Make;
import uk.gov.dvsa.domain.navigation.PageNavigator;
import uk.gov.dvsa.framework.config.webdriver.MotAppDriver;
import uk.gov.dvsa.helper.FormDataHelper;
import uk.gov.dvsa.helper.PageInteractionHelper;
import uk.gov.dvsa.ui.pages.Page;
public class VehicleMakePage extends Page {
private static final String PAGE_TITLE = "What is the vehicle's make?";
public static final String PATH = "/create-vehicle/make";
@FindBy(id = "vehicleMake") private WebElement vehicleMakeDropdown;
@FindBy(className = "button") private WebElement continueButton;
public VehicleMakePage(MotAppDriver driver) {
super(driver);
selfVerify();
}
@Override
protected boolean selfVerify() {
return PageInteractionHelper.verifyTitle(this.getTitle(), PAGE_TITLE);
}
public VehicleMakePage selectMake(Make make){
FormDataHelper.selectFromDropDownByValue(vehicleMakeDropdown, make.getId().toString());
vehicleMakeDropdown.sendKeys(Keys.TAB);
return this;
}
public VehicleModelPage continueToVehicleModelPage() {
continueButton.click();
return new VehicleModelPage(driver);
}
public VehicleModelPage updateVehicleMake(Make make) {
FormDataHelper.selectFromDropDownByValue(vehicleMakeDropdown, make.getId().toString());
vehicleMakeDropdown.sendKeys(Keys.TAB);
return continueToVehicleModelPage();
}
}<|fim▁end|>
|
import org.openqa.selenium.WebElement;
|
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// This is a manifest file that'll be compiled into application.js, which will include all the files<|fim▁hole|>// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts,
// or vendor/assets/javascripts of plugins, if any, can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file.
//
// Read Sprockets README (https://github.com/sstephenson/sprockets#sprockets-directives) for details
// about supported directives.
//
//
//= require_tree .<|fim▁end|>
| |
<|file_name|>GSTTools.py<|end_file_name|><|fim▁begin|>'''
Various tools to interface with pyGSTi for running GST experiments.
Created on May 16, 2018
Original Author: Guilhem Ribeill
Copyright 2018 Raytheon BBN Technologies
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
from .PulsePrimitives import *
from .Cliffords import *
from .BasicSequences.helpers import create_cal_seqs
from .Compiler import compile_to_hardware
from itertools import chain
from random import choices
PYGSTI_PRESENT = False
try:
from pygsti.objects.circuit import Circuit
PYGSTI_PRESENT = True
except:
pass
#Default mapping from pyGSTi naming convention to QGL gates.
gst_gate_map = {"Gx": X90,
"Gy": Y90,
"Gi": Id}
def gst_map_1Q(gst_list, qubit, qgl_map=gst_gate_map, append_meas=True):
    """
    Convert pyGSTi gate strings into QGL pulse sequences.

    Walks an arbitrarily nested list of pyGSTi Circuits and yields QGL
    sequences with the same nesting structure.

    Inputs:
        gst_list: Circuit to convert, or a (possibly nested) list of Circuits.
        qubit: QGL qubit to apply the sequences to.
        qgl_map: dictionary mapping a pyGSTi label string ("Gx", ...) to a
            QGL pulse constructor.
        append_meas: if True, append a measurement to each sequence.
    Returns:
        QGL sequences, preserving the input list nesting (as a generator).
    """
    if isinstance(gst_list, Circuit):
        gst_list = [gst_list]
    for item in gst_list:
        if isinstance(item, Circuit):
            pulses = [qgl_map[str(label)](qubit) for label in item.tup]
            if append_meas:
                pulses.append(MEAS(qubit))
            yield pulses
        elif isinstance(item, list):
            yield list(gst_map_1Q(item, qubit,
                                  qgl_map=qgl_map, append_meas=append_meas))
def gst_map_2Q(gst_list, qubits, qgl_map=None, append_meas=False):
    """
    Helper function that takes an arbitrarily nested list of pygsti gatestrings
    and converts them into QGL sequences, keeping the same nesting of lists.

    Inputs:
        gst_list: Circuit to convert, or possibly nested list of pyGSTi Circuits.
        qubits: iterable of QGL qubits the sequences act on.
        qgl_map: dictionary that maps a pyGSTi gate label to a QGL pulse.
        append_meas: append a simultaneous measurement of all qubits to each
            sequence.
    Returns:
        QGL sequences, preserving the input list nesting (as a generator)
    """
    # `reduce` lives in functools on python3 and was not imported anywhere
    # in this module; needed for the simultaneous-measurement product below.
    from functools import reduce

    # Fixed: this function used to test against the undefined name
    # `GateString` (renamed to Circuit in pyGSTi, which is what this module
    # imports and what gst_map_1Q uses).
    if isinstance(gst_list, Circuit):
        gst_list = [gst_list]
    for item in gst_list:
        if isinstance(item, Circuit):
            mapped = map(lambda x: qgl_map[x], item.tup)
            if append_meas:
                yield list(chain(mapped, [reduce(lambda x, y: x * y,
                                                 map(MEAS, qubits))]))
            else:
                yield list(mapped)
        elif isinstance(item, list):
            # Fixed: the recursive call referenced the undefined name
            # `qubit`; the parameter is `qubits`.
            yield list(gst_map_2Q(item, qubits, qgl_map=qgl_map,
                                  append_meas=append_meas))
def create_gst_sequence_from_pygsti(gst_list, qubit, gate_map=gst_gate_map):
""" Returns list of QGL sequences from a pyGSTi GateString list. See gst_map_1Q.
The return value is a list of sequences that can be complied by QGL.<|fim▁hole|> return list(gst_map_1Q(gst_list, qubit, qgl_map=gate_map, append_meas=True))
def pygsti_to_cliffords(gst_seq):
#Map from GST convention to cliffords
cliff_map = {"{}": 0,
"Gi": 1,
"Gx": 2,
"Gy": 5}
#convert to dictionary of lambdas for compatibility with gst_map_1Q
lambda_map = {k: lambda x, v=v: v for k, v in cliff_map.items()}
return list(gst_map_1Q(gst_seq, None, qgl_map=lambda_map,
append_meas=False))
def pauli_rand_clifford_circuit(gst_seq):
def seqreduce(s):
if not s:
return 0
else:
return reduce(lambda x,y: clifford_multiply(x,y), s)
def inv_cliff(c):
return inverse_clifford(clifford_mat(c, 1))
c_ps = [0, 2, 5, 8]
c_seqs = pygsti_to_cliffords(gst_seq)
r_seqs = []
for seq in c_seqs:
if not seq:
r_seqs.append([])
else:
rand_pauli = choices(c_ps, k=len(seq))
inter = 0
bare = 0
rseq = []
for j in range(len(seq)):
inter = clifford_multiply(clifford_multiply(inter, rand_pauli[j]), seq[j])
bare = clifford_multiply(bare, seq[j])
rseq.append(clifford_multiply(rand_pauli[j], seq[j]))
recovery = clifford_multiply(inv_cliff(inter), bare)
rseq[-1] = clifford_multiply(rseq[-1], recovery)
r_seqs.append(rseq)
all_ok = all((r == i for r, i in zip(map(seqreduce, r_seqs), map(seqreduce, c_seqs))))
assert all_ok, "Something went wrong when Pauli-frame randomizing!"
return r_seqs
def SingleQubitCliffordGST(qubit, pygsti_seq, pulse_library="Standard", randomized=False, num_cals=100, diac_compiled=True):
pulse_library = pulse_library.upper()
# QGL pulse libraries handle the Id pulse differently. In the standard
# case, the Id is of finite length equal to all the other one-pulse
# elements of the library. In the Atomic and DiAtomic cases, the ID is
# of length 0 by default. In GST, we need access to both types of the ID
# gate with the first experiment in any GST experiment equal to {} =
# Id(length = 0). All other Id gates in the sequence should be of finite
# length. So we'll modify the Clifford indexing here to make Id(length=0)
# the first element in the library and Id(length=length) the second.
if pulse_library == "STANDARD":
#clifford_pulse = lambda x: clifford_seq(x, qubit)
clifford_pulse = [clifford_seq(i, qubit) for i in range(24)]
clifford_pulse.insert(0, Id(qubit, length=0.0))
elif pulse_library == "DIAC":
#clifford_pulse = lambda x: DiAC(qubit, x, diac_compiled)
clifford_pulse = [AC(qubit, i, diac_compiled) for i in range(24)]
clifford_pulse.insert(1, Id(qubit))
elif pulse_library == "AC":
#clifford_pulse = lambda x: AC(qubit, x)
clifford_pulse = [AC(qubit, i) for i in range(24)]
clifford_pulse.insert(1, Id(qubit))
raise ValueError("Pulse library must be one of 'standard', 'diac', or 'ac'. Got {} instead".format(pulse_library))
if randomized:
seqs = pauli_rand_clifford_circuit(pygsti_seq)
else:
seqs = pygsti_to_cliffords(pygsti_seq)
qgl_seqs = []
for seq in seqs:
qgl_seqs.append([clifford_pulse[c] for c in seq])
qgl_seqs[-1].append(MEAS(qubit))
if num_cals != 0:
qgl_seqs += create_cal_seqs((qubit, ), abs(num_cals))
metafile = compile_to_hardware(qgl_seqs, 'GST/GST')
return metafile<|fim▁end|>
|
"""
|
<|file_name|>SQLUtility.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2004 Actuate Corporation.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Actuate Corporation - initial API and implementation
* Ing. Gerd Stockner (Mayr-Melnhof Karton Gesellschaft m.b.H.) - modifications
* Christian Voller (Mayr-Melnhof Karton Gesellschaft m.b.H.) - modifications
* CoSMIT GmbH - publishing, maintenance
*******************************************************************************/
package com.mmkarton.mx7.reportgenerator.sqledit;
import java.sql.Types;
import java.text.Bidi;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.eclipse.datatools.connectivity.oda.IParameterMetaData;
import org.eclipse.datatools.connectivity.oda.IResultSetMetaData;
import org.eclipse.datatools.connectivity.oda.OdaException;
import org.eclipse.datatools.connectivity.oda.design.DataSetParameters;
import org.eclipse.datatools.connectivity.oda.design.DesignFactory;
import org.eclipse.datatools.connectivity.oda.design.ParameterDefinition;
import org.eclipse.datatools.connectivity.oda.design.ParameterMode;
import org.eclipse.datatools.connectivity.oda.design.ResultSetColumns;
import org.eclipse.datatools.connectivity.oda.design.ResultSetDefinition;
import org.eclipse.datatools.connectivity.oda.design.ui.designsession.DesignSessionUtil;
import com.mmkarton.mx7.reportgenerator.engine.SQLQuery;
import com.mmkarton.mx7.reportgenerator.jdbc.ResultSetMetaData;
import com.mmkarton.mx7.reportgenerator.wizards.BIRTReportWizard;
/**
* The utility class for SQLDataSetEditorPage
*
*/
public class SQLUtility
{
/**
 * Prepare the given SQL text against the data source and return a
 * {@link SQLQuery} describing its result-set columns and parameters.
 *
 * @param sqlQueryText raw SQL entered by the user
 * @return the populated SQLQuery, or null when the metadata could not be read
 */
public static SQLQuery getBIRTSQLFields(String sqlQueryText) {
	// A dummy "Where 1=2" predicate is appended so the statement can be
	// prepared cheaply without fetching any rows.
	MetaDataRetriever retriever = new MetaDataRetriever( addDummyWhere(sqlQueryText));
	IResultSetMetaData resultsetMeta = retriever.getResultSetMetaData( );
	IParameterMetaData paramMeta = retriever.getParameterMetaData( );
	// NOTE(review): retriever is never explicitly released here — confirm
	// MetaDataRetriever does not keep a connection/statement open.
	return saveDataSetDesign( resultsetMeta, paramMeta ,sqlQueryText);
}
/**
 * Translate prepared-statement metadata into a SQLQuery.
 *
 * @param meta         result-set metadata of the prepared statement
 * @param paramMeta    parameter metadata of the prepared statement
 * @param sqlQueryText original SQL text to store on the query
 * @return the SQLQuery built from the metadata, or null when an
 *         OdaException prevents the translation
 */
public static SQLQuery saveDataSetDesign( IResultSetMetaData meta, IParameterMetaData paramMeta, String sqlQueryText )
{
	try {
		// Merge the parameter metadata first, then derive the columns.
		setParameterMetaData( paramMeta );
		return setResultSetMetaData( meta, sqlQueryText );
	} catch ( OdaException odaError ) {
		// Metadata translation failed; signal it with a null result.
		return null;
	}
}
/**
 * Merge the statement's parameter metadata into the data-set design,
 * deliberately swallowing ODA failures so that any parameter definitions
 * already present in the design are preserved.
 *
 * @param paramMeta parameter metadata of the prepared statement
 */
private static void setParameterMetaData(IParameterMetaData paramMeta )
{
	try
	{
		// set parameter metadata
		mergeParameterMetaData( paramMeta );
	}
	catch ( OdaException e )
	{
		// do nothing, to keep the parameter definition in dataset design
		// dataSetDesign.setParameters( null );
	}
}
/**
 * Compute BIDI segment boundaries for a line of text so that mixed
 * left-to-right / right-to-left runs can be rendered separately.
 *
 * @param lineText the line to segment, may be null or empty
 * @return segment end offsets (seg[0] is always 0), or null when the line
 *         is empty or purely left-to-right
 */
public static int[] getBidiLineSegments( String lineText )
{
	int[] seg = null;
	// Only non-empty lines that actually contain RTL runs need segmenting.
	if ( lineText != null
			&& lineText.length( ) > 0
			&& !new Bidi( lineText, Bidi.DIRECTION_LEFT_TO_RIGHT ).isLeftToRight( ) )
	{
		List list = new ArrayList( );
		// Punctuations will be regarded as delimiter so that different
		// splits could be rendered separately.
		Object[] splits = lineText.split( "\\p{Punct}" );
		// !=, <> etc. leading to "" will be filtered to meet the rule that
		// segments must not have duplicates.
		for ( int i = 0; i < splits.length; i++ )
		{
			if ( !splits[i].equals( "" ) )
				list.add( splits[i] );
		}
		splits = list.toArray( );
		// first segment must be 0
		// last segment does not necessarily equal to line length
		seg = new int[splits.length + 1];
		for ( int i = 0; i < splits.length; i++ )
		{
			// Each boundary is the end offset of the matching split,
			// searched from the previous boundary onwards so repeated
			// substrings resolve to successive occurrences.
			seg[i + 1] = lineText.indexOf( (String) splits[i], seg[i] )
					+ ( (String) splits[i] ).length( );
		}
	}
	return seg;
}
/**
 * Return the pre-defined query text pattern as a single string: each
 * element is followed by " \n", except the last which is followed by
 * a single space.
 *
 * @param extensionId the ODA data-set extension id
 * @return the pre-defined query text, or "" when there is no pattern
 */
public static String getQueryPresetTextString( String extensionId )
{
	String[] lines = getQueryPresetTextArray( extensionId );
	// StringBuilder avoids the quadratic cost of repeated String
	// concatenation inside the loop.
	StringBuilder result = new StringBuilder( );
	if ( lines != null && lines.length > 0 )
	{
		for ( int i = 0; i < lines.length; i++ )
		{
			result.append( lines[i] )
					.append( i == lines.length - 1 ? " " : " \n" );
		}
	}
	return result.toString( );
}
/**
 * Return the pre-defined query text pattern, one element per array entry.
 *
 * @param extensionId the ODA data-set extension id
 * @return a stored-procedure call template for the SP data-set extension,
 *         otherwise a plain select/from skeleton
 */
public static String[] getQueryPresetTextArray( String extensionId )
{
	// Stored-procedure data sets get a call template; everything else the
	// select/from skeleton.
	if ( extensionId.equals( "org.eclipse.birt.report.data.oda.jdbc.SPSelectDataSet" ) )
	{
		return new String[]{
				"{call procedure-name(arg1,arg2, ...)}"
		};
	}
	return new String[]{
			"select", "from"
	};
}
/**
 * Convert the statement's parameter metadata into a DataSetParameters
 * design (IN-mode literals) and normalize each parameter definition.
 *
 * @param md parameter metadata, may be null (no-op)
 * @throws OdaException when the design conversion fails
 */
private static void mergeParameterMetaData( IParameterMetaData md ) throws OdaException
{
	if ( md == null)
		return;
	DataSetParameters dataSetParameter = DesignSessionUtil.toDataSetParametersDesign( md,
			ParameterMode.IN_LITERAL );
	if ( dataSetParameter != null )
	{
		Iterator iter = dataSetParameter.getParameterDefinitions( )
				.iterator( );
		while ( iter.hasNext( ) )
		{
			// Fix up definitions whose native type could not be resolved
			// (see proccessParamDefn).
			ParameterDefinition defn = (ParameterDefinition) iter.next( );
			proccessParamDefn( defn, dataSetParameter );
		}
	}
	//dataSetDesign.setParameters( dataSetParameter );
}
/**
* Process the parameter definition for some special case
*
* @param defn<|fim▁hole|> {
if ( defn.getAttributes( ).getNativeDataTypeCode( ) == Types.NULL )
{
defn.getAttributes( ).setNativeDataTypeCode( Types.CHAR );
}
}
/**
 * Build a SQLQuery from the result-set metadata: stores the original query
 * text and one (name, type) pair per result column.
 *
 * @param md           result-set metadata of the prepared statement
 * @param sqlQueryText original SQL text to store on the query
 * @return the populated SQLQuery; null when there are no columns or the
 *         column metadata cannot be read
 * @throws OdaException when the column design conversion fails
 */
private static SQLQuery setResultSetMetaData(IResultSetMetaData md, String sqlQueryText ) throws OdaException
{
	SQLQuery query=null;
	ResultSetColumns columns = DesignSessionUtil.toResultSetColumnsDesign( md );
	if ( columns != null )
	{
		query=new SQLQuery();
		ResultSetDefinition resultSetDefn = DesignFactory.eINSTANCE.createResultSetDefinition( );
		resultSetDefn.setResultSetColumns( columns );
		int count=resultSetDefn.getResultSetColumns().getResultColumnDefinitions().size();
		query.setSqlQueryString(sqlQueryText);
		for (int i = 0; i < count; i++)
		{
			int columntype=-1;
			String columname="";
			try {
				// JDBC-style column metadata is 1-based, hence i + 1.
				// NOTE(review): assumes md is this plugin's concrete
				// ResultSetMetaData — the cast fails otherwise and the
				// whole method returns null.
				ResultSetMetaData dataset=(ResultSetMetaData)md;
				columname=dataset.getColumnName(i+1);
				columntype=dataset.getColumnType(i+1);
			} catch (Exception e)
			{
				return null;
			}
			query.setFields(columname, columntype);
		}
	}
	return query;
}
/**
 * Truncate the query at its first WHERE clause (if any) and append a dummy
 * "Where 1=2" predicate, so the statement can be prepared for metadata
 * retrieval without returning any rows.
 *
 * @param sqlQueryText the original SQL text, may be null
 * @return the rewritten query, or null when sqlQueryText is null
 */
private static String addDummyWhere(String sqlQueryText)
{
	if (sqlQueryText == null) {
		return null;
	}
	// Locate WHERE case-insensitively but keep the caller's original casing
	// in the returned prefix. The previous code returned the upper-cased
	// statement, which corrupted string literals and quoted identifiers.
	int wherePos = sqlQueryText.toUpperCase().indexOf("WHERE");
	// NOTE(review): indexOf finds the first "WHERE" anywhere in the text,
	// including sub-queries — confirm only simple selects reach this point.
	String sqlQuery = wherePos > 0 ? sqlQueryText.substring(0, wherePos) : sqlQueryText;
	return sqlQuery + " Where 1=2";
}
}<|fim▁end|>
|
* @param parameters
*/
private static void proccessParamDefn( ParameterDefinition defn,
DataSetParameters parameters )
|
<|file_name|>functions.rs<|end_file_name|><|fim▁begin|>use std::rc::Rc;
use std::collections::{HashSet, HashMap};
extern crate symbolic_polynomials;
extern crate num;
use symbolic_polynomials::*;
use num::Integer;
#[allow(dead_code)]
type TestMonomial = Monomial<String, i64, u8>;
#[allow(dead_code)]
type TestPolynomial = Polynomial<String, i64, u8>;
/// `max` folds to a constant when both operands are constants; with a
/// symbolic operand it is kept as an irreducible `Composite::Max` monomial
/// and only resolved at `eval` time.
#[test]
pub fn max_test() {
    let thirteen = TestPolynomial::from(13);
    let three = TestPolynomial::from(3);
    let thirteen_v2 = max(&thirteen, &three);
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let a_square = &a * &a;
    let a_third = &a_square * &a;
    let a_v2 = max(&a_square, &a);
    let max_a_square_b = max(&a_square, &b);
    let max_a_third_b = max(&a_third, &b);
    // Constant folding: max(13, 3) == 13, comparable from both sides.
    assert_eq!(thirteen_v2, 13);
    assert_eq!(13, thirteen_v2);
    assert_eq!(a_v2.monomials.len(), 1);
    assert_eq!(a_v2.monomials[0].coefficient, 1);
    assert_eq!(
        a_v2.monomials[0].powers,
        vec![(Composite::Max(Rc::new(a_square.clone()), Rc::new(a)), 1)]
    );
    assert_eq!(max_a_square_b.monomials.len(), 1);
    assert_eq!(max_a_square_b.monomials[0].coefficient, 1);
    assert_eq!(
        max_a_square_b.monomials[0].powers,
        vec![(Composite::Max(Rc::new(a_square), Rc::new(b.clone())), 1)]
    );
    assert_eq!(max_a_third_b.monomials.len(), 1);
    assert_eq!(max_a_third_b.monomials[0].coefficient, 1);
    assert_eq!(
        max_a_third_b.monomials[0].powers,
        vec![(Composite::Max(Rc::new(a_third), Rc::new(b)), 1)]
    );
    // With a = 3, b = 13: max(9, 13) = 13 and max(27, 13) = 27.
    let mut values = HashMap::<String, i64>::new();
    values.insert("a".into(), 3);
    values.insert("b".into(), 13);
    assert_eq!(thirteen.eval(&values), Ok(13));
    assert_eq!(max_a_square_b.eval(&values), Ok(13));
    assert_eq!(max_a_third_b.eval(&values), Ok(27));
}
/// `min` mirrors `max_test`: constants fold, symbolic operands stay as an
/// irreducible `Composite::Min` monomial until `eval`.
#[test]
pub fn min_test() {
    let thirteen = TestPolynomial::from(13);
    let three = TestPolynomial::from(3);
    let three_v2 = min(&thirteen, &three);
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let a_square = &a * &a;
    let a_third = &a_square * &a;
    let a_v2 = min(&a_square, &a);
    let min_a_square_b = min(&a_square, &b);
    let min_a_third_b = min(&a_third, &b);
    // Constant folding: min(13, 3) == 3.
    assert_eq!(three_v2, 3);
    assert_eq!(3, three_v2);
    assert_eq!(a_v2.monomials.len(), 1);
    assert_eq!(a_v2.monomials[0].coefficient, 1);
    assert_eq!(
        a_v2.monomials[0].powers,
        vec![(Composite::Min(Rc::new(a_square.clone()), Rc::new(a)), 1)]
    );
    assert_eq!(min_a_square_b.monomials.len(), 1);
    assert_eq!(min_a_square_b.monomials[0].coefficient, 1);
    assert_eq!(
        min_a_square_b.monomials[0].powers,
        vec![(Composite::Min(Rc::new(a_square), Rc::new(b.clone())), 1)]
    );
    assert_eq!(min_a_third_b.monomials.len(), 1);
    assert_eq!(min_a_third_b.monomials[0].coefficient, 1);
    assert_eq!(
        min_a_third_b.monomials[0].powers,
        vec![(Composite::Min(Rc::new(a_third), Rc::new(b)), 1)]
    );
    // With a = 3, b = 13: min(9, 13) = 9 and min(27, 13) = 13.
    let mut values = HashMap::<String, i64>::new();
    values.insert("a".into(), 3);
    values.insert("b".into(), 13);
    assert_eq!(three_v2.eval(&values), Ok(3));
    assert_eq!(min_a_square_b.eval(&values), Ok(9));
    assert_eq!(min_a_third_b.eval(&values), Ok(13));
}
/// `ceil`: constants fold (ceil(13/3) = 5), exact divisions simplify
/// (ceil(a², a) = a), and everything else stays as `Composite::Ceil`.
#[test]
pub fn ceil_test() {
    let thirteen = TestPolynomial::from(13);
    let three = TestPolynomial::from(3);
    let five = ceil(&thirteen, &three);
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let a_square = &a * &a;
    let a_third = &a_square * &a;
    let a_v2 = ceil(&a_square, &a);
    let ceil_a_square_b = ceil(&a_square, &b);
    let ceil_a_third_b = ceil(&a_third, &b);
    assert_eq!(five, 5);
    assert_eq!(5, five);
    // Exact division: ceil(a^2, a) simplifies symbolically to a.
    assert_eq!(a_v2, a);
    assert_eq!(ceil_a_square_b.monomials.len(), 1);
    assert_eq!(ceil_a_square_b.monomials[0].coefficient, 1);
    assert_eq!(
        ceil_a_square_b.monomials[0].powers,
        vec![(Composite::Ceil(Rc::new(a_square), Rc::new(b.clone())), 1)]
    );
    assert_eq!(ceil_a_third_b.monomials.len(), 1);
    assert_eq!(ceil_a_third_b.monomials[0].coefficient, 1);
    assert_eq!(
        ceil_a_third_b.monomials[0].powers,
        vec![(Composite::Ceil(Rc::new(a_third), Rc::new(b)), 1)]
    );
    // With a = 3, b = 13: ceil(9/13) = 1 and ceil(27/13) = 3.
    let mut values = HashMap::<String, i64>::new();
    values.insert("a".into(), 3);
    values.insert("b".into(), 13);
    assert_eq!(five.eval(&values), Ok(5));
    assert_eq!(ceil_a_square_b.eval(&values), Ok(1));
    assert_eq!(ceil_a_third_b.eval(&values), Ok(3));
}
/// `floor`: constants fold (floor(13/3) = 4), exact divisions simplify
/// (floor(a², a) = a), and everything else stays as `Composite::Floor`.
#[test]
pub fn floor_test() {
    let thirteen = TestPolynomial::from(13);
    let three = TestPolynomial::from(3);
    let four = floor(&thirteen, &three);
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let a_square = &a * &a;
    let a_third = &a_square * &a;
    let a_v2 = floor(&a_square, &a);
    let floor_a_square_b = floor(&a_square, &b);
    let floor_a_third_b = floor(&a_third, &b);
    assert_eq!(four, 4);
    assert_eq!(4, four);
    // Exact division simplifies; equality is symmetric.
    assert_eq!(a_v2, a);
    assert_eq!(a, a_v2);
    assert_eq!(floor_a_square_b.monomials.len(), 1);
    assert_eq!(floor_a_square_b.monomials[0].coefficient, 1);
    assert_eq!(
        floor_a_square_b.monomials[0].powers,
        vec![(Composite::Floor(Rc::new(a_square), Rc::new(b.clone())), 1)]
    );
    assert_eq!(floor_a_third_b.monomials.len(), 1);
    assert_eq!(floor_a_third_b.monomials[0].coefficient, 1);
    assert_eq!(
        floor_a_third_b.monomials[0].powers,
        vec![(Composite::Floor(Rc::new(a_third), Rc::new(b)), 1)]
    );
    // With a = 3, b = 13: floor(9/13) = 0 and floor(27/13) = 2.
    let mut values = HashMap::<String, i64>::new();
    values.insert("a".into(), 3);
    values.insert("b".into(), 13);
    assert_eq!(four.eval(&values), Ok(4));
    assert_eq!(floor_a_square_b.eval(&values), Ok(0));
    assert_eq!(floor_a_third_b.eval(&values), Ok(2));
}
/// `unique_identifiers` accumulates the distinct variable names of a
/// polynomial into the given set: constants contribute nothing, and
/// composites (floor) contribute the identifiers of both operands.
#[test]
pub fn unique_identifiers_test() {
    let thirteen = TestPolynomial::from(13);
    let three = TestPolynomial::from(3);
    let four = floor(&thirteen, &three);
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let a_square = &a * &a;
    let a_third = &a_square * &a;
    let floor_a_square_b = floor(&a_square, &b);
    let floor_a_third_b = floor(&a_third, &b);
    let mut set = HashSet::new();
    // Constants add no identifiers.
    thirteen.unique_identifiers(&mut set);
    assert_eq!(set.len(), 0);
    three.unique_identifiers(&mut set);
    assert_eq!(set.len(), 0);
    four.unique_identifiers(&mut set);
    assert_eq!(set.len(), 0);
    // Powers of the same variable still count as one identifier.
    a.unique_identifiers(&mut set);
    assert_eq!(set.len(), 1);
    a_square.unique_identifiers(&mut set);
    assert_eq!(set.len(), 1);
    a_third.unique_identifiers(&mut set);
    assert_eq!(set.len(), 1);
    b.unique_identifiers(&mut set);
    assert_eq!(set.len(), 2);
    floor_a_square_b.unique_identifiers(&mut set);
    assert_eq!(set.len(), 2);
    floor_a_third_b.unique_identifiers(&mut set);
    assert_eq!(set.len(), 2);
    // Starting from an empty set, composites yield both identifiers.
    set.clear();
    floor_a_square_b.unique_identifiers(&mut set);
    assert_eq!(set.len(), 2);
    set.clear();
    floor_a_third_b.unique_identifiers(&mut set);
    assert_eq!(set.len(), 2);
}
/// `deduce_values` recovers (a, b, c) = (1, 3, 7) from a triangular system:
/// a directly, then b from 2ab + 1, then c from the remaining polynomial.
#[test]
pub fn deduce_values_test1() {
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let c: TestPolynomial = variable("c".into());
    let a_val: i64 = 1;
    let b_val: i64 = 3;
    let c_val: i64 = 7;
    let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
    // a
    let poly1 = a.clone();
    let val1 = a_val;
    implicit_values.push((poly1.clone(), val1));
    // 2ab + 1
    let poly2 = 2 * &a * &b + 1;
    let val2 = 2 * a_val * b_val + 1;
    implicit_values.push((poly2.clone(), val2));
    // 5a^2b^2c^2 + a^2b + 3
    let poly3 = 5 * &a * &a * &b * &b * &c * &c + &a * &a * &b + 3;
    let val3 = 5 * a_val * a_val * b_val * b_val * c_val * c_val + a_val * a_val * b_val + 3;
    implicit_values.push((poly3.clone(), val3));
    let values = deduce_values(&implicit_values).unwrap();
    assert_eq!(a.eval(&values), Ok(a_val));
    assert_eq!(b.eval(&values), Ok(b_val));
    assert_eq!(c.eval(&values), Ok(c_val));
    assert_eq!(poly1.eval(&values), Ok(val1));
    assert_eq!(poly2.eval(&values), Ok(val2));
    assert_eq!(poly3.eval(&values), Ok(val3));
}
/// `deduce_values` recovers (a, b, c) = (2, 3, 5) when the system is not
/// given in dependency order: c comes from the last equation (5c), then a,
/// then b.
#[test]
pub fn deduce_values_test2() {
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let c: TestPolynomial = variable("c".into());
    let a_val: i64 = 2;
    let b_val: i64 = 3;
    let c_val: i64 = 5;
    let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
    // abc^2 + abc + 1
    let poly1 = &a * &b * &c * (&c + 1) + 1;
    let val1 = a_val * b_val * c_val * (c_val + 1) + 1;
    implicit_values.push((poly1.clone(), val1));
    // a^2 + c^2 + 2
    let poly2 = &a * &a + &c * &c + 2;
    let val2 = a_val * a_val + c_val * c_val + 2;
    implicit_values.push((poly2.clone(), val2));
    // 5c
    let poly3 = 5 * &c;
    let val3 = 5 * c_val;
    implicit_values.push((poly3.clone(), val3));
    let values = deduce_values(&implicit_values).unwrap();
    assert_eq!(a.eval(&values), Ok(a_val));
    assert_eq!(b.eval(&values), Ok(b_val));
    assert_eq!(c.eval(&values), Ok(c_val));
    assert_eq!(poly1.eval(&values), Ok(val1));
    assert_eq!(poly2.eval(&values), Ok(val2));
    assert_eq!(poly3.eval(&values), Ok(val3));
}
/// `deduce_values` recovers (a, b, c) = (1, 2, 3) from a mix of univariate
/// and multivariate equations (3b², a³ + b³ − 10, ab + ac + bc + 3).
#[test]
pub fn deduce_values_test3() {
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let c: TestPolynomial = variable("c".into());
    let a_val: i64 = 1;
    let b_val: i64 = 2;
    let c_val: i64 = 3;
    let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
    // 3b^2
    let poly1 = 3 * &b * &b;
    let val1 = 3 * b_val * b_val;
    implicit_values.push((poly1.clone(), val1));
    // a^3 + b^3 - 10
    let poly2 = &a * &a * &a + &b * &b * &b - 10;
    let val2 = a_val * a_val * a_val + b_val * b_val * b_val - 10;
    implicit_values.push((poly2.clone(), val2));
    // ab + ac + bc + 3
    let poly3 = &a * &b + &a * &c + &b * &c + 3;
    let val3 = a_val * b_val + a_val * c_val + b_val * c_val + 3;
    implicit_values.push((poly3.clone(), val3));
    let values = deduce_values(&implicit_values).unwrap();
    assert_eq!(a.eval(&values), Ok(a_val));
    assert_eq!(b.eval(&values), Ok(b_val));
    assert_eq!(c.eval(&values), Ok(c_val));
    assert_eq!(poly1.eval(&values), Ok(val1));
    assert_eq!(poly2.eval(&values), Ok(val2));
    assert_eq!(poly3.eval(&values), Ok(val3));
}
#[test]
pub fn deduce_values_test_floor_min() {
let a: TestPolynomial = variable("a".into());
let b: TestPolynomial = variable("b".into());
let c: TestPolynomial = variable("c".into());
let a_val: i64 = 1;
let b_val: i64 = 3;
let c_val: i64 = 7;
let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
// a
let poly1 = a.clone();
let val1 = a_val;
implicit_values.push((poly1.clone(), val1));
// 2ab + 1
let poly2 = 2 * &a * &b + 1;
let val2 = 2 * a_val * b_val + 1;
let two = TestPolynomial::from(2);
implicit_values.push((poly2.clone(), val2));
// 5a^2b^2c^2 + floor(ab^2, 2) + min(a^2, b^2) + 3
let poly3 = 5 * &a * &a * &b * &b * &c * &c + floor(&a * &b * &b, &two) +
min(&a * &a, &b * &b) + 3;
let val3 = 5 * a_val * a_val * b_val * b_val * c_val * c_val +
(a_val * b_val * b_val).div_floor(&2) +
::std::cmp::min(a_val * a_val, b_val * b_val) + 3;
implicit_values.push((poly3.clone(), val3));<|fim▁hole|> assert_eq!(c.eval(&values), Ok(c_val));
assert_eq!(poly1.eval(&values), Ok(val1));
assert_eq!(poly2.eval(&values), Ok(val2));
assert_eq!(poly3.eval(&values), Ok(val3));
}
/// `deduce_values` handles systems containing `ceil` and `max` composites;
/// the expected value mirrors ceil via div_floor plus a remainder bump.
#[test]
pub fn deduce_values_test_ceil_max() {
    let a: TestPolynomial = variable("a".into());
    let b: TestPolynomial = variable("b".into());
    let c: TestPolynomial = variable("c".into());
    let a_val: i64 = 2;
    let b_val: i64 = 3;
    let c_val: i64 = 5;
    let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
    // abc^2 + abc + 1
    let poly1 = &a * &b * &c * (&c + 1) + 1;
    let val1 = a_val * b_val * c_val + a_val * c_val * c_val * b_val + 1;
    let six = TestPolynomial::from(6);
    implicit_values.push((poly1.clone(), val1));
    // a^2 + ceil(c^2, 6) + max(c^2, 12) + 2
    let poly2 = &a * &a + ceil(&c * &c, &six) + max(&c * &c, TestPolynomial::from(12)) + 2;
    let mut val2 = a_val * a_val + (c_val * c_val).div_floor(&6) +
        ::std::cmp::max(c_val * c_val, 12) + 2;
    // Emulate ceiling division: floor + 1 when the division is inexact.
    if c_val * c_val % 6 != 0 {
        val2 += 1;
    }
    implicit_values.push((poly2.clone(), val2));
    // 5c
    let poly3 = 5 * &c;
    let val3 = 5 * c_val;
    implicit_values.push((poly3.clone(), val3));
    let values = deduce_values(&implicit_values).unwrap();
    assert_eq!(a.eval(&values), Ok(a_val));
    assert_eq!(b.eval(&values), Ok(b_val));
    assert_eq!(c.eval(&values), Ok(c_val));
    assert_eq!(poly1.eval(&values), Ok(val1));
    assert_eq!(poly2.eval(&values), Ok(val2));
    assert_eq!(poly3.eval(&values), Ok(val3));
}
pub fn deduce_values_test_all() {
let a: TestPolynomial = variable("a".into());
let b: TestPolynomial = variable("b".into());
let c: TestPolynomial = variable("c".into());
let a_val: i64 = 2;
let b_val: i64 = 3;
let c_val: i64 = 5;
let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
// 3b^2
let poly1 = 3 * &b * &b;
let val1 = 3 * b_val * b_val;
implicit_values.push((poly1.clone(), val1));
// a^3 + floor(b^3, 3) - 10 - min(b^2, 17)
let poly2 = &a * &a * &a + floor(&b * &b * &b, TestPolynomial::from(3)) - 10 -
min(&b * &b, TestPolynomial::from(17));
let val2 = a_val * a_val * a_val + (b_val * b_val * b_val).div_floor(&3) - 10 -
::std::cmp::min(b_val * b_val, 17);
implicit_values.push((poly2.clone(), val2));
// ceil(7ab, 5) + ac + bc + 3 + max(ab - 5, a + 2b)
let poly3 = ceil(7 * &a * &b, TestPolynomial::from(5)) + &a * &c + &b * &c + 3 +
max(&a * &b - 5, &a + 2 * &b);
let mut val3 = (7 * a_val * b_val).div_floor(&5) + a_val * c_val + b_val * c_val + 3 +
::std::cmp::max(a_val * b_val - 5, a_val + 2 * b_val);
if 7 * a_val * b_val % 5 != 0 {
val3 += 1;
}
implicit_values.push((poly3.clone(), val3));
let values = deduce_values(&implicit_values).unwrap();
assert_eq!(a.eval(&values), Ok(a_val));
assert_eq!(b.eval(&values), Ok(b_val));
assert_eq!(c.eval(&values), Ok(c_val));
assert_eq!(poly1.eval(&values), Ok(val1));
assert_eq!(poly2.eval(&values), Ok(val2));
assert_eq!(poly3.eval(&values), Ok(val3));
}
#[test]
pub fn deduce_values_test_fails() {
let a: TestPolynomial = variable("a".into());
let b: TestPolynomial = variable("b".into());
let c: TestPolynomial = variable("c".into());
let a_val: i64 = 1;
let b_val: i64 = 2;
let c_val: i64 = 3;
let mut implicit_values = Vec::<(TestPolynomial, i64)>::new();
// ab^2
let poly1 = &a * &b * &b;
let val1 = a_val * b_val * b_val;
implicit_values.push((poly1.clone(), val1));
// 2b + 1
let poly2 = 2 * &b + 1;
let val2 = 2 * b_val + 1;
implicit_values.push((poly2.clone(), val2));
// ac^2 + bc + 2
let poly3 = &a * &c * &c + &b * &c + 2;
let val3 = a_val * c_val * c_val + b_val * c_val + 2;
implicit_values.push((poly3.clone(), val3));
assert_eq!(deduce_values(&implicit_values), Err("Could not deduce all variables.".into()));
// 2bc + 1
let poly2 = 2 * &b * &c + 1;
let val2 = 2 * b_val * c_val + 1;
implicit_values.remove(1);
implicit_values.push((poly2.clone(), val2));
assert_eq!(deduce_values(&implicit_values), Err("Could not deduce all variables.".into()));
}<|fim▁end|>
|
let values = deduce_values(&implicit_values).unwrap();
assert_eq!(a.eval(&values), Ok(a_val));
assert_eq!(b.eval(&values), Ok(b_val));
|
<|file_name|>content.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007-2022 Crafter Software Corporation. All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import { errorSelectorApi1, get, getBinary, getGlobalHeaders, getText, post, postJSON } from '../utils/ajax';
import { catchError, map, mapTo, pluck, switchMap, tap } from 'rxjs/operators';
import { forkJoin, Observable, of, zip } from 'rxjs';
import { cdataWrap, createElement, createElements, fromString, getInnerHtml, serialize } from '../utils/xml';
import { ContentType } from '../models/ContentType';
import { createLookupTable, nnou, nou, reversePluckProps, toQueryString } from '../utils/object';
import { LookupTable } from '../models/LookupTable';
import $ from 'jquery/dist/jquery.slim';
import { dataUriToBlob, isBlank, popPiece, removeLastPiece } from '../utils/string';
import ContentInstance, { InstanceRecord } from '../models/ContentInstance';
import { AjaxError, AjaxResponse } from 'rxjs/ajax';
import { ComponentsContentTypeParams, ContentInstancePage } from '../models/Search';
import Core from '@uppy/core';
import XHRUpload from '@uppy/xhr-upload';
import { getRequestForgeryToken } from '../utils/auth';
import { DetailedItem, LegacyItem, SandboxItem } from '../models/Item';
import { VersionsResponse } from '../models/Version';
import { GetChildrenOptions } from '../models/GetChildrenOptions';
import {
createItemActionMap,
createItemStateMap,
parseContentXML,
parseSandBoxItemToDetailedItem
} from '../utils/content';
import QuickCreateItem from '../models/content/QuickCreateItem';
import ApiResponse from '../models/ApiResponse';
import { fetchContentTypes } from './contentTypes';
import { Clipboard } from '../models/GlobalState';
import { getFileNameFromPath, getParentPath, getPasteItemFromPath } from '../utils/path';
import { StandardAction } from '../models/StandardAction';
import { GetChildrenResponse } from '../models/GetChildrenResponse';
import { GetItemWithChildrenResponse } from '../models/GetItemWithChildrenResponse';
import { FetchItemsByPathOptions } from '../models/FetchItemsByPath';
import { v4 as uuid } from 'uuid';
import { asArray } from '../utils/array';
/** Fetches the rendered HTML of the component stored at the given path. */
export function fetchComponentInstanceHTML(path: string): Observable<string> {
  const url = `/crafter-controller/component.html?path=${path}`;
  return getText(url).pipe(pluck('response'));
}
// Options accepted by fetchContentXML.
interface GetContentOptions {
  // When true, the fetch also requests an edit lock on the item
  // (sent as the `edit` query param).
  lock: boolean;
}
/**
 * Fetches the raw XML content of the item at `path`.
 * When `options.lock` is true, an edit lock is requested along with the fetch.
 */
export function fetchContentXML(site: string, path: string, options?: Partial<GetContentOptions>): Observable<string> {
  const { lock = false } = options ?? {};
  const qs = toQueryString({ site_id: site, path, edit: lock });
  return get(`/studio/api/1/services/api/1/content/get-content.json${qs}`).pipe(pluck('response', 'content'));
}
/** Fetches the content at `path` parsed into an XMLDocument. */
export function fetchContentDOM(site: string, path: string): Observable<XMLDocument> {
  return fetchContentXML(site, path).pipe(map((xml) => fromString(xml)));
}
// Options accepted by the descriptor fetchers.
interface GetDescriptorOptions {
  // Defaults to true in fetchDescriptorXML — presumably inlines include
  // references into the returned XML; confirm against the descriptor API.
  flatten: boolean;
}
/**
 * Fetches the descriptor XML of the item at `path`.
 * `flatten` defaults to true; callers may override it via `options`
 * (the spread comes after the default).
 */
export function fetchDescriptorXML(
  site: string,
  path: string,
  options?: Partial<GetDescriptorOptions>
): Observable<string> {
  const qs = toQueryString({ siteId: site, path, flatten: true, ...options });
  return get(`/studio/api/2/content/descriptor${qs}`).pipe(pluck('response', 'xml'));
}
/** Fetches the descriptor at `path` parsed into an XMLDocument. */
export function fetchDescriptorDOM(
  site: string,
  path: string,
  options?: Partial<GetDescriptorOptions>
): Observable<XMLDocument> {
  return fetchDescriptorXML(site, path, options).pipe(map((xml) => fromString(xml)));
}
export function fetchSandboxItem(site: string, path: string): Observable<SandboxItem> {
return fetchItemsByPath(site, [path]).pipe(pluck(0));
}
/**
 * Fetches a single item with its detailed metadata.
 * @param options - `preferContent` defaults to true; presumably resolves
 *   index/content items over folders — confirm against the API docs.
 */
export function fetchDetailedItem(
  siteId: string,
  path: string,
  options?: { preferContent: boolean }
): Observable<DetailedItem> {
  const { preferContent } = { preferContent: true, ...options };
  const qs = toQueryString({ siteId, path, preferContent });
  return get(`/studio/api/2/content/item_by_path${qs}`).pipe(
    pluck('response', 'item'),
    // Precompute lookup maps from the encoded state/availableActions values.
    map((item: DetailedItem) => ({
      ...item,
      stateMap: createItemStateMap(item.state),
      availableActionsMap: createItemActionMap(item.availableActions)
    }))
  );
}
/**
 * Fetches the document at `path` and returns a lookup table containing
 * every content instance parsed out of it.
 */
export function fetchContentInstanceLookup(
  site: string,
  path: string,
  contentTypesLookup: LookupTable<ContentType>
): Observable<LookupTable<ContentInstance>> {
  return fetchContentDOM(site, path).pipe(
    map((xmlDoc) => {
      // parseContentXML populates the supplied lookup as a side effect.
      const instanceLookup: LookupTable<ContentInstance> = {};
      parseContentXML(xmlDoc, path, contentTypesLookup, instanceLookup);
      return instanceLookup;
    })
  );
}
/** Fetches and parses the content instance stored at `path`. */
export function fetchContentInstance(
  site: string,
  path: string,
  contentTypesLookup: LookupTable<ContentType>
): Observable<ContentInstance> {
  return fetchContentDOM(site, path).pipe(
    map((xmlDoc) => parseContentXML(xmlDoc, path, contentTypesLookup, {}))
  );
}
/**
 * Writes XML `content` to `path`, unlocking the item afterwards by default.
 * The legacy API can report failures inside a 200 response, so the result
 * payload is inspected and re-thrown shaped like an ajax error.
 * @returns Observable emitting `true` on success.
 */
export function writeContent(site: string, path: string, content: string, options?: { unlock: boolean }) {
  options = Object.assign({ unlock: true }, options);
  return post(
    writeContentUrl({
      site,
      // The legacy endpoint takes the parent folder plus a separate fileName.
      path: getParentPath(path),
      unlock: options.unlock ? 'true' : 'false',
      fileName: getFileNameFromPath(path)
    }),
    content
  ).pipe(
    map((ajaxResponse) => {
      if (ajaxResponse.response.result.error) {
        // Normalize the embedded error into an ajax-error-shaped rejection.
        // eslint-disable-next-line no-throw-literal
        throw {
          ...ajaxResponse,
          status: 500,
          response: {
            message: ajaxResponse.response.result.error.message
          }
        };
      } else return true;
    })
  );
}
/**
 * Fetches the content instance at `path` from its (flattened) descriptor,
 * together with a lookup of every model found in the document.
 * @param contentTypeLookup - Optional pre-fetched content-type table; when
 *   omitted, the site's content types are fetched first.
 */
export function fetchContentInstanceDescriptor(
  site: string,
  path: string,
  options?: Partial<GetDescriptorOptions>,
  contentTypeLookup?: LookupTable<ContentType>
): Observable<{ model: ContentInstance; modelLookup: LookupTable<ContentInstance> }> {
  return (
    contentTypeLookup
      ? of(contentTypeLookup)
      : fetchContentTypes(site).pipe(map((contentTypes) => createLookupTable(contentTypes)))
  ).pipe(
    switchMap((contentTypeLookup) =>
      fetchDescriptorDOM(site, path, options).pipe(
        map((doc) => {
          // parseContentXML fills modelLookup with every nested model.
          const modelLookup = {};
          const model = parseContentXML(doc, path, contentTypeLookup, modelLookup);
          return { model, modelLookup };
        })
      )
    )
  );
}
/** Builds the legacy write-content service URL from the given query params. */
function writeContentUrl(qs: object): string {
  const params = new URLSearchParams(qs as Record<string, string>);
  return `/studio/api/1/services/api/1/content/write-content.json?${params.toString()}`;
}
// region Operations
/**
 * Updates a single field value of a model inside the XML document at `path`.
 * The target field element is created if missing; `serializeValue` controls
 * how the value is written (custom serializer, CDATA wrap, or raw).
 * @param indexToUpdate - Index of the collection item when `fieldId` is a
 *   dotted path into a repeating group.
 */
export function updateField(
  site: string,
  modelId: string,
  fieldId: string,
  indexToUpdate: number,
  path: string,
  value: any,
  serializeValue: boolean | ((value: any) => string) = false
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      let node = extractNode(element, removeLastPiece(fieldId) || fieldId, indexToUpdate);
      if (fieldId.includes('.')) {
        // node is <item /> inside collection
        const fieldToUpdate = popPiece(fieldId);
        let fieldNode = node.querySelector(`:scope > ${fieldToUpdate}`);
        if (nou(fieldNode)) {
          // Field element doesn't exist yet on this item — create it.
          fieldNode = createElement(fieldToUpdate);
          node.appendChild(fieldNode);
        }
        node = fieldNode;
      } else if (!node) {
        // node is <fieldId /> inside the element
        node = createElement(fieldId);
        element.appendChild(node);
      }
      node.innerHTML =
        typeof serializeValue === 'function'
          ? serializeValue(value)
          : Boolean(serializeValue)
            ? cdataWrap(value)
            : value;
    },
    modelId
  );
}
/**
 * Fetches the document at `path`, applies `mutation` to either the root
 * model or the embedded component matching `modelId`, refreshes the
 * modified-date element(s), and writes the serialized document back.
 * @param modelId - When given and different from the document's own
 *   objectId, the mutation targets the embedded component with that id.
 * @returns Observable emitting `{ updatedDocument }` after the write.
 */
function performMutation(
  site: string,
  path: string,
  mutation: (doc: Element) => void,
  modelId: string = null
): Observable<any> {
  return fetchContentDOM(site, path).pipe(
    switchMap((doc) => {
      const documentModelId = doc.querySelector(':scope > objectId').innerHTML.trim();
      if (nnou(modelId) && documentModelId !== modelId) {
        // Mutation targets an embedded component, not the parent document.
        const component = doc.querySelector(`[id="${modelId}"]`);
        mutation(component);
        updateModifiedDateElement(component);
      } else {
        mutation(doc.documentElement);
      }
      // The root's modified date is refreshed in both cases.
      updateModifiedDateElement(doc.documentElement);
      return post(
        writeContentUrl({
          site,
          path: path,
          unlock: 'true',
          fileName: getInnerHtml(doc.querySelector(':scope > file-name'))
        }),
        serialize(doc)
      ).pipe(mapTo({ updatedDocument: doc }));
    })
  );
}
/**
 * Inserts a *new* component into a collection field of the document at
 * `path`. When `shared` is true an include reference is written and the
 * component is stored on its own; otherwise the component XML is embedded
 * inline in the parent document.
 * NOTE(review): mutates `instance` (deletes fileName/internalName) — confirm
 * callers don't reuse the object afterwards.
 */
export function insertComponent(
  site: string,
  modelId: string,
  fieldId: string,
  targetIndex: string | number,
  contentType: ContentType,
  instance: ContentInstance,
  path: string,
  shared = false
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      const id = instance.craftercms.id;
      // Shared components are referenced by path; embedded ones need none.
      const path = shared ? instance.craftercms.path ?? getComponentPath(id, instance.craftercms.contentTypeId) : null;
      // Create the new `item` that holds or references (embedded vs shared) the component.
      const newItem = createElement('item');
      delete instance.fileName;
      delete instance.internalName;
      // Create the new component that will be either embedded into the parent's XML or
      // shared stored on it's own.
      const component = mergeContentDocumentProps('component', {
        '@attributes': { id },
        'content-type': contentType.id,
        'display-template': contentType.displayTemplate,
        // TODO: per this, at this point, internal-name is always cdata wrapped, not driven by config.
        'internal-name': cdataWrap(instance.craftercms.label),
        'file-name': `${id}.xml`,
        objectId: id,
        ...reversePluckProps(instance, 'craftercms')
      });
      // Add the child elements into the `item` node
      createElements(newItem, {
        '@attributes': {
          // TODO: Hardcoded value. Fix.
          datasource: 'TODO',
          ...(shared ? {} : { inline: true })
        },
        key: shared ? path : id,
        value: instance.craftercms.label,
        ...(shared
          ? {
              include: path,
              disableFlattening: 'false'
            }
          : {
              component
            })
      });
      insertCollectionItem(element, fieldId, targetIndex, newItem);
    },
    modelId
  );
}
/**
 * Inserts an *existing* (i.e. shared) component on to the document: only an
 * include reference (by path) is written into the collection field.
 */
export function insertInstance(
  site: string,
  modelId: string,
  fieldId: string,
  targetIndex: string | number,
  instance: ContentInstance,
  path: string,
  datasource?: string
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      // Shadowing: this is the shared component's own path, not the
      // document path received as an argument.
      const path = instance.craftercms.path;
      const newItem = createElement('item');
      createElements(newItem, {
        '@attributes': {
          // TODO: Hardcoded value. Fix.
          datasource: datasource ?? 'TODO'
        },
        key: path,
        value: instance.craftercms.label,
        include: path,
        disableFlattening: 'false'
      });
      insertCollectionItem(element, fieldId, targetIndex, newItem);
    },
    modelId
  );
}
/**
 * Appends a new repeat-group `<item>` built from the given record to the
 * collection node addressed by `fieldId`/`index`.
 */
export function insertItem(
  site: string,
  modelId: string,
  fieldId: string,
  index: string | number,
  instance: InstanceRecord,
  path: string
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      let node = extractNode(element, removeLastPiece(fieldId) || fieldId, index);
      const newItem = createElement('item');
      createElements(newItem, instance);
      node.appendChild(newItem);
    },
    modelId
  );
}
/**
 * Duplicates the collection item at `fieldId`/`targetIndex`: deep-clones
 * the element, assigns fresh ids to it and its nested components, and
 * appends the clone to the parent collection.
 */
export function duplicateItem(
  site: string,
  modelId: string,
  fieldId: string,
  targetIndex: string | number,
  path: string
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      // removing last piece to get the parent of the item
      const field: Element = extractNode(
        element,
        removeLastPiece(fieldId) || fieldId,
        removeLastPiece(`${targetIndex}`)
      );
      const item: Element = extractNode(element, fieldId, targetIndex).cloneNode(true) as Element;
      // Re-id the clone so it doesn't collide with the original.
      updateItemId(item);
      updateElementComponentsId(item);
      field.appendChild(item);
    },
    modelId
  );
}
/**
 * Moves the collection item at `currentIndex` to `targetIndex` within the
 * same field of the model, re-inserting it at the new position.
 */
export function sortItem(
  site: string,
  modelId: string,
  fieldId: string,
  currentIndex: number,
  targetIndex: number,
  path: string
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      const item = extractNode(element, fieldId, currentIndex);
      insertCollectionItem(element, fieldId, targetIndex, item, currentIndex);
    },
    modelId
  );
}
export function moveItem(
site: string,
originalModelId: string,
originalFieldId: string,
originalIndex: number,
targetModelId: string,
targetFieldId: string,
targetIndex: number,
originalParentPath: string,
targetParentPath: string
): Observable<any> {
// TODO Warning: cannot perform as transaction whilst the UI is the one to do all this.
// const isOriginalEmbedded = nnou(originalParentPath);
// const isTargetEmbedded = nnou(targetParentPath);
// When moving between inherited dropzone to other dropzone, the modelsIds will be different but in some cases the
// parentId will be null for both targets in that case we need to add a nnou validation to parentsModelId;
const isSameModel = originalModelId === targetModelId;
const isSameDocument = originalParentPath === targetParentPath;
if (isSameDocument || isSameModel) {
// Moving items between two fields of the same document or model...
return performMutation(site, originalParentPath, (element) => {
// Item may be moving...
// - from parent model to an embedded model
// - from an embedded model to the parent model
// - from an embedded model to another embedded model
// - from a field to another WITHIN the same model (parent or embedded)
const parentDocumentModelId = getInnerHtml(element.querySelector(':scope > objectId'));
const sourceModelElement =
parentDocumentModelId === originalModelId ? element : element.querySelector(`[id="${originalModelId}"]`);
const targetModelElement =
parentDocumentModelId === targetModelId ? element : element.querySelector(`[id="${targetModelId}"]`);
const item = extractNode(sourceModelElement, originalFieldId, originalIndex);
const targetField = extractNode(targetModelElement, targetFieldId, removeLastPiece(`${targetIndex}`));
const targetFieldItems = targetField.querySelectorAll(':scope > item');
const parsedTargetIndex = parseInt(popPiece(`${targetIndex}`));
if (targetFieldItems.length === parsedTargetIndex) {
targetField.appendChild(item);
} else {
targetField.insertBefore(item, targetFieldItems[parsedTargetIndex]);
}
});
} else {
let removedItemHTML: string;
return performMutation(
site,
originalParentPath,
(element) => {
const item: Element = extractNode(element, originalFieldId, originalIndex);
const field: Element = extractNode(element, originalFieldId, removeLastPiece(`${originalIndex}`));
removedItemHTML = item.outerHTML;
field.removeChild(item);
},
originalModelId
).pipe(
switchMap(() =>
performMutation(<|fim▁hole|> const item: Element = extractNode(element, targetFieldId, targetIndex);
const field: Element = extractNode(element, targetFieldId, removeLastPiece(`${targetIndex}`));
const auxElement = createElement('hold');
auxElement.innerHTML = removedItemHTML;
field.insertBefore(auxElement.querySelector(':scope > item'), item);
},
targetModelId
)
)
);
}
}
/**
 * Delete the collection item at `indexToDelete` from the given field.
 * `indexToDelete` may be a dot-path (e.g. "0.2") for nested collections, in
 * which case only the last piece indexes into the resolved collection node.
 */
export function deleteItem(
  site: string,
  modelId: string,
  fieldId: string,
  indexToDelete: number | string,
  path: string
): Observable<any> {
  return performMutation(
    site,
    path,
    (element) => {
      let index = indexToDelete;
      let fieldNode = element.querySelector(`:scope > ${fieldId}`);
      if (typeof indexToDelete === 'string') {
        index = parseInt(popPiece(indexToDelete));
        // A fieldId can be in the form of `a.b`, which translates to `a > item > b` on the XML.
        // In terms of index, since all it should ever arrive here is collection items,
        // this assumes the index path points to the item itself, not the collection.
        // By calling removeLastPiece(indexToDelete), we should get the collection node here.
        fieldNode = extractNode(element, fieldId, removeLastPiece(`${indexToDelete}`));
      }
      const $fieldNode = $(fieldNode);
      $fieldNode
        .children()
        .eq(index as number)
        .remove();
      if ($fieldNode.children().length === 0) {
        // If the node isn't completely blank, the xml formatter won't do it's job in converting to a self-closing tag.
        // Also, later on, when retrieved, some *legacy* functions would impaired as the deserializing into JSON had unexpected content
        $fieldNode.html('');
      }
    },
    modelId
  );
}
// endregion
/**
 * Shape of the `/studio/api/2/search` response consumed by fetchItemsByContentType.
 */
interface SearchServiceResponse {
  response: ApiResponse;
  result: {
    total: number;
    items: Array<{
      lastModified: string;
      lastModifier: string;
      mimeType: string;
      name: string;
      path: string;
      previewUrl: string;
      size: number;
      snippets: unknown;
    }>;
    facets: Array<{
      date: boolean;
      multiple: boolean;
      name: string;
      range: boolean;
      values: Array<{
        count: number;
        from: number;
        to: number;
      }>;
    }>;
  };
}
/**
 * Search for items of the given content type(s), then fetch the full content
 * instance for every result path. Returns the total count plus a lookup of
 * instances keyed by path.
 */
export function fetchItemsByContentType(
  site: string,
  contentType: string,
  contentTypesLookup: LookupTable<ContentType>,
  options?: ComponentsContentTypeParams
): Observable<ContentInstancePage>;
export function fetchItemsByContentType(
  site: string,
  contentTypes: string[],
  contentTypesLookup: LookupTable<ContentType>,
  options?: ComponentsContentTypeParams
): Observable<ContentInstancePage>;
export function fetchItemsByContentType(
  site: string,
  contentTypes: string[] | string,
  contentTypesLookup: LookupTable<ContentType>,
  options?: ComponentsContentTypeParams
): Observable<ContentInstancePage> {
  // Normalize the single-type overload to an array.
  if (typeof contentTypes === 'string') {
    contentTypes = [contentTypes];
  }
  return postJSON(`/studio/api/2/search/search.json?siteId=${site}`, {
    ...options,
    filters: { 'content-type': contentTypes }
  }).pipe(
    // Keep only the total and the result paths from the search response.
    map<AjaxResponse<SearchServiceResponse>, { count: number; paths: string[] }>(({ response }) => ({
      count: response.result.total,
      paths: response.result.items.map((item) => item.path)
    })),
    // Fan out one fetchContentInstance call per path (or emit [] when no results).
    switchMap(({ paths, count }) =>
      zip(
        of(count),
        paths.length
          ? forkJoin(
              paths.reduce((array, path) => {
                array.push(fetchContentInstance(site, path, contentTypesLookup));
                return array;
              }, []) as Array<Observable<ContentInstance>>
            )
          : of([])
      )
    ),
    // Index the fetched instances by their path.
    map(([count, array]) => {
      return {
        count,
        lookup: array.reduce(
          (hash, contentInstance) => Object.assign(hash, { [contentInstance.craftercms.path]: contentInstance }),
          {}
        )
      };
    })
  );
}
/**
 * Re-serialize the document at `path` and write it back, effectively
 * normalizing/formatting its XML. Resolves to `true` on success.
 */
export function formatXML(site: string, path: string): Observable<boolean> {
  return fetchContentDOM(site, path).pipe(
    switchMap((doc) =>
      post(
        writeContentUrl({
          site,
          path: path,
          unlock: 'true',
          fileName: getInnerHtml(doc.querySelector(':scope > file-name'))
        }),
        serialize(doc)
      )
    ),
    mapTo(true)
  );
}
/**
 * Core metadata elements of a legacy content XML document
 * (see mergeContentDocumentProps for the defaults applied).
 */
interface LegacyContentDocumentProps {
  'content-type': string;
  'display-template': string;
  'internal-name': string;
  'file-name': string;
  'merge-strategy': string;
  createdDate_dt: string;
  lastModifiedDate_dt: string;
  objectId: string;
  locale?: string;
  // Pages only: whether the page appears in navigation.
  placeInNav?: 'true' | 'false';
}
// Loose string-keyed object used for legacy/unstructured payloads.
interface AnyObject {
  [key: string]: any;
}
/**
 * Assign a fresh uuid to an embedded component item: updates the component
 * element's id and its `key`, `file-name` and `objectId` descendants so a
 * duplicated item does not clash with its source.
 * NOTE(review): `component.id = id` relies on the `id` property mapping onto
 * the id attribute for these elements — confirm this holds for the XML
 * documents handled here (attribute selectors like `[id="..."]` are used
 * elsewhere in this file).
 */
function updateItemId(item: Element): void {
  const component = item.querySelector(':scope > component');
  if (component) {
    const key = item.querySelector(':scope > key');
    const objectId = component.querySelector(':scope > objectId');
    const fileName = component.querySelector(':scope > file-name');
    const id = uuid();
    component.id = id;
    key.innerHTML = id;
    fileName.innerHTML = `${id}.xml`;
    objectId.innerHTML = id;
  }
}
/** Re-id every `item` descendant of the given element (used after cloning a subtree). */
function updateElementComponentsId(element: Element): void {
  const items = element.querySelectorAll('item');
  for (let i = 0; i < items.length; i++) {
    updateItemId(items[i]);
  }
}
/**
 * Resolve the element addressed by a dot-separated `fieldId` (e.g. "items_o.content_o")
 * and a parallel dot-separated `index` (e.g. "0.1"). Each index piece selects an
 * `<item>` inside the corresponding field. When there are fewer index pieces than
 * field pieces, the final field is resolved without indexing (the collection node).
 * With no index at all, returns the top-level field element.
 */
function extractNode(doc: XMLDocument | Element, fieldId: string, index: string | number) {
  const indexes = index === '' || nou(index) ? [] : `${index}`.split('.').map((i) => parseInt(i, 10));
  // Accept either a document or an element as the root.
  let aux: Element = (doc as XMLDocument).documentElement ?? (doc as Element);
  if (nou(index) || isBlank(`${index}`)) {
    return aux.querySelector(`:scope > ${fieldId}`);
  }
  const fields = fieldId.split('.');
  if (indexes.length > fields.length) {
    // There's more indexes than fields
    throw new Error(
      '[content/extractNode] Path not handled: indexes.length > fields.length. Indexes ' +
        `is ${indexes} and fields is ${fields}`
    );
  }
  // Walk down one `field > item` level per index piece.
  indexes.forEach((_index, i) => {
    const field = fields[i];
    aux = aux.querySelectorAll(`:scope > ${field} > item`)[_index];
  });
  if (indexes.length === fields.length) {
    return aux;
  } else if (indexes.length < fields.length) {
    // There's one more field to use as there were less indexes
    // than there were fields. For example: fieldId: `items_o.content_o`, index: 0
    // At this point, aux would be `items_o[0]` and we need to extract `content_o`
    const field = fields[fields.length - 1];
    return aux.querySelector(`:scope > ${field}`);
  }
}
/**
 * Merge user-supplied document metadata over the legacy defaults, filling in
 * creation/modification dates with "now" when either is missing.
 */
function mergeContentDocumentProps(type: string, data: AnyObject): LegacyContentDocumentProps {
  // Dasherized props...
  // content-type, display-template, no-template-required, internal-name, file-name
  // merge-strategy, folder-name, parent-descriptor
  const fallbackDate = data.lastModifiedDate_dt && data.createdDate_dt ? null : createModifiedDate();
  return {
    'content-type': '',
    'display-template': '',
    'internal-name': '',
    'file-name': '',
    'merge-strategy': 'inherit-levels',
    createdDate_dt: data.createdDate_dt ? data.createdDate_dt : fallbackDate,
    lastModifiedDate_dt: data.lastModifiedDate_dt ? data.lastModifiedDate_dt : fallbackDate,
    objectId: '',
    ...(type === 'page' ? { placeInNav: 'false' as 'false' } : {}),
    ...(data || {})
  };
}
/** Current timestamp in ISO-8601 format (e.g. `2020-01-01T00:00:00.000Z`). */
function createModifiedDate() {
  const now = new Date();
  return now.toISOString();
}
/** Stamp the document's `lastModifiedDate_dt` element with the current time. */
function updateModifiedDateElement(doc: Element) {
  doc.querySelector(':scope > lastModifiedDate_dt').innerHTML = createModifiedDate();
}
/**
 * Build the repository path for a component, e.g.
 * getComponentPath('123', '/component/feature') => '/site/components/features/123.xml'.
 */
function getComponentPath(id: string, contentType: string) {
  const typeFolder = contentType.replace('/component/', '');
  // Pluralize the type folder, then strip any trailing slashes.
  const base = `/site/components/${typeFolder}s/`.replace(/\/{1,}$/m, '');
  return `${base}/${id}.xml`;
}
/**
 * Insert `newItem` into the collection addressed by `fieldId` at `targetIndex`.
 * When `currentIndex` is supplied the operation is a re-sort of an existing item,
 * and the insertion index is bumped by one when moving an item forward (so the
 * position is computed relative to the list *before* removal).
 * Creates the collection node (with `item-list="true"`) if it doesn't exist yet.
 */
function insertCollectionItem(
  element: Element,
  fieldId: string,
  targetIndex: string | number,
  newItem: Node,
  currentIndex?: number
): void {
  let fieldNode = extractNode(element, fieldId, removeLastPiece(`${targetIndex}`));
  let index = typeof targetIndex === 'string' ? parseInt(popPiece(targetIndex)) : targetIndex;
  // If currentIndex is present the op is a 'sort'; decide whether targetIndex needs +1.
  if (nnou(currentIndex)) {
    let currentIndexParsed = typeof currentIndex === 'string' ? parseInt(popPiece(currentIndex)) : currentIndex;
    let targetIndexParsed = typeof targetIndex === 'string' ? parseInt(popPiece(targetIndex)) : targetIndex;
    if (currentIndexParsed > targetIndexParsed) {
      index = typeof targetIndex === 'string' ? parseInt(popPiece(targetIndex)) : targetIndex;
    } else {
      index = typeof targetIndex === 'string' ? parseInt(popPiece(targetIndex)) + 1 : targetIndex + 1;
    }
  }
  if (nou(fieldNode)) {
    // Field doesn't exist in the document yet: create the collection node.
    fieldNode = createElement(fieldId);
    fieldNode.setAttribute('item-list', 'true');
    element.appendChild(fieldNode);
  }
  const itemList = fieldNode.querySelectorAll(`:scope > item`);
  if (itemList.length === index) {
    fieldNode.appendChild(newItem);
  } else {
    fieldNode.insertBefore(newItem, itemList[index]);
  }
}
/**
 * Upload a data-url file to `uploadUrl` via uppy's XHR uploader, emitting
 * progress actions and completing with the server response.
 * Tearing down the subscription cancels the upload.
 * NOTE(review): the `path` parameter is not referenced in this body — callers
 * appear to pass the path inside `metaData` instead; confirm before removing.
 * @param uploadUrl - Endpoint the file is POSTed to (XSRF query arg appended).
 * @param file - Object with `name`, `type` and `dataUrl` properties.
 * @param metaData - Extra form fields sent along with the upload.
 * @param xsrfArgumentName - Name of the request-forgery-token query argument.
 */
export function createFileUpload(
  uploadUrl: string,
  file: any,
  path: string,
  metaData: object,
  xsrfArgumentName: string
): Observable<StandardAction> {
  const qs = toQueryString({ [xsrfArgumentName]: getRequestForgeryToken() });
  return new Observable((subscriber) => {
    const uppy = new Core({ autoProceed: true });
    uppy.use(XHRUpload, { endpoint: `${uploadUrl}${qs}`, headers: getGlobalHeaders() });
    uppy.setMeta(metaData);
    const blob = dataUriToBlob(file.dataUrl);
    uppy.on('upload-success', (file, response) => {
      subscriber.next({
        type: 'complete',
        payload: response
      });
      subscriber.complete();
    });
    uppy.on('upload-progress', (file, progress) => {
      let type = 'progress';
      subscriber.next({
        type,
        payload: {
          file,
          progress
        }
      });
    });
    uppy.on('upload-error', (file, error) => {
      subscriber.error(error);
    });
    uppy.addFile({
      name: file.name,
      type: file.type,
      data: blob
    });
    // Unsubscribing cancels any in-flight upload.
    return () => {
      uppy.cancelAll();
    };
  });
}
/** Upload a data-url file through the legacy write-content API. */
export function uploadDataUrl(
  site: string,
  file: any,
  path: string,
  xsrfArgumentName: string
): Observable<StandardAction> {
  const meta = { site, name: file.name, type: file.type, path };
  return createFileUpload(
    '/studio/api/1/services/api/1/content/write-content.json',
    file,
    path,
    meta,
    xsrfArgumentName
  );
}
/** Upload a data-url file to S3 through the given upload profile. */
export function uploadToS3(
  site: string,
  file: any,
  path: string,
  profileId: string,
  xsrfArgumentName: string
): Observable<StandardAction> {
  const meta = { name: file.name, type: file.type, siteId: site, path, profileId: profileId };
  return createFileUpload('/studio/api/2/aws/s3/upload.json', file, path, meta, xsrfArgumentName);
}
/** Upload a data-url file to a WebDAV server through the given upload profile. */
export function uploadToWebDAV(
  site: string,
  file: any,
  path: string,
  profileId: string,
  xsrfArgumentName: string
): Observable<StandardAction> {
  const meta = { name: file.name, type: file.type, siteId: site, path, profileId: profileId };
  return createFileUpload('/studio/api/2/webdav/upload', file, path, meta, xsrfArgumentName);
}
/** Upload a data-url file to a CMIS repository. */
export function uploadToCMIS(
  site: string,
  file: any,
  path: string,
  repositoryId: string,
  xsrfArgumentName: string
): Observable<StandardAction> {
  const meta = { name: file.name, type: file.type, siteId: site, cmisPath: path, cmisRepoId: repositoryId };
  return createFileUpload('/studio/api/2/cmis/upload', file, path, meta, xsrfArgumentName);
}
/** Build the legacy write-content URL used for bulk uploads (CSRF token included). */
export function getBulkUploadUrl(site: string, path: string): string {
  const params = {
    site,
    path,
    contentType: 'folder',
    createFolders: true,
    draft: false,
    duplicate: false,
    unlock: true,
    _csrf: getRequestForgeryToken()
  };
  return `/studio/api/1/services/api/1/content/write-content.json${toQueryString(params)}`;
}
/** Fetch the list of quick-create content entries configured for the site. */
export function fetchQuickCreateList(site: string): Observable<QuickCreateItem[]> {
  const url = `/studio/api/2/content/list_quick_create_content.json?siteId=${site}`;
  return get(url).pipe(pluck('response', 'items'));
}
/** Fetch the version history of the item at `path`. */
export function fetchItemHistory(site: string, path: string): Observable<VersionsResponse> {
  const url = `/studio/api/1/services/api/1/content/get-item-versions.json?site=${site}&path=${encodeURIComponent(
    path
  )}`;
  return get(url).pipe(pluck('response'), catchError(errorSelectorApi1));
}
/** Revert the item at `path` to the given version. */
export function revertTo(site: string, path: string, versionNumber: string): Observable<Boolean> {
  const url = `/studio/api/1/services/api/1/content/revert-content.json?site=${site}&path=${encodeURIComponent(
    path
  )}&version=${versionNumber}`;
  return get(url).pipe(pluck('response'), catchError(errorSelectorApi1));
}
/** Descriptor of one version of an item (content may be null when not loaded). */
interface VersionDescriptor {
  site: string;
  path: string;
  versionNumber: string;
  content: ContentInstance;
}
/**
 * Fetch a single version descriptor.
 * NOTE(review): stub implementation — emits the descriptor with `content: null`
 * without hitting the backend; actual version content fetching is not wired up.
 */
export function fetchItemVersion(site: string, path: string, versionNumber: string): Observable<VersionDescriptor> {
  return of({
    site,
    path,
    versionNumber,
    content: null
  });
}
/**
 * Fetch descriptors for a pair of versions.
 * Stub implementation: emits descriptors with `content: null` (no backend call).
 */
export function fetchVersions(
  site: string,
  path: string,
  versionNumbers: [string, string],
  contentTypes: LookupTable<ContentType>
): Observable<[VersionDescriptor, VersionDescriptor]> {
  const [firstVersion, secondVersion] = versionNumbers;
  return of([
    { site, path, versionNumber: firstVersion, content: null },
    { site, path, versionNumber: secondVersion, content: null }
  ]);
}
/**
 * Fetch the children of `path`, decorating each child (and the level descriptor,
 * when present) with parsed state/action maps. The result is an array of
 * children onto which `levelDescriptor`, `total`, `offset` and `limit` are
 * assigned as extra properties.
 */
export function fetchChildrenByPath(
  siteId: string,
  path: string,
  options?: Partial<GetChildrenOptions>
): Observable<GetChildrenResponse> {
  return postJSON('/studio/api/2/content/children_by_path', {
    siteId,
    path,
    ...options
  }).pipe(
    pluck('response'),
    map(({ children, levelDescriptor, total, offset, limit }) =>
      // Object.assign onto the (possibly empty) array attaches the paging
      // metadata as properties of the array itself.
      Object.assign(
        children
          ? children.map((child) => ({
              ...child,
              stateMap: createItemStateMap(child.state),
              availableActionsMap: createItemActionMap(child.availableActions)
            }))
          : [],
        {
          ...(levelDescriptor && {
            levelDescriptor: {
              ...levelDescriptor,
              stateMap: createItemStateMap(levelDescriptor.state),
              availableActionsMap: createItemActionMap(levelDescriptor.availableActions)
            }
          }),
          total,
          offset,
          limit
        }
      )
    )
  );
}
/**
 * Fetch children for multiple paths in parallel, returning a lookup keyed by
 * path. Per-path options in `paths` override the shared `options`.
 * A 404 for an individual path yields an empty array for that path rather
 * than failing the whole batch; other errors are re-thrown.
 */
export function fetchChildrenByPaths(
  siteId: string,
  paths: LookupTable<Partial<GetChildrenOptions>>,
  options?: Partial<GetChildrenOptions>
): Observable<LookupTable<GetChildrenResponse>> {
  const requests = Object.keys(paths).map((path) =>
    fetchChildrenByPath(siteId, path, { ...options, ...paths[path] }).pipe(
      catchError((error: AjaxError) => {
        if (error.status === 404) {
          return of([]);
        } else {
          throw error;
        }
      })
    )
  );
  return forkJoin(requests).pipe(
    map((responses) => {
      // Re-associate responses with their paths (forkJoin preserves order).
      const data = {};
      Object.keys(paths).forEach((path, i) => (data[path] = responses[i]));
      return data;
    })
  );
}
/**
 * Fetch sandbox items for the given paths, decorated with parsed state/action
 * maps. When `castAsDetailedItem` is set, each item is converted to a
 * DetailedItem. `preferContent` defaults to true.
 */
export function fetchItemsByPath(siteId: string, paths: string[]): Observable<SandboxItem[]>;
export function fetchItemsByPath(
  siteId: string,
  paths: string[],
  options: FetchItemsByPathOptions & { castAsDetailedItem: false }
): Observable<SandboxItem[]>;
export function fetchItemsByPath(
  siteId: string,
  paths: string[],
  options: FetchItemsByPathOptions & { castAsDetailedItem: true }
): Observable<DetailedItem[]>;
export function fetchItemsByPath(
  siteId: string,
  paths: string[],
  options: FetchItemsByPathOptions
): Observable<SandboxItem[]>;
export function fetchItemsByPath(
  siteId: string,
  paths: string[],
  options?: FetchItemsByPathOptions
): Observable<SandboxItem[] | DetailedItem[]> {
  const { castAsDetailedItem = false, preferContent = true } = options ?? {};
  return postJSON('/studio/api/2/content/sandbox_items_by_path', { siteId, paths, preferContent }).pipe(
    pluck('response', 'items'),
    map(
      (items: SandboxItem[]) =>
        items.map((item) => ({
          ...(castAsDetailedItem ? parseSandBoxItemToDetailedItem(item) : item),
          stateMap: createItemStateMap(item.state),
          availableActionsMap: createItemActionMap(item.availableActions)
        })) as SandboxItem[] | DetailedItem[]
    )
  );
}
/**
 * Fetch a single item by path via the bulk sandbox-items API.
 * Since the bulk API does not 404 for missing paths, a synthetic AjaxError-like
 * 404 is thrown when the path yields no item.
 */
export function fetchItemByPath(siteId: string, path: string): Observable<SandboxItem>;
export function fetchItemByPath(
  siteId: string,
  path: string,
  options: FetchItemsByPathOptions & { castAsDetailedItem: false }
): Observable<SandboxItem>;
export function fetchItemByPath(
  siteId: string,
  path: string,
  options: FetchItemsByPathOptions & { castAsDetailedItem: true }
): Observable<DetailedItem>;
export function fetchItemByPath(
  siteId: string,
  path: string,
  options: FetchItemsByPathOptions
): Observable<SandboxItem>;
export function fetchItemByPath(
  siteId: string,
  path: string,
  options?: FetchItemsByPathOptions
): Observable<SandboxItem | DetailedItem> {
  return fetchItemsByPath(siteId, [path], options).pipe(
    tap((items) => {
      if (items[0] === void 0) {
        // Fake out the 404 which the backend won't return for this bulk API
        // eslint-disable-next-line no-throw-literal
        throw {
          name: 'AjaxError',
          status: 404,
          response: {
            response: {
              code: 7000,
              message: 'Content not found',
              remedialAction: `Check that path '${path}' is correct and it exists in site '${siteId}'`,
              documentationUrl: ''
            }
          }
        };
      }
    }),
    pluck(0)
  );
}
/** Fetch an item (as DetailedItem) together with its children, in parallel. */
export function fetchItemWithChildrenByPath(
  siteId: string,
  path: string,
  options?: Partial<GetChildrenOptions>
): Observable<GetItemWithChildrenResponse> {
  const item$ = fetchItemByPath(siteId, path, { castAsDetailedItem: true });
  const children$ = fetchChildrenByPath(siteId, path, options);
  return forkJoin({ item: item$, children: children$ });
}
/** Paste (copy/cut per `clipboard.type`) the clipboard items onto `targetPath`. */
export function paste(siteId: string, targetPath: string, clipboard: Clipboard): Observable<any> {
  return postJSON('/studio/api/2/content/paste', {
    siteId,
    operation: clipboard.type,
    targetPath,
    item: getPasteItemFromPath(clipboard.sourcePath, clipboard.paths)
  }).pipe(pluck('response'));
}
/** Duplicate the item at `path` (server-side copy). */
export function duplicate(siteId: string, path: string): Observable<any> {
  const body = { siteId, path };
  return postJSON('/studio/api/2/content/duplicate', body).pipe(pluck('response'));
}
/** Delete the given items (plus optional dependencies) with an audit comment. */
export function deleteItems(
  siteId: string,
  items: string[],
  comment: string,
  optionalDependencies?: string[]
): Observable<boolean> {
  const body = { siteId, items, optionalDependencies, comment };
  return postJSON('/studio/api/2/content/delete', body).pipe(mapTo(true));
}
/** Lock one or more items by path; accepts a single path or an array. */
export function lock(siteId: string, path: string): Observable<boolean>;
export function lock(siteId: string, paths: string[]): Observable<boolean>;
export function lock(siteId: string, paths: string[] | string): Observable<boolean> {
  return postJSON('/studio/api/2/content/items_lock_by_path', { siteId, paths: asArray(paths) }).pipe(mapTo(true));
}
/** Unlock the item at `path`. */
export function unlock(siteId: string, path: string): Observable<boolean> {
  const body = { siteId, path };
  return postJSON('/studio/api/2/content/item_unlock_by_path', body).pipe(mapTo(true));
}
/**
 * Fetch items whose workflow would be affected by editing `path`.
 * The path is URI-encoded for consistency with the sibling APIs in this file
 * (paths may contain spaces or reserved characters).
 */
export function fetchWorkflowAffectedItems(site: string, path: string): Observable<SandboxItem[]> {
  return get(`/studio/api/2/workflow/affected_paths?siteId=${site}&path=${encodeURIComponent(path)}`).pipe(
    pluck('response', 'items')
  );
}
/** Create a folder named `name` under `path` via the legacy API. */
export function createFolder(site: string, path: string, name: string): Observable<unknown> {
  const url =
    '/studio/api/1/services/api/1/content/create-folder.json' +
    `?site=${site}&path=${encodeURIComponent(path)}&name=${encodeURIComponent(name)}`;
  return post(url).pipe(pluck('response'), catchError(errorSelectorApi1));
}
/**
 * Create an (empty) file named `fileName` under `path` via the legacy API.
 * `fileName` is URI-encoded like `path` is — it may contain spaces or
 * reserved characters (consistent with the sibling APIs in this file).
 */
export function createFile(site: string, path: string, fileName: string): Observable<unknown> {
  return post(
    `/studio/api/1/services/api/1/content/write-content.json?site=${site}&phase=onSave&path=${encodeURIComponent(
      path
    )}&fileName=${encodeURIComponent(fileName)}&unlock=true`
  ).pipe(pluck('response'), catchError(errorSelectorApi1));
}
export function renameFolder(site: string, path: string, name: string) {
return post(
`/studio/api/1/services/api/1/content/rename-folder.json?site=${site}&path=${encodeURIComponent(
path
)}&name=${encodeURIComponent(name)}`
).pipe(pluck('response'), catchError(errorSelectorApi1));
}
/**
 * Change the content type of the item at `path`.
 * `path` and `contentType` are URI-encoded for consistency with the sibling
 * APIs in this file (both contain `/` and may contain reserved characters).
 */
export function changeContentType(site: string, path: string, contentType: string): Observable<boolean> {
  return post(
    `/studio/api/1/services/api/1/content/change-content-type.json?site=${site}&path=${encodeURIComponent(
      path
    )}&contentType=${encodeURIComponent(contentType)}`
  ).pipe(pluck('response'), catchError(errorSelectorApi1));
}
/**
 * Check whether content exists at `path`.
 * The path is URI-encoded for consistency with the sibling APIs in this file.
 */
export function checkPathExistence(site: string, path: string): Observable<boolean> {
  return get(
    `/studio/api/1/services/api/1/content/content-exists.json?site_id=${site}&path=${encodeURIComponent(path)}`
  ).pipe(pluck('response', 'content'), catchError(errorSelectorApi1));
}
/** Fetch an item in the legacy (api/1) representation. */
export function fetchLegacyItem(site: string, path: string): Observable<LegacyItem> {
  const url = `/studio/api/1/services/api/1/content/get-item.json?site_id=${site}&path=${encodeURIComponent(path)}`;
  return get(url).pipe(pluck('response', 'item'), catchError(errorSelectorApi1));
}
/**
 * Fetch the legacy item tree rooted at `path`.
 * @param options - Optional `depth` (levels to descend) and `order` parameters,
 *   forwarded as query arguments.
 */
export function fetchLegacyItemsTree(
  site: string,
  path: string,
  options?: Partial<{ depth: number; order: string }>
): Observable<LegacyItem> {
  return get(
    `/studio/api/1/services/api/1/content/get-items-tree.json${toQueryString({
      site_id: site,
      path,
      ...options
    })}`
  ).pipe(pluck('response', 'item'), catchError(errorSelectorApi1));
}
export function fetchContentByCommitId(site: string, path: string, commitId: string): Observable<string | Blob> {
return getBinary(
`/studio/api/2/content/get_content_by_commit_id?siteId=${site}&path=${path}&commitId=${commitId}`,
void 0,
'blob'
).pipe(
switchMap((ajax) => {
const blob = ajax.response;
const type = ajax.xhr.getResponseHeader('content-type');
if (
/^text\//.test(type) ||
/^application\/(x-httpd-php|rtf|xhtml\+xml|xml|json|ld\+json|javascript|x-groovy|x-sh)$/.test(type)
) {
return blob.text() as Promise<string>;
} else if (/^image\//.test(type)) {
return of(URL.createObjectURL(blob));
} else {
return of(blob);
}
})
);
}<|fim▁end|>
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from django.template import loader
from django.template.loader import select_template
from django.utils.translation import ugettext_lazy as _
from . import models
from .conf import settings
from filer.models.imagemodels import Image
class FilerFolderPlugin(CMSPluginBase):
    """CMS plugin that renders the files, images and sub-folders of a filer folder."""
    module = 'Filer'
    model = models.FilerFolder
    name = _("Folder")
    # Per-instance template selected from the instance's ``style`` (falls back to 'default').
    TEMPLATE_NAME = 'cmsplugin_filer_folder/plugins/folder/%s.html'
    render_template = TEMPLATE_NAME % 'default'
    text_enabled = False
    admin_preview = False
    fieldsets = (
        (None, {'fields': ['title', 'folder']}),
    )
    # Only expose the style selector in the admin when styles are configured.
    if settings.CMSPLUGIN_FILER_FOLDER_STYLE_CHOICES:
        fieldsets[0][1]['fields'].append('style')

    def get_folder_files(self, folder, user):
        """Return the folder's non-image files; non-staff users only see public ones."""
        qs_files = folder.files.filter(image__isnull=True)
        if user.is_staff:
            return qs_files
        else:
            return qs_files.filter(is_public=True)

    def get_folder_images(self, folder, user):
        """Return the folder's image files; non-staff users only see public ones."""
        qs_files = folder.files.instance_of(Image)
        if user.is_staff:
            return qs_files
        else:
            return qs_files.filter(is_public=True)

    def get_children(self, folder):
        """Return the folder's direct sub-folders."""
        return folder.get_children()

    def render(self, context, instance, placeholder):
        """Pick the style-specific template and populate the render context."""
        self.render_template = select_template((
            'cmsplugin_filer_folder/folder.html',  # backwards compatibility. deprecated!
            self.TEMPLATE_NAME % instance.style,
            self.TEMPLATE_NAME % 'default')
        ).template
        folder_files = self.get_folder_files(instance.folder,
                                             context['request'].user)
        folder_images = self.get_folder_images(instance.folder,
                                               context['request'].user)
        folder_folders = self.get_children(instance.folder)
        context.update({
            'object': instance,
            # NOTE(review): sorted() on these querysets relies on the File model
            # defining ordering comparisons — confirm against the filer version in use.
            'folder_files': sorted(folder_files),
            'folder_images': sorted(folder_images),
            'folder_folders': folder_folders,
            'placeholder': placeholder
        })
        return context
plugin_pool.register_plugin(FilerFolderPlugin)
# -*- coding: utf-8 -*-
import logging as _logging
import sys
__author__ = 'luckydonald'
__all__ = ["logging", "ColoredFormatter", "ColoredStreamHandler", "LevelByNameFilter"]
DEFAULT_DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
class ColoredFormatter(_logging.Formatter):
    """
    Formatter that renders log records with ANSI colors, picking the color
    from the record's level name. Multi-line messages get a level header
    followed by one filler-prefixed line per message line.
    """

    class Color(object):
        """
        utility to return ansi colored text.
        just to store the colors next to the function.
        """
        # Color codes: http://misc.flogisoft.com/bash/tip_colors_and_formatting

        def __init__(self, formatter):
            self.formatter = formatter
        # end def

        # ANSI foreground/background color numbers.
        colors = {
            'default': 39,
            'black': 30,
            'red': 31,
            'green': 32,
            'yellow': 33,
            'blue': 34,
            'magenta': 35,
            'cyan': 36,
            'white': 37,
            'grey': 90,
            'bgred': 41,
            'bggrey': 100
        }

        # Log level name -> color name.
        mapping = {
            'INFO': 'default',
            'WARNING': 'yellow',
            'ERROR': 'red',
            'CRITICAL': 'magenta',
            'DEBUG': 'grey',
            'SUCCESS': 'green'
        }

        color_prefix = '\033['

        def prepare_color(self, color_number):
            """Return the ANSI escape sequence for the given color number."""
            return ('%s%dm') % (self.color_prefix, color_number)
        # end def

        def colored(self, record):
            """
            Render the (already formatted) record message with ANSI colors.
            """
            color = self.mapping.get(record.levelname, 'default')
            clr = self.colors[color]
            formatter = dict(
                all_off=self.prepare_color(0),  # Reset all attributes
                color_on=self.prepare_color(clr),  # Color as given/from lookup
                color_off=self.prepare_color(39),  # Default foreground color
                inverse_on=self.prepare_color(7),  # Reverse (invert the foreground and background colors)
                inverse_off=self.prepare_color(27),  # Reset reverse
                background_off=self.prepare_color(49),  # Default background color
                file_color_on=self.prepare_color(94),  # Light blue
            )
            lines = []
            # log level
            level = "{level:8}".format(level=record.levelname)
            level_filler = "{:{}}".format("", len(level))
            # file/function name
            filepart = record.name if record.name else ""
            filepart += "." + record.funcName if record.funcName != "<module>" else ""
            # date
            timestamp = " " + record.asctime if record.asctime else ""
            timestamp_filler = " " * len(timestamp)
            # Process / Thread names (only shown when not the main process/thread)
            process_thread_part = process_thread_part_filler = ""
            has_process = hasattr(record, "processName") and record.processName != "MainProcess"
            has_thread = hasattr(record, "threadName") and record.threadName != "MainThread"
            if has_process:
                process_thread_part += "{inverse_on}{file_color_on}{thread}{inverse_off}".format(
                    thread=record.processName, **formatter)
            # end if
            if has_process and has_thread:
                process_thread_part += " ".format(**formatter)
            # end if
            if has_thread:
                process_thread_part += "{inverse_on}{file_color_on}{process}{inverse_off}".format(
                    process=record.threadName, **formatter)
            # end if
            if has_process or has_thread and len(timestamp) > 1:
                # inject the formatting here, as empty formatting without text would break
                process_thread_part_filler = " " * len(process_thread_part)
                process_thread_part = "{file_color_on}{inverse_on}{process_thread_part}{inverse_off}".format(
                    process_thread_part=process_thread_part, **formatter
                )
                # abuse {date} to contain a space for us. Because a blue colored space is still a space.
                timestamp += " "  # so the file don't immediatly follows after the date.
                timestamp_filler += " "
            # end if
            # original message
            lines_ = record.message.splitlines()
            first_line = True if len(lines_) > 1 else None
            for line in lines_:
                if first_line is None:  # single line
                    lines.append(
                        "{color_on}{inverse_on}{level}{inverse_off}{color_on}{date}{color_off}{file_color_on}{process_thread_part} {file_color_on}{filepart}:{color_off} {color_on}{message}{color_off}{background_off}{all_off}".format(
                            filepart=filepart, level=level, message=line, date=timestamp,
                            process_thread_part=process_thread_part, **formatter))
                    break
                # end if
                if first_line:  # multi-line: emit the header once, before the first line
                    lines.append(
                        "{color_on}{inverse_on}{level}{inverse_off}{color_on}{date}{color_off}{file_color_on}{process_thread_part} {file_color_on}{filepart}:{color_off} {all_off}".format(
                            filepart=filepart, level=level, message=line, date=timestamp,
                            process_thread_part=process_thread_part, **formatter))
                    first_line = False
                # end if
                # BUGFIX: previously only the first line of a multi-line message was
                # appended (later lines matched no branch and were dropped). Every
                # message line now gets its filler-prefixed output line.
                lines.append(
                    "{color_on}{inverse_on}{level_filler}{inverse_off}{color_off} {color_on}{message}{color_off}{background_off}{all_off}".format(
                        level_filler=level_filler, message=line, date=timestamp, date_filler=timestamp_filler,
                        process_thread_part=process_thread_part, process_thread_part_filler=process_thread_part_filler,
                        **formatter))
            # end for
            return "\n".join(lines)
        # end def
    # end class

    def __init__(self, date_formatter=None):
        super(ColoredFormatter, self).__init__(datefmt=date_formatter)
        self.color_instance = self.Color(self)

    def colored(self, record):
        """Delegate ANSI rendering to the nested Color helper."""
        return self.color_instance.colored(record)
    # end def

    def format(self, record):
        """Format the record (message, exception text, stack info), then colorize."""
        super(ColoredFormatter, self).format(record)
        # if record.threadName == "MainThread":
        #     pass
        # part1 = self.firstpart.format(record)
        if self.usesTime():
            record.asctime = self.formatTime(record, self.datefmt)
        else:
            record.asctime = ""
        s = self._fmt % record.__dict__  # py3: s = self.formatMessage(record)
        if record.exc_text:
            if s[-1:] != "\n":
                s += "\n"
            try:
                s = s + record.exc_text
            except UnicodeError:  # PYTHON 2, LOL!
                # Sometimes filenames have non-ASCII chars, which can lead
                # to errors when s is Unicode and record.exc_text is str
                # See issue 8924.
                # We also use replace for when there are multiple
                # encodings, e.g. UTF-8 for the filesystem and latin-1
                # for a script. See issue 13232.
                s = s + record.exc_text.decode(sys.getfilesystemencoding(), 'replace')
        if hasattr(record, "stack_info") and record.stack_info:  # py2 doesn't have .stack_info
            if s[-1:] != "\n":
                s += "\n"
            s = s + record.stack_info  # py3: self.formatStack()
        record.message = s
        return self.colored(record)
    # end def

    def usesTime(self):
        # Only format a timestamp when a date format was supplied.
        return bool(self.datefmt)
    # end def
# end class
class ColoredStreamHandler(_logging.StreamHandler):
    """
    Like the normal StreamHandler,
    but it automatically sets
    `self.formatter = ColoredFormatter()`
    """
    # BUGFIX: the docstring above used to sit *after* this assignment, which made
    # it a stray string expression rather than the class docstring.

    # Default strftime pattern used when no date_formatter is supplied.
    DEFAULT_DATE_FORMAT = DEFAULT_DATE_FORMAT

    def __init__(self, stream=None, date_formatter=DEFAULT_DATE_FORMAT):
        super(ColoredStreamHandler, self).__init__(stream)
        self.formatter = ColoredFormatter(date_formatter=date_formatter)
# noinspection PyProtectedMember,PyProtectedMember
class _LoggingWrapper(object):
SUCCESS = 25 # between WARNING and INFO
def __init__(self):
_logging.addLevelName(self.SUCCESS, 'SUCCESS')
def getLoglevelInt(self, level_string):
"""
You provide a String, and get a level int
:param level_string: The level.
:type level_string: str
:return: level
:rtype : int
:raises KeyError: if the level does not exists.
"""
if isinstance(level_string, int):
return level_string
# end if
try:
return {
# as names:
"NOTSET": _logging.NOTSET,
"DEBUG": _logging.DEBUG,
"INFO": _logging.INFO,
"SUCCESS": self.SUCCESS,
"WARNING": _logging.WARNING,
"WARN": _logging.WARN, # = WARNING
"ERROR": _logging.ERROR,<|fim▁hole|> except KeyError:
try:
return int(level_string)
except ValueError:
pass
# end try
raise # key not known, and is no integer either.
# end try
# end def
def __call__(self, logger_name):
"""
alias to logger.getLogger(logger_name)
:param logger_name:
:return: self.getLogger(logger_name)
"""
return self.getLogger(logger_name)
# end def
    def add_colored_handler(
        self, logger_name=None, stream=None, level=None, date_formatter=DEFAULT_DATE_FORMAT, filter=None,
    ):
        """
        Register a logger handler to colorfully print the messages.
        If stream is specified, the instance will use it for logging output; otherwise, sys.stdout will be used.
        If you supply a date_formatter, there will also be printed a date/time for the logged messages.
        Uses python `time.strftime` time formating, see https://docs.python.org/library/time.html#time.strftime
        :keyword logger_name: the name of the logger you want to register the printing to.
            Probably you should use __name__ , to use your package's logger,
            "root" will force all loggers to output.
        :type  logger_name: str
        :keyword stream: An output stream. Default: sys.stdout
        :keyword level: If given, the logger's level is set to it (int or level name).
        :keyword date_formatter: Apply a format for time output. If `None` is given, no time gets printed.
            Something like "%Y-%m-%d %H:%M:%S". Uses python `time.strftime` time formating,
            see https://docs.python.org/library/time.html#time.strftime
        :type  date_formatter: str
        :keyword filter: A filter applied to the handler.
            (NOTE: the parameter name shadows the builtin ``filter``; kept for
            backwards compatibility with existing keyword callers.)
        :return: The logger the handler was attached to (not ``None``).
        :rtype: logging.Logger
        """
        logger = self.getLogger(logger_name)  # __name__
        if stream is None:
            import sys
            stream = sys.stdout
        # end if
        handler = ColoredStreamHandler(stream=stream, date_formatter=date_formatter)
        if filter:
            handler.addFilter(filter)
        # end if
        logger.addHandler(handler)
        if level:
            logger.setLevel(level)
        # end if
        return logger
    # end def
    def test_logger_levels(self, name=__name__, force_all_levels=True):
        """
        Emits one sample message at every supported level, as a quick visual
        check of what the installed handlers/formatters produce.
        :param name: Logger name to emit on (defaults to this module's name).
        :param force_all_levels: Temporarily lower the level to DEBUG so every
            message is emitted, restoring the previous effective level after.
        """
        logger = self.getLogger(name)
        logger_level = logger.getEffectiveLevel()
        if force_all_levels:
            # `logging` is the module-level wrapper instance defined below.
            logger.setLevel(logging.DEBUG)
        logger.debug('level debug')
        logger.info('level info')
        logger.success('level success')
        logger.warning('level warning')
        logger.error('level error')
        logger.critical('level critical')
        if force_all_levels:
            # Restore whatever level was effective before the test.
            logger.setLevel(logger_level)
        # end if
    # end def
    def getLogger(self, name=None):
        """
        Same as ``logging.getLogger(name)``, but the returned logger is
        augmented with a ``.success(message, *args)`` method and a ``SUCCESS``
        attribute holding the custom level number.
        :param name: Dotted logger name; ``None`` returns the root logger.
        :return: The (augmented) logger instance.
        :rtype: logging.Logger
        """
        logger = _logging.getLogger(name)
        logger.SUCCESS = self.SUCCESS
        # Bind through the logger's internal _log so emitted records carry the
        # correct caller information for the custom SUCCESS level.
        setattr(logger, "success", lambda message, *args: logger._log(self.SUCCESS, message, args))
        return logger
if sys.version < "3":
def success(self, msg, *args, **kwargs):
"""
Log 'msg % args' with severity 'SUCCESS'.
To pass exception information, use the keyword argument exc_info with
a true value.
logger.debug("Houston, we landed in the %s", "moon", exc_info=False)
"""
self._success(msg, *args, **kwargs)
else:
from .py3 import success
    def _success(self, msg, *args, **kwargs):
        # Mirrors the stdlib module-level logging.debug()/info() helpers:
        # ensure the root logger has at least one handler, then log at the
        # custom SUCCESS level. `self.root` and `self.basicConfig` are not
        # defined on this wrapper; they are expected to resolve through
        # __getattr__ delegation to the stdlib `logging` module.
        if len(self.root.handlers) == 0:
            self.basicConfig()
        self.root._log(self.SUCCESS, msg, args, **kwargs)
def __getattr__(self, item):
if item != "__getattr__":
if item in self.__dict__:
return self.__dict__[item]
if item == "getLogger":
return self.getLogger
elif item == "success":
return self.success
elif item == "SUCCESS":
return self.SUCCESS
# end if
pass
else:
return getattr(_logging, item)
# end def
# end class
logging = _LoggingWrapper()
class LevelByNameFilter(object):
    """
    A logging filter that maps dotted logger names to minimum levels.
    The most specific (longest) configured name prefix wins; see `filter()`.
    """
    def __init__(self, root=logging.WARNING, debug=None, info=None, success=None, warning=None, error=None,
                 critical=None, by_level=None):
        """
        A filter where you specify logging levels bound to names (package names, as known from importing)
        :param root: level the root should have to be logged. None to disable.
        :param debug: all loggers which should log debug and above.
        :param info: all loggers which should log info and above.
        :param success: all loggers which should log success and above.
        :param warning: all loggers which should log warning and above.
        :param error: all loggers which should log error and above.
        :param critical: all loggers which should log critical and above.
        :param by_level: a dict with levels as a key, and names to log as value.
            Example: {10: "__main__", 20: "a.b.c", 30: ["a.b.d", "a.b.e"], logging.WARNING: "a"}
        """
        # Maps name prefixes (stored with a trailing ".") to the minimum
        # level allowed; key "" is the root/default entry.
        self.mapping = dict()
        if root:
            if isinstance(root, str):
                root = logging.getLoglevelInt(root)
            assert isinstance(root, int)
            self.mapping[""] = root
        # end if
        # (Removed a dead `level = logging.DEBUG` assignment that was never
        # read before being overwritten.)
        self.parse_argument(debug, logging.DEBUG)
        self.parse_argument(info, logging.INFO)
        self.parse_argument(success, logging.SUCCESS)
        self.parse_argument(warning, logging.WARNING)
        self.parse_argument(error, logging.ERROR)
        self.parse_argument(critical, logging.CRITICAL)
        if by_level:
            assert isinstance(by_level, dict)
            for level, files in by_level.items():
                self.parse_argument(files, level)
            # end for
        # end if
    # end def
    def parse_argument(self, argument, level):
        """
        Registers `argument` under the given minimum `level`.
        `argument` may be a single name, a comma-separated string of names, or
        a (possibly nested) list/tuple of those. The loop below treats the
        list as a work queue: nested containers and comma-split results are
        appended back onto it and processed in later iterations.
        """
        if argument:
            if isinstance(argument, tuple):
                argument = list(argument)
            if not isinstance(argument, list):
                argument = [argument]
            # end if
            assert isinstance(argument, list)
            for part in argument:
                if isinstance(part, (list, tuple)):
                    argument.extend(part)  # flatten one level; items revisited later
                elif not isinstance(part, str):
                    raise TypeError("argument {val!r} is type {type}, should be str.".format(val=part, type=type(part)))
                elif "," in part:
                    argument.append(part.split(","))  # split; pieces revisited later
                else:
                    # Trailing "." makes prefix matching respect name segments.
                    self.mapping[part.strip() + "."] = level
                # end if
            # end for
        # end if
    # end def
    def filter(self, record):
        """
        Standard `logging` filter hook: allow the record only if its level
        reaches the level configured for the most specific matching prefix.
        """
        if not self.mapping:
            return False  # nothing configured: block everything
        # end if
        name = record.name + "."
        mapping_path = ""  # default is "" = root
        for k in self.mapping:
            if name.startswith(k):
                if len(mapping_path) < len(k):  # we got a longer path. longer = more specific.
                    mapping_path = k
                # end if
            # end if
        # end for
        if mapping_path in self.mapping:  # e.g. root "" is not specified.
            level = self.mapping[mapping_path]
            return record.levelno >= level
        # end if
        return False
    # end def
# end class
# # Test code to get a threaded logger:
# from luckydonaldUtils.logger import logging;import threading; from time import sleep;
# def lel():
# logger.debug(threading.current_thread().name)
# logging.test_logger_levels(),logger.critical("littlepip is\nBEST\npony!")
# # end def
# logger = logging.add_colored_handler(level=logging.DEBUG, date_formatter="%Y-%m-%d %H:%M:%S");logging.add_colored_handler(level=logging.DEBUG); lel();sleep(1);thread=threading.Thread(target=lel);thread.start();thread.join()<|fim▁end|>
|
"FATAL": _logging.FATAL, # = CRITICAL
"CRITICAL": _logging.CRITICAL,
}[level_string]
|
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2
# -*- coding: utf-8 -*-
##############################################################################
#
# sci.AI EXE
# Copyright(C) 2017 sci.AI
#
# This program is free software: you can redistribute it and / or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY
# without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see < http://www.gnu.org/licenses/ >.
#
##############################################################################
from flask import Flask
from redis import Redis
from rq import Queue
import rq_dashboard
from flask_mongoengine import MongoEngine
from validator.config import Configuration
app = Flask(__name__)
app.config.from_object(Configuration)
<|fim▁hole|>queue = Queue('high', connection=redis_conn, default_timeout=1800)
from validator.routes import app_routes
app.register_blueprint(app_routes)
# RQ dashboards
app.config.from_object(rq_dashboard.default_settings)
app.register_blueprint(rq_dashboard.blueprint, url_prefix="/rq")<|fim▁end|>
|
db = MongoEngine(app)
redis_conn = Redis()
|
<|file_name|>core.py<|end_file_name|><|fim▁begin|># Copyright 2021 DeepMind Technologies Limited
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Abstract API specification for XManager implementations.
Each implementation of the XManager API should override the abstract methods.
Users are normally expected to have the following pair of imports:
```
from xmanager import xm
from xmanager import xm_foo
```
"""
import abc
import asyncio
from concurrent import futures
import enum
import getpass
import inspect
import queue
import threading
from typing import Any, Awaitable, Callable, Collection, Dict, List, Mapping, Optional, Sequence, overload
import attr
from xmanager.xm import async_packager
from xmanager.xm import id_predictor
from xmanager.xm import job_blocks
from xmanager.xm import job_operators
from xmanager.xm import metadata_context
from xmanager.xm import pattern_matching
def _check_if_unsupported_args_are_present(args: Mapping[str, Any],
supported_args: Collection[str],
job_type: str) -> None:
supported_args = set(supported_args)
unsupported_args = set(args.keys()) - supported_args
if unsupported_args:
raise ValueError(
f'Arguments {unsupported_args!r} are not supported by {job_type}. Only '
f'{supported_args!r} are allowed.')
def _apply_args_to_job(job: job_blocks.Job, args: Mapping[str, Any]) -> None:
  """Overrides a single job's `args`/`env_vars` with the given overrides.

  Only the keys 'args' and 'env_vars' are accepted; anything else raises.
  """
  _check_if_unsupported_args_are_present(args, ('args', 'env_vars'), 'xm.Job')
  if 'args' in args:
    job.args = job_blocks.merge_args(job.args, args['args'])
  extra_env_vars = args.get('env_vars', {})
  job.env_vars.update(extra_env_vars)
def _apply_args_to_job_group(job_group: job_blocks.JobGroup,
                             args: Mapping[str, Any]) -> None:
  """Recursively overrides job group properties.

  Each key of `args` must name a child job of the group; its value is the
  (possibly nested) override mapping for that child.
  """
  if args:
    _check_if_unsupported_args_are_present(args, job_group.jobs.keys(),
                                           'xm.JobGroup')
    for key, job in job_group.jobs.items():
      # Children without overrides get an empty mapping (a no-op).
      _apply_args(job, args.get(key, {}))
# Type-dispatched entry point: Job / JobGroup are handled by the functions
# above; job generators accept no overrides here and are left untouched.
_apply_args = pattern_matching.match(
    _apply_args_to_job, _apply_args_to_job_group,
    pattern_matching.Case([job_blocks.JobGeneratorType, Any],
                          lambda other, args: None))
class ExperimentUnitStatus(abc.ABC):
  """The status of an experiment unit.

  At most one of `is_active`, `is_completed` and `is_failed` is true at a
  time; a unit stopped by a user may be none of the three.
  """
  @property
  @abc.abstractmethod
  def is_active(self) -> bool:
    """Returns whether the unit is not in terminal state.
    It may be actively running or queued. The unit may produce more results.
    If the unit is stopped by a user it will be neither active, completed
    nor failed.
    """
    raise NotImplementedError
  @property
  @abc.abstractmethod
  def is_completed(self) -> bool:
    """Returns whether the unit has completed without failures.
    This is a terminal state. The unit has produced all the intended results.
    But it still may be restarted by an explicit request.
    """
    raise NotImplementedError
  @property
  @abc.abstractmethod
  def is_failed(self) -> bool:
    """Returns whether the unit has failed.
    This is a terminal state. Experiment unit will enter this state on any
    fatal failure, such as process exiting with non-zero code, cloud rejecting
    to schedule/queue the job or exceptions in JobGenerator. The unit will stay
    in this state unless explicitly restarted.
    Intermediate failures do not result in this state.
    """
    raise NotImplementedError
  @property
  @abc.abstractmethod
  def message(self) -> str:
    """An optional human-readable message providing context for the status.
    This may take the form of explaining why the work unit is in this state,
    or any potentially transient errors the work unit may be experiencing.
    """
    raise NotImplementedError
class ExperimentUnitError(RuntimeError):
  """Base error: the experiment unit could not be completed."""
# The subclasses below let callers distinguish *why* a unit did not complete.
class ExperimentUnitFailedError(ExperimentUnitError):
  """A job running in an experiment unit has failed."""
class ExperimentUnitNotCompletedError(ExperimentUnitError):
  """Experiment unit is neither running nor completed.
  For example it may be stopped by a user.
  """
class NotFoundError(KeyError):
  """Experiment/Work Unit/etc. has not been found."""
def _work_unit_arguments(
    job: job_blocks.JobType,
    args: Optional[Mapping[str, Any]],
) -> Mapping[str, Any]:
  """Constructs work unit arguments to display them in various UIs.

  When the user passes `args` to `.add` explicitly, those are taken verbatim
  as the work unit arguments. When `args` is None, the arguments are deduced
  from the `job` itself: its `args` and `env_vars` for a plain `Job`, the
  recursive per-child deduction for a `JobGroup`, and `{}` for a generator.

  Args:
    job: A job to run inside a work unit.
    args: Explicitly specified arguments (could be empty).

  Returns:
    The mapping to display as work unit arguments.
  """
  if args is not None:
    # Users control what is shown when they provide a value; don't alter it.
    return args

  def from_job(job: job_blocks.Job) -> Dict[str, Any]:
    candidates = {
        'args': job.args.to_dict(kwargs_only=True),
        'env_vars': job.env_vars,
    }
    # Omit empty values so UIs are not cluttered.
    return {name: value for name, value in candidates.items() if value}

  def from_job_group(group: job_blocks.JobGroup) -> Dict[str, Any]:
    collected = {}
    for child_name, child in group.jobs.items():
      child_args = deduce(child)
      if child_args:
        collected[child_name] = child_args
    return collected

  deduce = pattern_matching.match(
      from_job, from_job_group,
      pattern_matching.Case([job_blocks.JobGeneratorType],
                            lambda generator: {}))
  return deduce(job)
class Importance(enum.Enum):
  """How important it is to schedule particular Experiment or ExperimentUnit.
  This is a hint to the scheduler. Not all schedulers take it into account
  (xm_local doesn't). And even with a smart scheduler a less important work
  unit may run before a more important one e.g. if it uses a less contended
  resource.
  Unlike ServiceTier, importance only controls preference within a team i.e.
  how a team's resources are divided between the team's experiments. It has
  no effect on resource allocation between teams.
  """
  # High impact experiments. Try scheduling them even at the cost of significant
  # reduction of the overall throughput that your experiments get.
  HIGH = 'high'
  # The default importance.
  NORMAL = 'normal'
  # Prefer to schedule other experiments with higher importance, but overall
  # try to maximize throughput.
  LOW = 'low'
@attr.s(auto_attribs=True, kw_only=True)
class ExperimentUnitRole(abc.ABC):
  """The role of an experiment unit within the experiment structure.
  Attributes:
    importance: how important it is to schedule this executable unit comparing
      to all your executable units (from all your experiments).
  """
  # Scheduling hint only; see `Importance` for semantics.
  importance: Importance = Importance.NORMAL
class ExperimentUnit(abc.ABC):
"""ExperimentUnit is a collection of semantically associated `Job`s."""
experiment: 'Experiment'
  def __init__(self, experiment: 'Experiment',
               create_task: Callable[[Awaitable[Any]], futures.Future],
               args: Optional[Mapping[str,
                                      Any]], role: ExperimentUnitRole) -> None:
    """Initializes an `ExperimentUnit` instance.
    Args:
      experiment: An experiment this unit belongs to.
      create_task: A callback to register a new asynchronous task.
      args: Arguments to this experiment unit. Most commonly used to represent
        the hyperparameter sweep trial corresponding to a work unit.
      role: The role of this unit in the experiment structure.
    """
    self.experiment = experiment
    self._create_task = create_task
    self._args = args
    self._role = role
    # Futures of in-flight `add(...)` launches; awaited by
    # `wait_until_complete` before checking the unit's own completion.
    self._launch_tasks: List[futures.Future] = []
  @property
  def experiment_id(self) -> int:
    """Returns a unique ID assigned to the experiment."""
    return self.experiment.experiment_id
def add(self,
job: job_blocks.JobType,
args: Optional[Mapping[str, Any]] = None) -> Awaitable[None]:
# pyformat: disable
"""Adds a Job / JobGroup to the experiment unit.
Only one JobGroup can be added to an ExperimentUnit. This limitation may be
lifted in future versions.
Args:
job: A job or job group to add.
args: Keyword arguments to be passed to the job. For Job and JobGroup args
are recursively expanded. For example,
```
wu.add(
JobGroup(agent=Job(...)),
args={'agent': {'args': {'learning_rate': 0.1}}},
)
```
would update `args` field of a job `agent` in the group.
Returns:
An awaitable that would be fulfilled when the job is launched.
"""
# pyformat: enable
job = job_operators.shallow_copy_job_type(job)
if args is not None:
_apply_args(job, args)
job_operators.populate_job_names(job)
def launch_job(job: job_blocks.Job) -> Awaitable[None]:
return self._launch_job_group(
job_blocks.JobGroup(**{job.name: job}),<|fim▁hole|> def launch_job_group(group: job_blocks.JobGroup) -> Awaitable[None]:
return self._launch_job_group(group,
_work_unit_arguments(group, self._args))
def launch_job_generator(
job_generator: job_blocks.JobGeneratorType) -> Awaitable[None]:
if (not inspect.iscoroutinefunction(job_generator) and
not inspect.iscoroutinefunction(job_generator.__call__)):
raise ValueError(
'Job generator must be an async function. Signature needs to be '
'`async def job_generator(work_unit: xm.WorkUnit):`')
return job_generator(self, **(args or {}))
job_awaitable = pattern_matching.match(launch_job, launch_job_group,
launch_job_generator)(
job)
launch_task = self._create_task(job_awaitable)
self._launch_tasks.append(launch_task)
return asyncio.wrap_future(launch_task)
  async def wait_until_complete(self) -> 'ExperimentUnit':
    """Waits until the unit is in a final state: completed/failed/stopped.
    Raises:
      ExperimentUnitError: Exception if the unit couldn't complete.
    Returns:
      Returns self to facilitate asyncio.as_completed usage.
    """
    try:
      # First make sure every pending `add(...)` launch has finished; any
      # launch failure is surfaced as a unit-creation error.
      for task in self._launch_tasks:
        await asyncio.wrap_future(task)
    except Exception as e:
      raise ExperimentUnitError('Experiment unit could not be created.') from e
    await self._wait_until_complete()
    return self
  async def _launch_job_group(self, job_group: job_blocks.JobGroup,
                              args_view: Mapping[str, Any]) -> None:
    """Launches a given job group as part of the unit.

    Implementations receive `args_view` to display as work unit arguments.
    """
    raise NotImplementedError
  async def _wait_until_complete(self) -> None:
    """Waits until the unit is in a final state: completed/failed/stopped.
    Child classes need to implement this method to support awaiting units.
    Unlike wait_until_complete this method assumes that unit has been fully
    created. This method is only invoked if somebody has requested to monitor
    unit.
    """
    raise NotImplementedError
  def stop(self) -> None:
    """Initiate the process to stop the unit from running.
    This method will synchronously make a request for the unit to stop.
    However, the method does not actually wait for the unit to be in a
    terminal state.
    Use self.wait_until_complete() after self.stop() to guarantee the unit
    is stopped.
    """
    raise NotImplementedError
  def get_status(self) -> ExperimentUnitStatus:
    """Gets the status of this unit."""
    raise NotImplementedError
  @property
  @abc.abstractmethod
  def experiment_unit_name(self) -> str:
    """Unique name of this unit; also used as the hostname prefix."""
    raise NotImplementedError
  def get_full_job_name(self, job_name: str) -> str:
    """Given `Job.name` constructs its full name.
    The primary use case is addressing containers -- full names serve as
    hostnames.
    Args:
      job_name: Short name of a job.
    Returns:
      Full name of the job.
    """
    return f'{self.experiment_unit_name}_{job_name}'
  @property
  def context(self) -> metadata_context.MetadataContext:
    """Returns metadata context for a unit."""
    return metadata_context.MetadataContext(
        creator=getpass.getuser(),
        annotations=metadata_context.ContextAnnotations())
@attr.s(auto_attribs=True, kw_only=True)
class WorkUnitRole(ExperimentUnitRole):
  """An experiment unit with this role is a work unit.
  Work units contain jobs that are often run as trials as part of an
  experiment's hyper-parameter search. The status of a work unit is used to
  determine the status of the experiment.
  """
class WorkUnit(ExperimentUnit):
  """Work units are experiment units with the work unit role."""
  @property
  @abc.abstractmethod
  def work_unit_id(self) -> int:
    """Sequential ID of this work unit within its experiment."""
    raise NotImplementedError
  async def wait_until_complete(self) -> 'WorkUnit':
    """Waits until the unit is in a final state: completed/failed/stopped.
    Raises:
      ExperimentUnitError: Exception if the unit couldn't complete.
    Returns:
      Returns self to facilitate asyncio.as_completed usage.
    """
    # Overridden only to narrow the return type annotation to WorkUnit.
    await super().wait_until_complete()
    return self
@attr.s(auto_attribs=True, kw_only=True)
class AuxiliaryUnitRole(ExperimentUnitRole):
  """An experiment unit with this role is an auxiliary unit.
  Auxiliary units contain jobs that are not part of the trials of a
  hyper-parameter search. The status of an auxiliary unit is not used to
  determine the status of the experiment. e.g. Tensorboard
  Attributes:
    termination_delay_secs: How long to keep AUX unit running after experiment
      completion.
  """
  termination_delay_secs: int
class AuxiliaryUnitJob(abc.ABC):
  """A job bundled with an AuxiliaryUnitRole.
  This class allows libraries to define self-contained objects which would
  result in AUX units once added to the experiment.
  Note that this class conforms to xm.JobGenerator interface.
  """
  # Role carrying the AUX-specific settings (importance, termination delay).
  role: AuxiliaryUnitRole
  # The wrapped job (or generator) to launch inside the AUX unit.
  _job: job_blocks.JobType
  def __init__(self,
               job: job_blocks.JobType,
               *,
               importance: Importance = Importance.NORMAL,
               termination_delay_secs: int) -> None:
    self.role = AuxiliaryUnitRole(
        importance=importance,
        termination_delay_secs=termination_delay_secs,
    )
    self._job = job
  async def __call__(self, aux_unit: ExperimentUnit, **kwargs):
    # Generator semantics: dispatch on whether the wrapped job is itself a
    # generator (call it with the unit) or a plain job (add it to the unit).
    async def launch_generator(
        job_generator: job_blocks.JobGeneratorType) -> None:
      await job_generator(aux_unit, **kwargs)
    async def launch_job(job: Any) -> None:
      aux_unit.add(job, args=kwargs)
    await pattern_matching.async_match(launch_generator, launch_job)(self._job)
class Experiment(abc.ABC):
"""Experiment contains a family of jobs run on the same snapshot of code.
Experiment also implements the behavior of how to add and execute jobs.
Attempting to add jobs that contain Executables with unsupported types will
fail.
"""
# An event loop in which job generators would be run.
_event_loop: asyncio.AbstractEventLoop
# A queue of background tasks that launch work units.
_running_tasks: queue.Queue
# Work unit ID predictor.
_work_unit_id_predictor: id_predictor.Predictor
# A class variable for batching packaging requests.
_async_packager: async_packager.AsyncPackager
@property
def experiment_id(self) -> int:
"""Returns a unique ID assigned to the experiment."""
raise NotImplementedError
  def __enter__(self):
    """Synchronous context-manager entry.

    Spins up a dedicated event-loop thread so job generators can run in the
    background, then runs the async `__aenter__` on it. Must not be used from
    inside a running event loop -- use `async with` there instead.
    """
    if asyncio.get_event_loop().is_running():
      raise RuntimeError('When using Experiment from a coroutine please use '
                         '`async with` syntax')
    self._event_loop = asyncio.new_event_loop()
    asyncio.get_child_watcher().attach_loop(self._event_loop)
    # Daemon thread: does not block interpreter exit if cleanup is skipped.
    self._event_loop_thread = threading.Thread(
        target=self._event_loop.run_forever, daemon=True)
    self._event_loop_thread.start()
    # asyncio.run_coroutine_threadsafe doesn't accept class method and wants it
    # wrapped in a function.
    async def async_enter():
      await self.__aenter__()
    asyncio.run_coroutine_threadsafe(
        async_enter(), loop=self._event_loop).result()
    return self
def _wait_for_tasks(self):
while not self._running_tasks.empty():
self._running_tasks.get_nowait().result()
def __exit__(self, exc_type, exc_value, traceback):
self._wait_for_tasks()
self._event_loop.call_soon_threadsafe(self._event_loop.stop)
self._event_loop_thread.join()
async def __aenter__(self):
self._event_loop = asyncio.get_event_loop()
self._running_tasks = queue.Queue()
self._work_unit_id_predictor = id_predictor.Predictor(1 +
self.work_unit_count)
return self
async def _await_for_tasks(self):
while not self._running_tasks.empty():
await asyncio.wrap_future(self._running_tasks.get_nowait())
async def __aexit__(self, exc_type, exc_value, traceback):
await self._await_for_tasks()
@classmethod
def package(
cls, packageables: Sequence[job_blocks.Packageable] = ()
) -> Sequence[job_blocks.Executable]:
"""Packages `packageables` & triggers async packaging.
This function has 2 usages:
- Builds all given executables specs in parallel. While calling package(...)
multiple times is allowed, that would result in slow sequential build,
even if invoked from concurrent threads.
- Triggers packaging of the items enqueued previously with `package_async`.
Args:
packageables: A sequence of extra packageables to build synchronously.
Returns:
A sequence of packaging results associated to `packageables` (same order).
"""
return cls._async_packager.package(packageables)
@classmethod
def package_async(
cls,
packageable: job_blocks.Packageable) -> Awaitable[job_blocks.Executable]:
"""Queues executable spec to be packaged into executable.
If gathering all packageables for a single `package()` call is inconvenient,
one may request packaging with `package_async` and later trigger the build
for the whole batch with `package()`.
Usage:
if eval:
eval_executable = experiment.package_async(xm.blaze_binary(...))
if train:
train_executable = experiment.package_async(xm.blaze_binary(...))
experiment.package() # Explicitly trigger packaging.
jobs = {}
if eval:
jobs['eval'] = xm.job(await eval_executable, ...)
if train:
jobs['train'] = xm.job(await train_executable, ...)
Args:
packageable: Executable spec to package.
Returns:
An awaitable for the packaging result.
"""
return cls._async_packager.add(packageable)
@overload
def add(self,
job: AuxiliaryUnitJob,
args: Optional[Mapping[str, Any]] = ...) -> Awaitable[ExperimentUnit]:
...
@overload
def add(self,
job: job_blocks.JobType,
args: Optional[Mapping[str, Any]] = ...,
role: WorkUnitRole = ...) -> Awaitable[WorkUnit]:
...
@overload
def add(self, job: job_blocks.JobType, args: Optional[Mapping[str, Any]],
role: ExperimentUnitRole) -> Awaitable[ExperimentUnit]:
...
@overload
def add(
self,
job: job_blocks.JobType,
args: Optional[Mapping[str, Any]] = ...,
*, # parameters after “*” are keyword-only parameters
role: ExperimentUnitRole
) -> Awaitable[ExperimentUnit]:
...
# The ExecutableUnit return type is determined by the role.
  # The ExecutableUnit return type is determined by the role.
  def add(self, job, args=None, role=WorkUnitRole()):
    # pyformat: disable
    """Adds a Job / JobGroup to the experiment.
    A new Experiment Unit is created to run the job.
    Args:
      job: A Job or JobGroup to add.
      args: Keyword arguments to be passed to the job. For Job and JobGroup args
        are recursively expanded. For example,
        ```
        wu.add(
            JobGroup(agent=Job(...)),
            args={'agent': {'args': {'learning_rate': 0.1}}},
        )
        ```
        would update `args` field of a job `agent` in the group.
      role: The role of this unit in the experiment structure.
    Returns:
      An awaitable that would be fulfilled when the job is launched.
    """
    # pyformat: enable
    # NOTE(review): `role=WorkUnitRole()` is a shared default instance; it is
    # only read (never mutated) here, so this is safe, but keep it in mind.
    # An AuxiliaryUnitJob carries its own role and overrides the argument.
    role = pattern_matching.match(
        pattern_matching.Case([AuxiliaryUnitJob], lambda job: job.role),
        pattern_matching.Case([Any], lambda job: role),
    )(
        job)
    experiment_unit_future = self._create_experiment_unit(args, role)
    async def launch():
      experiment_unit = await experiment_unit_future
      await experiment_unit.add(job, args)
      return experiment_unit
    return asyncio.wrap_future(self._create_task(launch()))
@abc.abstractmethod
def _create_experiment_unit(
self, args: Optional[Mapping[str, Any]],
role: ExperimentUnitRole) -> Awaitable[ExperimentUnit]:
"""Creates a new experiment unit.
Synchronously starts the experiment unit creation, ensuring that IDs would
be assigned in invocation order. The operation itself may run asynchronously
in background.
Args:
args: Executable unit arguments, to be show as a part of hyper-parameter
sweep.
role: Executable unit role: whether to create a work or auxiliary unit.
Returns:
An awaitable to the creation result.
"""
raise NotImplementedError
def _create_task(self, task: Awaitable[Any]) -> futures.Future:
future = asyncio.run_coroutine_threadsafe(task, loop=self._event_loop)
self._running_tasks.put_nowait(future)
return future
@property
def work_unit_count(self) -> int:
"""Returns how many work units the experiment has."""
raise NotImplementedError
@abc.abstractmethod
def work_units(self) -> Mapping[int, WorkUnit]:
"""Returns a mapping from work_unit_id to an instance of the work unit."""
raise NotImplementedError
@property
def context(self) -> metadata_context.MetadataContext:
"""Returns metadata context for the experiment."""
return metadata_context.MetadataContext(
creator=getpass.getuser(),
annotations=metadata_context.ContextAnnotations())
# NOTE(review): these are module-level functions, so @abc.abstractmethod has
# no enforcing effect here; it appears to only document that each XManager
# backend module is expected to provide concrete implementations -- confirm.
@abc.abstractmethod
def create_experiment(experiment_title: Optional[str] = None) -> Experiment:
  """Returns a concrete Experiment instance."""
  raise NotImplementedError
@abc.abstractmethod
def get_experiment(experiment_id: int) -> Experiment:
  """Returns an Experiment instance associated with this experiment id.
  Args:
    experiment_id: An ID of an experiment to get.
  Raises:
    NotFoundError: If experiment is not found.
  """
  raise NotImplementedError
|
_work_unit_arguments(job, self._args))
|
<|file_name|>metric_service_client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
#
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.monitoring.v3 MetricService API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.client_options
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.gapic_v1.routing_header
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.api import metric_pb2 as api_metric_pb2
from google.api import monitored_resource_pb2
from google.cloud.monitoring_v3.gapic import enums
from google.cloud.monitoring_v3.gapic import metric_service_client_config
from google.cloud.monitoring_v3.gapic.transports import metric_service_grpc_transport
from google.cloud.monitoring_v3.proto import alert_pb2
from google.cloud.monitoring_v3.proto import alert_service_pb2
from google.cloud.monitoring_v3.proto import alert_service_pb2_grpc
from google.cloud.monitoring_v3.proto import common_pb2
from google.cloud.monitoring_v3.proto import group_pb2
from google.cloud.monitoring_v3.proto import group_service_pb2
from google.cloud.monitoring_v3.proto import group_service_pb2_grpc
from google.cloud.monitoring_v3.proto import metric_pb2 as proto_metric_pb2
from google.cloud.monitoring_v3.proto import metric_service_pb2
from google.cloud.monitoring_v3.proto import metric_service_pb2_grpc
from google.protobuf import empty_pb2
from google.protobuf import field_mask_pb2
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
"google-cloud-monitoring"
).version
class MetricServiceClient(object):
"""
Manages metric descriptors, monitored resource descriptors, and
time series data.
"""
SERVICE_ADDRESS = "monitoring.googleapis.com:443"
"""The default address of the service."""
# The name of the interface for this client. This is the key used to
# find the method configuration in the client_config dictionary.
_INTERFACE_NAME = "google.monitoring.v3.MetricService"
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials<|fim▁hole|> file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
MetricServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def metric_descriptor_path(cls, project, metric_descriptor):
"""Return a fully-qualified metric_descriptor string."""
return google.api_core.path_template.expand(
"projects/{project}/metricDescriptors/{metric_descriptor=**}",
project=project,
metric_descriptor=metric_descriptor,
)
@classmethod
def monitored_resource_descriptor_path(cls, project, monitored_resource_descriptor):
"""Return a fully-qualified monitored_resource_descriptor string."""
return google.api_core.path_template.expand(
"projects/{project}/monitoredResourceDescriptors/{monitored_resource_descriptor}",
project=project,
monitored_resource_descriptor=monitored_resource_descriptor,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
"projects/{project}", project=project
)
def __init__(
self,
transport=None,
channel=None,
credentials=None,
client_config=None,
client_info=None,
client_options=None,
):
"""Constructor.
Args:
transport (Union[~.MetricServiceGrpcTransport,
Callable[[~.Credentials, type], ~.MetricServiceGrpcTransport]): A transport
instance, responsible for actually making the API calls.
The default transport uses the gRPC protocol.
This argument may also be a callable which returns a
transport instance. Callables will be sent the credentials
as the first argument and the default transport class as
the second argument.
channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
through which to make calls. This argument is mutually exclusive
with ``credentials``; providing both will raise an exception.
credentials (google.auth.credentials.Credentials): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is mutually exclusive with providing a
transport instance to ``transport``; doing so will raise
an exception.
client_config (dict): DEPRECATED. A dictionary of call options for
each method. If not specified, the default configuration is used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
client_options (Union[dict, google.api_core.client_options.ClientOptions]):
Client options used to set user options on the client. API Endpoint
should be set through client_options.
"""
# Raise deprecation warnings for things we want to go away.
if client_config is not None:
warnings.warn(
"The `client_config` argument is deprecated.",
PendingDeprecationWarning,
stacklevel=2,
)
else:
client_config = metric_service_client_config.config
if channel:
warnings.warn(
"The `channel` argument is deprecated; use " "`transport` instead.",
PendingDeprecationWarning,
stacklevel=2,
)
api_endpoint = self.SERVICE_ADDRESS
if client_options:
if type(client_options) == dict:
client_options = google.api_core.client_options.from_dict(
client_options
)
if client_options.api_endpoint:
api_endpoint = client_options.api_endpoint
# Instantiate the transport.
# The transport is responsible for handling serialization and
# deserialization and actually sending data to the service.
if transport:
if callable(transport):
self.transport = transport(
credentials=credentials,
default_class=metric_service_grpc_transport.MetricServiceGrpcTransport,
address=api_endpoint,
)
else:
if credentials:
raise ValueError(
"Received both a transport instance and "
"credentials; these are mutually exclusive."
)
self.transport = transport
else:
self.transport = metric_service_grpc_transport.MetricServiceGrpcTransport(
address=api_endpoint, channel=channel, credentials=credentials
)
if client_info is None:
client_info = google.api_core.gapic_v1.client_info.ClientInfo(
gapic_version=_GAPIC_LIBRARY_VERSION
)
else:
client_info.gapic_version = _GAPIC_LIBRARY_VERSION
self._client_info = client_info
# Parse out the default settings for retry and timeout for each RPC
# from the client configuration.
# (Ordinarily, these are the defaults specified in the `*_config.py`
# file next to this one.)
self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
client_config["interfaces"][self._INTERFACE_NAME]
)
# Save a dictionary of cached API call functions.
# These are the actual callables which invoke the proper
# transport methods, wrapped with `wrap_method` to add retry,
# timeout, and the like.
self._inner_api_calls = {}
# Service calls
def list_monitored_resource_descriptors(
self,
name,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists monitored resource descriptors that match a filter. This method does
not require a Stackdriver account.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_monitored_resource_descriptors(name):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_monitored_resource_descriptors(name).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The project on which to execute the request. The format is
``"projects/{project_id_or_number}"``.
filter_ (str): An optional
`filter <https://cloud.google.com/monitoring/api/v3/filters>`__
describing the descriptors to be returned. The filter can reference the
descriptor's type and labels. For example, the following filter returns
only Google Compute Engine descriptors that have an ``id`` label:
::
resource.type = starts_with("gce_") AND resource.label:id
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.monitoring_v3.types.MonitoredResourceDescriptor` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "list_monitored_resource_descriptors" not in self._inner_api_calls:
self._inner_api_calls[
"list_monitored_resource_descriptors"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_monitored_resource_descriptors,
default_retry=self._method_configs[
"ListMonitoredResourceDescriptors"
].retry,
default_timeout=self._method_configs[
"ListMonitoredResourceDescriptors"
].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.ListMonitoredResourceDescriptorsRequest(
name=name, filter=filter_, page_size=page_size
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_monitored_resource_descriptors"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="resource_descriptors",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_monitored_resource_descriptor(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a single monitored resource descriptor. This method does not require a
Stackdriver account.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.monitored_resource_descriptor_path('[PROJECT]', '[MONITORED_RESOURCE_DESCRIPTOR]')
>>>
>>> response = client.get_monitored_resource_descriptor(name)
Args:
name (str): The monitored resource descriptor to get. The format is
``"projects/{project_id_or_number}/monitoredResourceDescriptors/{resource_type}"``.
The ``{resource_type}`` is a predefined type, such as
``cloudsql_database``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.MonitoredResourceDescriptor` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "get_monitored_resource_descriptor" not in self._inner_api_calls:
self._inner_api_calls[
"get_monitored_resource_descriptor"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_monitored_resource_descriptor,
default_retry=self._method_configs[
"GetMonitoredResourceDescriptor"
].retry,
default_timeout=self._method_configs[
"GetMonitoredResourceDescriptor"
].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.GetMonitoredResourceDescriptorRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_monitored_resource_descriptor"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_metric_descriptors(
self,
name,
filter_=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists metric descriptors that match a filter. This method does not require
a Stackdriver account.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_metric_descriptors(name):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_metric_descriptors(name).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The project on which to execute the request. The format is
``"projects/{project_id_or_number}"``.
filter_ (str): If this field is empty, all custom and system-defined metric descriptors
are returned. Otherwise, the
`filter <https://cloud.google.com/monitoring/api/v3/filters>`__
specifies which metric descriptors are to be returned. For example, the
following filter matches all `custom
metrics <https://cloud.google.com/monitoring/custom-metrics>`__:
::
metric.type = starts_with("custom.googleapis.com/")
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "list_metric_descriptors" not in self._inner_api_calls:
self._inner_api_calls[
"list_metric_descriptors"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_metric_descriptors,
default_retry=self._method_configs["ListMetricDescriptors"].retry,
default_timeout=self._method_configs["ListMetricDescriptors"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.ListMetricDescriptorsRequest(
name=name, filter=filter_, page_size=page_size
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_metric_descriptors"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="metric_descriptors",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def get_metric_descriptor(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Gets a single metric descriptor. This method does not require a Stackdriver
account.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]')
>>>
>>> response = client.get_metric_descriptor(name)
Args:
name (str): The metric descriptor on which to execute the request. The format is
``"projects/{project_id_or_number}/metricDescriptors/{metric_id}"``. An
example value of ``{metric_id}`` is
``"compute.googleapis.com/instance/disk/read_bytes_count"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "get_metric_descriptor" not in self._inner_api_calls:
self._inner_api_calls[
"get_metric_descriptor"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.get_metric_descriptor,
default_retry=self._method_configs["GetMetricDescriptor"].retry,
default_timeout=self._method_configs["GetMetricDescriptor"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.GetMetricDescriptorRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["get_metric_descriptor"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def create_metric_descriptor(
self,
name,
metric_descriptor,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates a new metric descriptor. User-created metric descriptors define
`custom metrics <https://cloud.google.com/monitoring/custom-metrics>`__.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `metric_descriptor`:
>>> metric_descriptor = {}
>>>
>>> response = client.create_metric_descriptor(name, metric_descriptor)
Args:
name (str): The project on which to execute the request. The format is
``"projects/{project_id_or_number}"``.
metric_descriptor (Union[dict, ~google.cloud.monitoring_v3.types.MetricDescriptor]): The new `custom
metric <https://cloud.google.com/monitoring/custom-metrics>`__
descriptor.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.MetricDescriptor`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.monitoring_v3.types.MetricDescriptor` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "create_metric_descriptor" not in self._inner_api_calls:
self._inner_api_calls[
"create_metric_descriptor"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_metric_descriptor,
default_retry=self._method_configs["CreateMetricDescriptor"].retry,
default_timeout=self._method_configs["CreateMetricDescriptor"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.CreateMetricDescriptorRequest(
name=name, metric_descriptor=metric_descriptor
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
return self._inner_api_calls["create_metric_descriptor"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def delete_metric_descriptor(
self,
name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Deletes a metric descriptor. Only user-created `custom
metrics <https://cloud.google.com/monitoring/custom-metrics>`__ can be
deleted.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.metric_descriptor_path('[PROJECT]', '[METRIC_DESCRIPTOR]')
>>>
>>> client.delete_metric_descriptor(name)
Args:
name (str): The metric descriptor on which to execute the request. The format is
``"projects/{project_id_or_number}/metricDescriptors/{metric_id}"``. An
example of ``{metric_id}`` is:
``"custom.googleapis.com/my_test_metric"``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "delete_metric_descriptor" not in self._inner_api_calls:
self._inner_api_calls[
"delete_metric_descriptor"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_metric_descriptor,
default_retry=self._method_configs["DeleteMetricDescriptor"].retry,
default_timeout=self._method_configs["DeleteMetricDescriptor"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.DeleteMetricDescriptorRequest(name=name)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["delete_metric_descriptor"](
request, retry=retry, timeout=timeout, metadata=metadata
)
def list_time_series(
self,
name,
filter_,
interval,
view,
aggregation=None,
order_by=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Lists time series that match a filter. This method does not require a
Stackdriver account.
Example:
>>> from google.cloud import monitoring_v3
>>> from google.cloud.monitoring_v3 import enums
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `filter_`:
>>> filter_ = ''
>>>
>>> # TODO: Initialize `interval`:
>>> interval = {}
>>>
>>> # TODO: Initialize `view`:
>>> view = enums.ListTimeSeriesRequest.TimeSeriesView.FULL
>>>
>>> # Iterate over all results
>>> for element in client.list_time_series(name, filter_, interval, view):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_time_series(name, filter_, interval, view).pages:
... for element in page:
... # process element
... pass
Args:
name (str): The project on which to execute the request. The format is
"projects/{project\_id\_or\_number}".
filter_ (str): A `monitoring
filter <https://cloud.google.com/monitoring/api/v3/filters>`__ that
specifies which time series should be returned. The filter must specify
a single metric type, and can additionally specify metric labels and
other information. For example:
::
metric.type = "compute.googleapis.com/instance/cpu/usage_time" AND
metric.label.instance_name = "my-instance-name"
interval (Union[dict, ~google.cloud.monitoring_v3.types.TimeInterval]): The time interval for which results should be returned. Only time series
that contain data points in the specified interval are included
in the response.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.TimeInterval`
view (~google.cloud.monitoring_v3.types.TimeSeriesView): Specifies which information is returned about the time series.
aggregation (Union[dict, ~google.cloud.monitoring_v3.types.Aggregation]): By default, the raw time series data is returned.
Use this field to combine multiple time series for different
views of the data.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.Aggregation`
order_by (str): Unsupported: must be left blank. The points in each time series are
returned in reverse time order.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.api_core.page_iterator.PageIterator` instance.
An iterable of :class:`~google.cloud.monitoring_v3.types.TimeSeries` instances.
You can also iterate over the pages of the response
using its `pages` property.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "list_time_series" not in self._inner_api_calls:
self._inner_api_calls[
"list_time_series"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_time_series,
default_retry=self._method_configs["ListTimeSeries"].retry,
default_timeout=self._method_configs["ListTimeSeries"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.ListTimeSeriesRequest(
name=name,
filter=filter_,
interval=interval,
view=view,
aggregation=aggregation,
order_by=order_by,
page_size=page_size,
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls["list_time_series"],
retry=retry,
timeout=timeout,
metadata=metadata,
),
request=request,
items_field="time_series",
request_token_field="page_token",
response_token_field="next_page_token",
)
return iterator
def create_time_series(
self,
name,
time_series,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None,
):
"""
Creates or adds data to one or more time series.
The response is empty if all time series in the request were written.
If any time series could not be written, a corresponding failure message is
included in the error response.
Example:
>>> from google.cloud import monitoring_v3
>>>
>>> client = monitoring_v3.MetricServiceClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `time_series`:
>>> time_series = []
>>>
>>> client.create_time_series(name, time_series)
Args:
name (str): The project on which to execute the request. The format is
``"projects/{project_id_or_number}"``.
time_series (list[Union[dict, ~google.cloud.monitoring_v3.types.TimeSeries]]): The new data to be added to a list of time series. Adds at most one data
point to each of several time series. The new data point must be more
recent than any other point in its time series. Each ``TimeSeries``
value must fully specify a unique time series by supplying all label
values for the metric and the monitored resource.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.monitoring_v3.types.TimeSeries`
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will
be retried using a default configuration.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
if metadata is None:
metadata = []
metadata = list(metadata)
# Wrap the transport method to add retry and timeout logic.
if "create_time_series" not in self._inner_api_calls:
self._inner_api_calls[
"create_time_series"
] = google.api_core.gapic_v1.method.wrap_method(
self.transport.create_time_series,
default_retry=self._method_configs["CreateTimeSeries"].retry,
default_timeout=self._method_configs["CreateTimeSeries"].timeout,
client_info=self._client_info,
)
request = metric_service_pb2.CreateTimeSeriesRequest(
name=name, time_series=time_series
)
if metadata is None:
metadata = []
metadata = list(metadata)
try:
routing_header = [("name", name)]
except AttributeError:
pass
else:
routing_metadata = google.api_core.gapic_v1.routing_header.to_grpc_metadata(
routing_header
)
metadata.append(routing_metadata)
self._inner_api_calls["create_time_series"](
request, retry=retry, timeout=timeout, metadata=metadata
)<|fim▁end|>
| |
# -*- coding: utf-8 -*-
"""
    Eve Demo (Secured)
    ~~~~~~~~~~~~~~~~~~

    This is a fork of Eve Demo (https://github.com/pyeve/eve-demo)
    intended to demonstrate how an Eve API can be secured by means of
    Flask-Sentinel.

    For demonstration purposes, besides protecting a couple API endpoints
    with a BearerToken class instance, we are also adding a static html
    endpoint and protecting it via a decorator.

    :copyright: (c) 2015 by Nicola Iarocci.
    :license: BSD, see LICENSE for more details.
"""
from eve import Eve
from oauth2 import BearerAuth
# ``flask.ext`` was a deprecated import hook removed in Flask 1.0; import the
# extension package directly instead.
from flask_sentinel import ResourceOwnerPasswordCredentials, oauth

# Wire the Eve app up with bearer-token auth and the OAuth2
# resource-owner-password-credentials flow.
app = Eve(auth=BearerAuth)
ResourceOwnerPasswordCredentials(app)


@app.route('/endpoint')
@oauth.require_oauth()
def restricted_access():
    """Static endpoint protected by the ``oauth`` decorator."""
    return "You made it through and accessed the protected resource!"


if __name__ == '__main__':
    # ``ssl_context='adhoc'`` serves HTTPS with a throwaway self-signed
    # certificate (requires pyOpenSSL); fine for a demo, not production.
    app.run(ssl_context='adhoc')
const ASSERT = require('assert');

describe('SSL Specs', function () {
    const PATH = require('path');
    let Acts = require('./../index');
    let testcert = PATH.join(__dirname, 'certs', 'test.cert');
    let testkey = PATH.join(__dirname, 'certs', 'test.key');

    // Boots the server with the self-signed test certificate and an empty
    // CA chain, then shuts it down; resolves once the server reports started.
    it('boot with testcertificate without chain', function () {
        return new Promise((resolve, reject) => {
            try {
                Acts.createServer(__dirname, {
                    server: {
                        address: 'localhost',
                        port: 8086,
                        ssl: {
                            usessl: true,
                            redirectnonsslrequests: true,
                            privatekey: testkey,
                            certificate: testcert,
                            certificationauthority: []
                        }
                    }
                });
                Acts.start(function () {
                    Acts.shutdown();
                    resolve();
                });
            } catch (e) {
                reject(e);
            }
        });
    });

    // Same boot/shutdown cycle, but passing the certificate itself as the
    // certification-authority chain.
    it('boot with testcertificate with chain', function () {
        return new Promise((resolve, reject) => {
            try {
                Acts.createServer(__dirname, {
                    server: {
                        address: 'localhost',
                        port: 8086,
                        ssl: {
                            usessl: true,
                            redirectnonsslrequests: true,
                            privatekey: testkey,
                            certificate: testcert,
                            certificationauthority: [testcert]
                        }
                    }
                });
                Acts.start(function () {
                    Acts.shutdown();
                    resolve();
                });
            } catch (e) {
                reject(e);
            }
        });
    });
});
<|file_name|>admin.py<|end_file_name|><|fim▁begin|>__author__ = 'herald olivares'
# -*- coding: utf-8 -*-
from django.contrib import admin
from upc.sunat.models import Person, Concept, Debt
class PersonAdmin(admin.ModelAdmin):
list_display = ('name', 'ruc', 'phone', 'type')
class ConceptAdmin(admin.ModelAdmin):
pass
class DebtAdmin(admin.ModelAdmin):
list_display = ('concept', 'person', 'period', 'tax_code', 'resolution_number', 'amount')
admin.site.register(Person, PersonAdmin)<|fim▁hole|><|fim▁end|>
|
admin.site.register(Concept, ConceptAdmin)
admin.site.register(Debt, DebtAdmin)
|
<|file_name|>base.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
"""Generic base class for cli hammer commands."""
import logging
from robottelo import ssh
from robottelo.cli import hammer
from robottelo.config import conf
<|fim▁hole|>
class CLIReturnCodeError(Exception):
"""Indicates that a CLI command has finished with return code, different
from zero.
:param return_code: CLI command return code
:param stderr: contents of the ``stderr``
:param msg: explanation of the error
"""
def __init__(self, return_code, stderr, msg):
self.return_code = return_code
self.stderr = stderr
self.msg = msg
def __str__(self):
return self.msg
class Base(object):
"""
@param command_base: base command of hammer.
Output of recent `hammer --help`::
activation-key Manipulate activation keys.
architecture Manipulate architectures.
auth Foreman connection login/logout.
auth-source Manipulate auth sources.
capsule Manipulate capsule
compute-resource Manipulate compute resources.
content-host Manipulate content hosts on the server
content-view Manipulate content views.
docker-image Manipulate docker images
domain Manipulate domains.
environment Manipulate environments.
erratum Manipulate errata
fact Search facts.
filter Manage permission filters.
global-parameter Manipulate global parameters.
gpg Manipulate GPG Key actions on the server
host Manipulate hosts.
host-collection Manipulate host collections
hostgroup Manipulate hostgroups.
import Import data exported from a Red Hat Sat..
lifecycle-environment Manipulate lifecycle_environments
location Manipulate locations.
medium Manipulate installation media.
model Manipulate hardware models.
organization Manipulate organizations
os Manipulate operating system.
package Manipulate packages.
package-group Manipulate package groups
partition-table Manipulate partition tables.
ping Get the status of the server
product Manipulate products.
proxy Manipulate smart proxies.
puppet-class Search puppet modules.
puppet-module View Puppet Module details.
report Browse and read reports.
repository Manipulate repositories
repository-set Manipulate repository sets on the server
role Manage user roles.
sc-param Manipulate smart class parameters.
shell Interactive shell
subnet Manipulate subnets.
subscription Manipulate subscriptions.
sync-plan Manipulate sync plans
task Tasks related actions.
template Manipulate config templates.
user Manipulate users.
user-group Manage user groups.
@since: 27.Nov.2013
"""
command_base = None # each inherited instance should define this
command_sub = None # specific to instance, like: create, update, etc
command_requires_org = False # True when command requires organization-id
logger = logging.getLogger('robottelo')
@classmethod
def _handle_response(cls, response, ignore_stderr=None):
"""Verify ``return_code`` of the CLI command.
Check for a non-zero return code or any stderr contents.
:param response: a ``SSHCommandResult`` object, returned by
:mod:`robottelo.ssh.command`.
:param ignore_stderr: indicates whether to throw a warning in logs if
``stderr`` is not empty.
:returns: contents of ``stdout``.
:raises robottelo.cli.base.CLIReturnCodeError: If return code is
different from zero.
"""
if response.return_code != 0:
raise CLIReturnCodeError(
response.return_code,
response.stderr,
u'Command "{0} {1}" finished with return_code {2}\n'
'stderr contains following message:\n{3}'
.format(
cls.command_base,
cls.command_sub,
response.return_code,
response.stderr,
)
)
if len(response.stderr) != 0 and not ignore_stderr:
cls.logger.warning(
u'stderr contains following message:\n{0}'
.format(response.stderr)
)
return response.stdout
@classmethod
def add_operating_system(cls, options=None):
"""
Adds OS to record.
"""
cls.command_sub = 'add-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def create(cls, options=None):
"""
Creates a new record using the arguments passed via dictionary.
"""
cls.command_sub = 'create'
if options is None:
options = {}
result = cls.execute(
cls._construct_command(options), output_format='csv')
# Extract new object ID if it was successfully created
if len(result) > 0 and 'id' in result[0]:
obj_id = result[0]['id']
# Fetch new object
# Some Katello obj require the organization-id for subcommands
info_options = {u'id': obj_id}
if cls.command_requires_org:
if 'organization-id' not in options:
raise CLIError(
'organization-id option is required for {0}.create'
.format(cls.__name__)
)
info_options[u'organization-id'] = options[u'organization-id']
new_obj = cls.info(info_options)
# stdout should be a dictionary containing the object
if len(new_obj) > 0:
result = new_obj
return result
@classmethod
def delete(cls, options=None):
"""Deletes existing record."""
cls.command_sub = 'delete'
return cls.execute(
cls._construct_command(options),
ignore_stderr=True,
)
@classmethod
def delete_parameter(cls, options=None):
"""
Deletes parameter from record.
"""
cls.command_sub = 'delete-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def dump(cls, options=None):
"""
Displays the content for existing partition table.
"""
cls.command_sub = 'dump'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def _get_username_password(cls, username=None, password=None):
"""Lookup for the username and password for cli command in following
order:
1. ``user`` or ``password`` parameters
2. ``foreman_admin_username`` or ``foreman_admin_password`` attributes
3. foreman.admin.username or foreman.admin.password configuration
:return: A tuple with the username and password found
:rtype: tuple
"""
if username is None:
try:
username = getattr(cls, 'foreman_admin_username')
except AttributeError:
username = conf.properties['foreman.admin.username']
if password is None:
try:
password = getattr(cls, 'foreman_admin_password')
except AttributeError:
password = conf.properties['foreman.admin.password']
return (username, password)
@classmethod
def execute(cls, command, user=None, password=None, output_format=None,
timeout=None, ignore_stderr=None, return_raw_response=None):
"""Executes the cli ``command`` on the server via ssh"""
user, password = cls._get_username_password(user, password)
# add time to measure hammer performance
perf_test = conf.properties.get('performance.test.foreman.perf', '0')
cmd = u'LANG={0} {1} hammer -v -u {2} -p {3} {4} {5}'.format(
conf.properties['main.locale'],
u'time -p' if perf_test == '1' else '',
user,
password,
u'--output={0}'.format(output_format) if output_format else u'',
command,
)
response = ssh.command(
cmd.encode('utf-8'),
output_format=output_format,
timeout=timeout,
)
if return_raw_response:
return response
else:
return cls._handle_response(
response,
ignore_stderr=ignore_stderr,
)
@classmethod
def exists(cls, options=None, search=None):
"""Search for an entity using the query ``search[0]="search[1]"``
Will be used the ``list`` command with the ``--search`` option to do
the search.
If ``options`` argument already have a search key, then the ``search``
argument will not be evaluated. Which allows different search query.
"""
if options is None:
options = {}
if search is not None and u'search' not in options:
options.update({u'search': u'{0}=\\"{1}\\"'.format(
search[0], search[1])})
result = cls.list(options)
if result:
result = result[0]
return result
@classmethod
def info(cls, options=None, output_format=None):
"""Reads the entity information."""
cls.command_sub = 'info'
if options is None:
options = {}
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError(
'organization-id option is required for {0}.info'
.format(cls.__name__)
)
result = cls.execute(
command=cls._construct_command(options),
output_format=output_format
)
if output_format != 'json':
result = hammer.parse_info(result)
return result
@classmethod
def list(cls, options=None, per_page=True):
"""
List information.
@param options: ID (sometimes name works as well) to retrieve info.
"""
cls.command_sub = 'list'
if options is None:
options = {}
if 'per-page' not in options and per_page:
options[u'per-page'] = 10000
if cls.command_requires_org and 'organization-id' not in options:
raise CLIError(
'organization-id option is required for {0}.list'
.format(cls.__name__)
)
result = cls.execute(
cls._construct_command(options), output_format='csv')
return result
@classmethod
def puppetclasses(cls, options=None):
"""
Lists all puppet classes.
"""
cls.command_sub = 'puppet-classes'
result = cls.execute(
cls._construct_command(options), output_format='csv')
return result
@classmethod
def remove_operating_system(cls, options=None):
"""
Removes OS from record.
"""
cls.command_sub = 'remove-operatingsystem'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def sc_params(cls, options=None):
"""
Lists all smart class parameters.
"""
cls.command_sub = 'sc-params'
result = cls.execute(
cls._construct_command(options), output_format='csv')
return result
@classmethod
def set_parameter(cls, options=None):
"""
Creates or updates parameter for a record.
"""
cls.command_sub = 'set-parameter'
result = cls.execute(cls._construct_command(options))
return result
@classmethod
def update(cls, options=None):
"""
Updates existing record.
"""
cls.command_sub = 'update'
result = cls.execute(
cls._construct_command(options), output_format='csv')
return result
@classmethod
def with_user(cls, username=None, password=None):
"""Context Manager for credentials"""
if username is None:
username = conf.properties['foreman.admin.username']
if password is None:
password = conf.properties['foreman.admin.password']
class Wrapper(cls):
"""Wrapper class which defines the foreman admin username and
password to be used when executing any cli command.
"""
foreman_admin_username = username
foreman_admin_password = password
return Wrapper
@classmethod
def _construct_command(cls, options=None):
"""
Build a hammer cli command based on the options passed
"""
tail = u''
if options is None:
options = {}
for key, val in options.items():
if val is None:
continue
if val is True:
tail += u' --{0}'.format(key)
elif val is not False:
if isinstance(val, list):
val = ','.join(str(el) for el in val)
tail += u' --{0}="{1}"'.format(key, val)
cmd = u'{0} {1} {2}'.format(
cls.command_base,
cls.command_sub,
tail.strip()
)
return cmd<|fim▁end|>
|
class CLIError(Exception):
"""Indicates that a CLI command could not be run."""
|
<|file_name|>err.rs<|end_file_name|><|fim▁begin|>// information from https://raw.githubusercontent.com/torvalds/linux/master/
// /include/uapi/linux/can/error.h
use std::convert::TryFrom;
use super::CanFrame;
use std::error::Error;
use std::{error, fmt};
#[inline]
/// Helper function to retrieve a specific byte of frame data or returning an
/// `Err(..)` otherwise.
fn get_data(frame: &CanFrame, idx: u8) -> Result<u8, CanErrorDecodingFailure> {
Ok(*(frame.data()
.get(idx as usize)
.ok_or_else(|| CanErrorDecodingFailure::NotEnoughData(idx)))?)
}
/// Error decoding a CanError from a CanFrame.
#[derive(Copy, Clone, Debug)]
pub enum CanErrorDecodingFailure {
/// The supplied CanFrame did not have the error bit set.
NotAnError,
/// The error type is not known and cannot be decoded.
UnknownErrorType(u32),
/// The error type indicated a need for additional information as `data`,
/// but the `data` field was not long enough.
NotEnoughData(u8),
/// The error type `ControllerProblem` was indicated and additional
/// information found, but not recognized.
InvalidControllerProblem,
/// The type of the ProtocolViolation was not valid
InvalidViolationType,
/// A location was specified for a ProtocolViolation, but the location
/// was not valid.
InvalidLocation,
/// The supplied transciever error was invalid.
InvalidTransceiverError,
}
impl error::Error for CanErrorDecodingFailure {
fn description(&self) -> &str {
match *self {
CanErrorDecodingFailure::NotAnError => "CAN frame is not an error",
CanErrorDecodingFailure::UnknownErrorType(_) => "unknown error type",
CanErrorDecodingFailure::NotEnoughData(_) => "not enough data",
CanErrorDecodingFailure::InvalidControllerProblem => "not a valid controller problem",
CanErrorDecodingFailure::InvalidViolationType => "not a valid violation type",
CanErrorDecodingFailure::InvalidLocation => "not a valid location",
CanErrorDecodingFailure::InvalidTransceiverError => "not a valid transceiver error",
}
}
fn cause(&self) -> Option<&error::Error> {
None
}
}
impl fmt::Display for CanErrorDecodingFailure {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
#[derive(Copy, Clone, Debug)]
pub enum CanError {
/// TX timeout (by netdevice driver)
TransmitTimeout,
/// Arbitration was lost. Contains the number after which arbitration was
/// lost or 0 if unspecified
LostArbitration(u8),
/// Controller problem, see `ControllerProblem`
ControllerProblem(ControllerProblem),
/// Protocol violation at the specified `Location`. See `ProtocolViolation`
/// for details.
ProtocolViolation {
vtype: ViolationType,
location: Location,
},
/// Transceiver Error.
TransceiverError,
/// No ACK received for current CAN frame.
NoAck,
/// Bus off (due to too many detected errors)
BusOff,
/// Bus error (due to too many detected errors)
BusError,
/// The bus has been restarted
Restarted,
/// Unknown, possibly invalid, error
Unknown(u32),
}
impl error::Error for CanError {
fn description(&self) -> &str {
match *self {
CanError::TransmitTimeout => "transmission timeout",
CanError::LostArbitration(_) => "arbitration lost",
CanError::ControllerProblem(_) => "controller problem",
CanError::ProtocolViolation { .. } => "protocol violation",
CanError::TransceiverError => "transceiver error",
CanError::NoAck => "no ack",
CanError::BusOff => "bus off",
CanError::BusError => "bus error",
CanError::Restarted => "restarted",
CanError::Unknown(_) => "unknown error",
}
}
fn cause(&self) -> Option<&error::Error> {
match *self {
CanError::ControllerProblem(ref e) => Some(e),
_ => None,
}
}
}
impl fmt::Display for CanError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
CanError::LostArbitration(n) => write!(f, "arbitration lost after {} bits", n),
CanError::ControllerProblem(e) => write!(f, "controller problem: {}", e),
CanError::ProtocolViolation { vtype, location } => {
write!(f, "protocol violation at {}: {}", location, vtype)
}
CanError::Unknown(errno) => write!(f, "unknown error ({})", errno),
_ => write!(f, "{}", self.description()),
}
}
}
#[derive(Copy, Clone, Debug)]
pub enum ControllerProblem {
// unspecified
Unspecified,
// RX buffer overflow
ReceiveBufferOverflow,
// TX buffer overflow
TransmitBufferOverflow,
// reached warning level for RX errors
ReceiveErrorWarning,
// reached warning level for TX errors
TransmitErrorWarning,
// reached error passive status RX
ReceiveErrorPassive,
// reached error passive status TX
TransmitErrorPassive,
// recovered to error active state
Active,
}
impl error::Error for ControllerProblem {
fn description(&self) -> &str {
match *self {
ControllerProblem::Unspecified => "unspecified controller problem",
ControllerProblem::ReceiveBufferOverflow => "receive buffer overflow",
ControllerProblem::TransmitBufferOverflow => "transmit buffer overflow",
ControllerProblem::ReceiveErrorWarning => "ERROR WARNING (receive)",
ControllerProblem::TransmitErrorWarning => "ERROR WARNING (transmit)",
ControllerProblem::ReceiveErrorPassive => "ERROR PASSIVE (receive)",
ControllerProblem::TransmitErrorPassive => "ERROR PASSIVE (transmit)",
ControllerProblem::Active => "ERROR ACTIVE",
}
}
fn cause(&self) -> Option<&error::Error> {
None
}
}
impl fmt::Display for ControllerProblem {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl TryFrom<u8> for ControllerProblem {
type Error = CanErrorDecodingFailure;
fn try_from(val: u8) -> Result<ControllerProblem, CanErrorDecodingFailure> {
Ok(match val {
0x00 => ControllerProblem::Unspecified,
0x01 => ControllerProblem::ReceiveBufferOverflow,
0x02 => ControllerProblem::TransmitBufferOverflow,
0x04 => ControllerProblem::ReceiveErrorWarning,
0x08 => ControllerProblem::TransmitErrorWarning,
0x10 => ControllerProblem::ReceiveErrorPassive,
0x20 => ControllerProblem::TransmitErrorPassive,
0x40 => ControllerProblem::Active,
_ => return Err(CanErrorDecodingFailure::InvalidControllerProblem),
})
}
}
#[derive(Copy, Clone, Debug)]
pub enum ViolationType {
/// Unspecified Violation
Unspecified,
/// Single Bit Error
SingleBitError,
/// Frame formatting error
FrameFormatError,
/// Bit stuffing error
BitStuffingError,
/// A dominant bit was sent, but not received
UnableToSendDominantBit,
/// A recessive bit was sent, but not received
UnableToSendRecessiveBit,
/// Bus overloaded
BusOverload,
/// Bus is active (again)
Active,
/// Transmission Error
TransmissionError,
}
impl error::Error for ViolationType {
fn description(&self) -> &str {
match *self {
ViolationType::Unspecified => "unspecified",
ViolationType::SingleBitError => "single bit error",
ViolationType::FrameFormatError => "frame format error",
ViolationType::BitStuffingError => "bit stuffing error",
ViolationType::UnableToSendDominantBit => "unable to send dominant bit",
ViolationType::UnableToSendRecessiveBit => "unable to send recessive bit",
ViolationType::BusOverload => "bus overload",
ViolationType::Active => "active",
ViolationType::TransmissionError => "transmission error",
}
}
fn cause(&self) -> Option<&error::Error> {
None
}
}
impl fmt::Display for ViolationType {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}", self.description())
}
}
impl TryFrom<u8> for ViolationType {
type Error = CanErrorDecodingFailure;
fn try_from(val: u8) -> Result<ViolationType, CanErrorDecodingFailure> {
Ok(match val {
0x00 => ViolationType::Unspecified,
0x01 => ViolationType::SingleBitError,
0x02 => ViolationType::FrameFormatError,
0x04 => ViolationType::BitStuffingError,
0x08 => ViolationType::UnableToSendDominantBit,
0x10 => ViolationType::UnableToSendRecessiveBit,
0x20 => ViolationType::BusOverload,
0x40 => ViolationType::Active,
0x80 => ViolationType::TransmissionError,
_ => return Err(CanErrorDecodingFailure::InvalidViolationType),
})
}
}
/// Location
///
/// Describes where inside a received frame an error occured.
#[derive(Copy, Clone, Debug)]
pub enum Location {
/// Unspecified
Unspecified,
/// Start of frame.<|fim▁hole|> Id2821,
/// ID bits 20-18 (SFF: 2-0)
Id2018,
/// substitute RTR (SFF: RTR)
SubstituteRtr,
/// extension of identifier
IdentifierExtension,
/// ID bits 17-13
Id1713,
/// ID bits 12-5
Id1205,
/// ID bits 4-0
Id0400,
/// RTR bit
Rtr,
/// Reserved bit 1
Reserved1,
/// Reserved bit 0
Reserved0,
/// Data length
DataLengthCode,
/// Data section
DataSection,
/// CRC sequence
CrcSequence,
/// CRC delimiter
CrcDelimiter,
/// ACK slot
AckSlot,
/// ACK delimiter
AckDelimiter,
/// End-of-frame
EndOfFrame,
/// Intermission (between frames)
Intermission,
}
impl fmt::Display for Location {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f,
"{}",
match *self {
Location::Unspecified => "unspecified location",
Location::StartOfFrame => "start of frame",
Location::Id2821 => "ID, bits 28-21",
Location::Id2018 => "ID, bits 20-18",
Location::SubstituteRtr => "substitute RTR bit",
Location::IdentifierExtension => "ID, extension",
Location::Id1713 => "ID, bits 17-13",
Location::Id1205 => "ID, bits 12-05",
Location::Id0400 => "ID, bits 04-00",
Location::Rtr => "RTR bit",
Location::Reserved1 => "reserved bit 1",
Location::Reserved0 => "reserved bit 0",
Location::DataLengthCode => "data length code",
Location::DataSection => "data section",
Location::CrcSequence => "CRC sequence",
Location::CrcDelimiter => "CRC delimiter",
Location::AckSlot => "ACK slot",
Location::AckDelimiter => "ACK delimiter",
Location::EndOfFrame => "end of frame",
Location::Intermission => "intermission",
})
}
}
impl TryFrom<u8> for Location {
type Error = CanErrorDecodingFailure;
fn try_from(val: u8) -> Result<Location, CanErrorDecodingFailure> {
Ok(match val {
0x00 => Location::Unspecified,
0x03 => Location::StartOfFrame,
0x02 => Location::Id2821,
0x06 => Location::Id2018,
0x04 => Location::SubstituteRtr,
0x05 => Location::IdentifierExtension,
0x07 => Location::Id1713,
0x0F => Location::Id1205,
0x0E => Location::Id0400,
0x0C => Location::Rtr,
0x0D => Location::Reserved1,
0x09 => Location::Reserved0,
0x0B => Location::DataLengthCode,
0x0A => Location::DataSection,
0x08 => Location::CrcSequence,
0x18 => Location::CrcDelimiter,
0x19 => Location::AckSlot,
0x1B => Location::AckDelimiter,
0x1A => Location::EndOfFrame,
0x12 => Location::Intermission,
_ => return Err(CanErrorDecodingFailure::InvalidLocation),
})
}
}
pub enum TransceiverError {
Unspecified,
CanHighNoWire,
CanHighShortToBat,
CanHighShortToVcc,
CanHighShortToGnd,
CanLowNoWire,
CanLowShortToBat,
CanLowShortToVcc,
CanLowShortToGnd,
CanLowShortToCanHigh,
}
impl TryFrom<u8> for TransceiverError {
type Error = CanErrorDecodingFailure;
fn try_from(val: u8) -> Result<TransceiverError, CanErrorDecodingFailure> {
Ok(match val {
0x00 => TransceiverError::Unspecified,
0x04 => TransceiverError::CanHighNoWire,
0x05 => TransceiverError::CanHighShortToBat,
0x06 => TransceiverError::CanHighShortToVcc,
0x07 => TransceiverError::CanHighShortToGnd,
0x40 => TransceiverError::CanLowNoWire,
0x50 => TransceiverError::CanLowShortToBat,
0x60 => TransceiverError::CanLowShortToVcc,
0x70 => TransceiverError::CanLowShortToGnd,
0x80 => TransceiverError::CanLowShortToCanHigh,
_ => return Err(CanErrorDecodingFailure::InvalidTransceiverError),
})
}
}
impl CanError {
pub fn from_frame(frame: &CanFrame) -> Result<CanError, CanErrorDecodingFailure> {
if !frame.is_error() {
return Err(CanErrorDecodingFailure::NotAnError);
}
match frame.err() {
0x00000001 => Ok(CanError::TransmitTimeout),
0x00000002 => Ok(CanError::LostArbitration(get_data(frame, 0)?)),
0x00000004 => {
Ok(CanError::ControllerProblem(ControllerProblem::try_from(get_data(frame, 1)?)?))
}
0x00000008 => {
Ok(CanError::ProtocolViolation {
vtype: ViolationType::try_from(get_data(frame, 2)?)?,
location: Location::try_from(get_data(frame, 3)?)?,
})
}
0x00000010 => Ok(CanError::TransceiverError),
0x00000020 => Ok(CanError::NoAck),
0x00000040 => Ok(CanError::BusOff),
0x00000080 => Ok(CanError::BusError),
0x00000100 => Ok(CanError::Restarted),
e => Err(CanErrorDecodingFailure::UnknownErrorType(e)),
}
}
}
pub trait ControllerSpecificErrorInformation {
fn get_ctrl_err(&self) -> Option<&[u8]>;
}
impl ControllerSpecificErrorInformation for CanFrame {
#[inline]
fn get_ctrl_err(&self) -> Option<&[u8]> {
let data = self.data();
if data.len() != 8 {
None
} else {
Some(&data[5..])
}
}
}<|fim▁end|>
|
StartOfFrame,
/// ID bits 28-21 (SFF: 10-3)
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Stub.<|fim▁hole|>pub use self::memvector::*;
pub use self::readonly::*;<|fim▁end|>
|
mod memvector;
mod readonly;
|
<|file_name|>failure.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![experimental]
use alloc::boxed::Box;
use any::{Any, AnyRefExt};
use fmt;
use io::{Writer, IoResult};<|fim▁hole|>use rt::{Stderr, Stdio};
use rustrt::local::Local;
use rustrt::task::Task;
use str::Str;
use string::String;
// Defined in this module instead of io::stdio so that the unwinding
local_data_key!(pub local_stderr: Box<Writer + Send>)
impl Writer for Stdio {
fn write(&mut self, bytes: &[u8]) -> IoResult<()> {
fn fmt_write<F: fmt::FormatWriter>(f: &mut F, bytes: &[u8]) {
let _ = f.write(bytes);
}
fmt_write(self, bytes);
Ok(())
}
}
pub fn on_fail(obj: &Any + Send, file: &'static str, line: uint) {
let msg = match obj.as_ref::<&'static str>() {
Some(s) => *s,
None => match obj.as_ref::<String>() {
Some(s) => s.as_slice(),
None => "Box<Any>",
}
};
let mut err = Stderr;
// It is assumed that all reasonable rust code will have a local task at
// all times. This means that this `exists` will return true almost all of
// the time. There are border cases, however, when the runtime has
// *almost* set up the local task, but hasn't quite gotten there yet. In
// order to get some better diagnostics, we print on failure and
// immediately abort the whole process if there is no local task
// available.
if !Local::exists(None::<Task>) {
let _ = writeln!(&mut err, "failed at '{}', {}:{}", msg, file, line);
if backtrace::log_enabled() {
let _ = backtrace::write(&mut err);
} else {
let _ = writeln!(&mut err, "run with `RUST_BACKTRACE=1` to \
see a backtrace");
}
return
}
// Peel the name out of local task so we can print it. We've got to be sure
// that the local task is in TLS while we're printing as I/O may occur.
let (name, unwinding) = {
let mut t = Local::borrow(None::<Task>);
(t.name.take(), t.unwinder.unwinding())
};
{
let n = name.as_ref().map(|n| n.as_slice()).unwrap_or("<unnamed>");
match local_stderr.replace(None) {
Some(mut stderr) => {
// FIXME: what to do when the task printing fails?
let _ = writeln!(stderr,
"task '{}' failed at '{}', {}:{}\n",
n, msg, file, line);
if backtrace::log_enabled() {
let _ = backtrace::write(&mut *stderr);
}
local_stderr.replace(Some(stderr));
}
None => {
let _ = writeln!(&mut err, "task '{}' failed at '{}', {}:{}",
n, msg, file, line);
if backtrace::log_enabled() {
let _ = backtrace::write(&mut err);
}
}
}
// If this is a double failure, make sure that we printed a backtrace
// for this failure.
if unwinding && !backtrace::log_enabled() {
let _ = backtrace::write(&mut err);
}
}
Local::borrow(None::<Task>).name = name;
}<|fim▁end|>
|
use kinds::Send;
use option::{Some, None};
use result::Ok;
use rt::backtrace;
|
<|file_name|>models.go<|end_file_name|><|fim▁begin|>// +build go1.9
// Copyright 2018 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This code was auto-generated by:
// github.com/Azure/azure-sdk-for-go/tools/profileBuilder
package siterecovery
import original "github.com/Azure/azure-sdk-for-go/services/recoveryservices/mgmt/2018-01-10/siterecovery"
const (
DefaultBaseURI = original.DefaultBaseURI
)
type BaseClient = original.BaseClient
type A2ARpRecoveryPointType = original.A2ARpRecoveryPointType
const (
Latest A2ARpRecoveryPointType = original.Latest
LatestApplicationConsistent A2ARpRecoveryPointType = original.LatestApplicationConsistent
LatestCrashConsistent A2ARpRecoveryPointType = original.LatestCrashConsistent
LatestProcessed A2ARpRecoveryPointType = original.LatestProcessed
)
type AgentAutoUpdateStatus = original.AgentAutoUpdateStatus
const (
Disabled AgentAutoUpdateStatus = original.Disabled
Enabled AgentAutoUpdateStatus = original.Enabled<|fim▁hole|>
const (
Deprecated AgentVersionStatus = original.Deprecated
NotSupported AgentVersionStatus = original.NotSupported
SecurityUpdateRequired AgentVersionStatus = original.SecurityUpdateRequired
Supported AgentVersionStatus = original.Supported
UpdateRequired AgentVersionStatus = original.UpdateRequired
)
type AlternateLocationRecoveryOption = original.AlternateLocationRecoveryOption
const (
CreateVMIfNotFound AlternateLocationRecoveryOption = original.CreateVMIfNotFound
NoAction AlternateLocationRecoveryOption = original.NoAction
)
type DataSyncStatus = original.DataSyncStatus
const (
ForDownTime DataSyncStatus = original.ForDownTime
ForSynchronization DataSyncStatus = original.ForSynchronization
)
type DisableProtectionReason = original.DisableProtectionReason
const (
MigrationComplete DisableProtectionReason = original.MigrationComplete
NotSpecified DisableProtectionReason = original.NotSpecified
)
type FailoverDeploymentModel = original.FailoverDeploymentModel
const (
Classic FailoverDeploymentModel = original.Classic
NotApplicable FailoverDeploymentModel = original.NotApplicable
ResourceManager FailoverDeploymentModel = original.ResourceManager
)
type HealthErrorCategory = original.HealthErrorCategory
const (
AgentAutoUpdate HealthErrorCategory = original.AgentAutoUpdate
Configuration HealthErrorCategory = original.Configuration
FabricInfrastructure HealthErrorCategory = original.FabricInfrastructure
None HealthErrorCategory = original.None
Replication HealthErrorCategory = original.Replication
TestFailover HealthErrorCategory = original.TestFailover
VersionExpiry HealthErrorCategory = original.VersionExpiry
)
type HyperVReplicaAzureRpRecoveryPointType = original.HyperVReplicaAzureRpRecoveryPointType
const (
HyperVReplicaAzureRpRecoveryPointTypeLatest HyperVReplicaAzureRpRecoveryPointType = original.HyperVReplicaAzureRpRecoveryPointTypeLatest
HyperVReplicaAzureRpRecoveryPointTypeLatestApplicationConsistent HyperVReplicaAzureRpRecoveryPointType = original.HyperVReplicaAzureRpRecoveryPointTypeLatestApplicationConsistent
HyperVReplicaAzureRpRecoveryPointTypeLatestProcessed HyperVReplicaAzureRpRecoveryPointType = original.HyperVReplicaAzureRpRecoveryPointTypeLatestProcessed
)
type IdentityProviderType = original.IdentityProviderType
const (
RecoveryServicesActiveDirectory IdentityProviderType = original.RecoveryServicesActiveDirectory
)
type InMageV2RpRecoveryPointType = original.InMageV2RpRecoveryPointType
const (
InMageV2RpRecoveryPointTypeLatest InMageV2RpRecoveryPointType = original.InMageV2RpRecoveryPointTypeLatest
InMageV2RpRecoveryPointTypeLatestApplicationConsistent InMageV2RpRecoveryPointType = original.InMageV2RpRecoveryPointTypeLatestApplicationConsistent
InMageV2RpRecoveryPointTypeLatestCrashConsistent InMageV2RpRecoveryPointType = original.InMageV2RpRecoveryPointTypeLatestCrashConsistent
InMageV2RpRecoveryPointTypeLatestProcessed InMageV2RpRecoveryPointType = original.InMageV2RpRecoveryPointTypeLatestProcessed
)
// InstanceType aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceType = original.InstanceType
const (
InstanceTypeA2A InstanceType = original.InstanceTypeA2A
InstanceTypeApplyRecoveryPointProviderSpecificInput InstanceType = original.InstanceTypeApplyRecoveryPointProviderSpecificInput
InstanceTypeHyperVReplicaAzure InstanceType = original.InstanceTypeHyperVReplicaAzure
InstanceTypeInMageAzureV2 InstanceType = original.InstanceTypeInMageAzureV2
)
// InstanceTypeBasicConfigurationSettings aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicConfigurationSettings = original.InstanceTypeBasicConfigurationSettings
const (
InstanceTypeConfigurationSettings InstanceTypeBasicConfigurationSettings = original.InstanceTypeConfigurationSettings
InstanceTypeHyperVVirtualMachine InstanceTypeBasicConfigurationSettings = original.InstanceTypeHyperVVirtualMachine
InstanceTypeReplicationGroupDetails InstanceTypeBasicConfigurationSettings = original.InstanceTypeReplicationGroupDetails
InstanceTypeVmmVirtualMachine InstanceTypeBasicConfigurationSettings = original.InstanceTypeVmmVirtualMachine
InstanceTypeVMwareVirtualMachine InstanceTypeBasicConfigurationSettings = original.InstanceTypeVMwareVirtualMachine
)
// InstanceTypeBasicDisableProtectionProviderSpecificInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicDisableProtectionProviderSpecificInput = original.InstanceTypeBasicDisableProtectionProviderSpecificInput
const (
InstanceTypeDisableProtectionProviderSpecificInput InstanceTypeBasicDisableProtectionProviderSpecificInput = original.InstanceTypeDisableProtectionProviderSpecificInput
InstanceTypeInMage InstanceTypeBasicDisableProtectionProviderSpecificInput = original.InstanceTypeInMage
)
// InstanceTypeBasicEnableProtectionProviderSpecificInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInput
const (
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeA2A InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeA2A
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeEnableProtectionProviderSpecificInput InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeEnableProtectionProviderSpecificInput
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeInMage InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeInMage
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeInMageAzureV2 InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeInMageAzureV2
InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeSan InstanceTypeBasicEnableProtectionProviderSpecificInput = original.InstanceTypeBasicEnableProtectionProviderSpecificInputInstanceTypeSan
)
// InstanceTypeBasicEventProviderSpecificDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetails
const (
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeA2A InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeA2A
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeEventProviderSpecificDetails InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeEventProviderSpecificDetails
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplica2012 InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplica2012
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplica2012R2 InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplica2012R2
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplicaAzure InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplicaAzure
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplicaBaseEventDetails InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeHyperVReplicaBaseEventDetails
InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeInMageAzureV2 InstanceTypeBasicEventProviderSpecificDetails = original.InstanceTypeBasicEventProviderSpecificDetailsInstanceTypeInMageAzureV2
)
// InstanceTypeBasicEventSpecificDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicEventSpecificDetails = original.InstanceTypeBasicEventSpecificDetails
const (
InstanceTypeEventSpecificDetails InstanceTypeBasicEventSpecificDetails = original.InstanceTypeEventSpecificDetails
InstanceTypeJobStatus InstanceTypeBasicEventSpecificDetails = original.InstanceTypeJobStatus
)
// InstanceTypeBasicFabricSpecificCreateNetworkMappingInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicFabricSpecificCreateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificCreateNetworkMappingInput
const (
InstanceTypeAzureToAzure InstanceTypeBasicFabricSpecificCreateNetworkMappingInput = original.InstanceTypeAzureToAzure
InstanceTypeFabricSpecificCreateNetworkMappingInput InstanceTypeBasicFabricSpecificCreateNetworkMappingInput = original.InstanceTypeFabricSpecificCreateNetworkMappingInput
InstanceTypeVmmToAzure InstanceTypeBasicFabricSpecificCreateNetworkMappingInput = original.InstanceTypeVmmToAzure
InstanceTypeVmmToVmm InstanceTypeBasicFabricSpecificCreateNetworkMappingInput = original.InstanceTypeVmmToVmm
)
// InstanceTypeBasicFabricSpecificCreationInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicFabricSpecificCreationInput = original.InstanceTypeBasicFabricSpecificCreationInput
const (
InstanceTypeAzure InstanceTypeBasicFabricSpecificCreationInput = original.InstanceTypeAzure
InstanceTypeFabricSpecificCreationInput InstanceTypeBasicFabricSpecificCreationInput = original.InstanceTypeFabricSpecificCreationInput
InstanceTypeVMwareV2 InstanceTypeBasicFabricSpecificCreationInput = original.InstanceTypeVMwareV2
)
// InstanceTypeBasicFabricSpecificDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetails
const (
InstanceTypeBasicFabricSpecificDetailsInstanceTypeAzure InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeAzure
InstanceTypeBasicFabricSpecificDetailsInstanceTypeFabricSpecificDetails InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeFabricSpecificDetails
InstanceTypeBasicFabricSpecificDetailsInstanceTypeHyperVSite InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeHyperVSite
InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMM InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMM
InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMware InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMware
InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMwareV2 InstanceTypeBasicFabricSpecificDetails = original.InstanceTypeBasicFabricSpecificDetailsInstanceTypeVMwareV2
)
// InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput
const (
InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeAzureToAzure InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeAzureToAzure
InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeFabricSpecificUpdateNetworkMappingInput InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeFabricSpecificUpdateNetworkMappingInput
InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeVmmToAzure InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeVmmToAzure
InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeVmmToVmm InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput = original.InstanceTypeBasicFabricSpecificUpdateNetworkMappingInputInstanceTypeVmmToVmm
)
// InstanceTypeBasicGroupTaskDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicGroupTaskDetails = original.InstanceTypeBasicGroupTaskDetails
const (
InstanceTypeGroupTaskDetails InstanceTypeBasicGroupTaskDetails = original.InstanceTypeGroupTaskDetails
InstanceTypeInlineWorkflowTaskDetails InstanceTypeBasicGroupTaskDetails = original.InstanceTypeInlineWorkflowTaskDetails
InstanceTypeRecoveryPlanGroupTaskDetails InstanceTypeBasicGroupTaskDetails = original.InstanceTypeRecoveryPlanGroupTaskDetails
InstanceTypeRecoveryPlanShutdownGroupTaskDetails InstanceTypeBasicGroupTaskDetails = original.InstanceTypeRecoveryPlanShutdownGroupTaskDetails
)
// InstanceTypeBasicJobDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicJobDetails = original.InstanceTypeBasicJobDetails
const (
InstanceTypeAsrJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeAsrJobDetails
InstanceTypeExportJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeExportJobDetails
InstanceTypeFailoverJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeFailoverJobDetails
InstanceTypeJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeJobDetails
InstanceTypeSwitchProtectionJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeSwitchProtectionJobDetails
InstanceTypeTestFailoverJobDetails InstanceTypeBasicJobDetails = original.InstanceTypeTestFailoverJobDetails
)
// InstanceTypeBasicNetworkMappingFabricSpecificSettings aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicNetworkMappingFabricSpecificSettings = original.InstanceTypeBasicNetworkMappingFabricSpecificSettings
const (
InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeAzureToAzure InstanceTypeBasicNetworkMappingFabricSpecificSettings = original.InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeAzureToAzure
InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeNetworkMappingFabricSpecificSettings InstanceTypeBasicNetworkMappingFabricSpecificSettings = original.InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeNetworkMappingFabricSpecificSettings
InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeVmmToAzure InstanceTypeBasicNetworkMappingFabricSpecificSettings = original.InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeVmmToAzure
InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeVmmToVmm InstanceTypeBasicNetworkMappingFabricSpecificSettings = original.InstanceTypeBasicNetworkMappingFabricSpecificSettingsInstanceTypeVmmToVmm
)
// InstanceTypeBasicPolicyProviderSpecificDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetails
const (
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeA2A InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeA2A
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplica2012 InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplica2012
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplica2012R2 InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplica2012R2
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplicaAzure InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplicaAzure
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplicaBasePolicyDetails InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeHyperVReplicaBasePolicyDetails
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMage InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMage
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMageAzureV2 InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMageAzureV2
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMageBasePolicyDetails InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeInMageBasePolicyDetails
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypePolicyProviderSpecificDetails InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypePolicyProviderSpecificDetails
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeRcmAzureMigration InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeRcmAzureMigration
InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeVMwareCbt InstanceTypeBasicPolicyProviderSpecificDetails = original.InstanceTypeBasicPolicyProviderSpecificDetailsInstanceTypeVMwareCbt
)
// InstanceTypeBasicPolicyProviderSpecificInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInput
const (
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeA2A
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplica2012 InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplica2012
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplica2012R2 InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplica2012R2
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeInMage InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeInMage
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeInMageAzureV2 InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeInMageAzureV2
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypePolicyProviderSpecificInput InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypePolicyProviderSpecificInput
InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeVMwareCbt InstanceTypeBasicPolicyProviderSpecificInput = original.InstanceTypeBasicPolicyProviderSpecificInputInstanceTypeVMwareCbt
)
// InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails = original.InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails
const (
InstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsInstanceTypeA2A InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails = original.InstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsInstanceTypeA2A
InstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsInstanceTypeProtectionContainerMappingProviderSpecificDetails InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails = original.InstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsInstanceTypeProtectionContainerMappingProviderSpecificDetails
)
// InstanceTypeBasicProviderSpecificFailoverInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInput
const (
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeA2A InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeA2A
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzureFailback InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzureFailback
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeInMage InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeInMage
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeInMageAzureV2 InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeInMageAzureV2
InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeProviderSpecificFailoverInput InstanceTypeBasicProviderSpecificFailoverInput = original.InstanceTypeBasicProviderSpecificFailoverInputInstanceTypeProviderSpecificFailoverInput
)
// InstanceTypeBasicProviderSpecificRecoveryPointDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicProviderSpecificRecoveryPointDetails = original.InstanceTypeBasicProviderSpecificRecoveryPointDetails
const (
InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeA2A InstanceTypeBasicProviderSpecificRecoveryPointDetails = original.InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeA2A
InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeInMageAzureV2 InstanceTypeBasicProviderSpecificRecoveryPointDetails = original.InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeInMageAzureV2
InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeProviderSpecificRecoveryPointDetails InstanceTypeBasicProviderSpecificRecoveryPointDetails = original.InstanceTypeBasicProviderSpecificRecoveryPointDetailsInstanceTypeProviderSpecificRecoveryPointDetails
)
// InstanceTypeBasicRecoveryPlanActionDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicRecoveryPlanActionDetails = original.InstanceTypeBasicRecoveryPlanActionDetails
const (
InstanceTypeAutomationRunbookActionDetails InstanceTypeBasicRecoveryPlanActionDetails = original.InstanceTypeAutomationRunbookActionDetails
InstanceTypeManualActionDetails InstanceTypeBasicRecoveryPlanActionDetails = original.InstanceTypeManualActionDetails
InstanceTypeRecoveryPlanActionDetails InstanceTypeBasicRecoveryPlanActionDetails = original.InstanceTypeRecoveryPlanActionDetails
InstanceTypeScriptActionDetails InstanceTypeBasicRecoveryPlanActionDetails = original.InstanceTypeScriptActionDetails
)
// InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput
const (
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeA2A InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeA2A
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzureFailback InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeHyperVReplicaAzureFailback
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeInMage InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeInMage
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeInMageAzureV2 InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeInMageAzureV2
InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeRecoveryPlanProviderSpecificFailoverInput InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput = original.InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputInstanceTypeRecoveryPlanProviderSpecificFailoverInput
)
// InstanceTypeBasicReplicationProviderSpecificContainerCreationInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicReplicationProviderSpecificContainerCreationInput = original.InstanceTypeBasicReplicationProviderSpecificContainerCreationInput
const (
InstanceTypeBasicReplicationProviderSpecificContainerCreationInputInstanceTypeA2A InstanceTypeBasicReplicationProviderSpecificContainerCreationInput = original.InstanceTypeBasicReplicationProviderSpecificContainerCreationInputInstanceTypeA2A
InstanceTypeBasicReplicationProviderSpecificContainerCreationInputInstanceTypeReplicationProviderSpecificContainerCreationInput InstanceTypeBasicReplicationProviderSpecificContainerCreationInput = original.InstanceTypeBasicReplicationProviderSpecificContainerCreationInputInstanceTypeReplicationProviderSpecificContainerCreationInput
)
// InstanceTypeBasicReplicationProviderSpecificContainerMappingInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicReplicationProviderSpecificContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificContainerMappingInput
const (
InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeA2A InstanceTypeBasicReplicationProviderSpecificContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeA2A
InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeReplicationProviderSpecificContainerMappingInput InstanceTypeBasicReplicationProviderSpecificContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificContainerMappingInputInstanceTypeReplicationProviderSpecificContainerMappingInput
)
// InstanceTypeBasicReplicationProviderSpecificSettings aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettings
const (
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeA2A InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeA2A
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplica2012 InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplica2012
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplica2012R2 InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplica2012R2
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplicaAzure InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplicaAzure
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplicaBaseReplicationDetails InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeHyperVReplicaBaseReplicationDetails
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeInMage InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeInMage
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeInMageAzureV2 InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeInMageAzureV2
InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeReplicationProviderSpecificSettings InstanceTypeBasicReplicationProviderSpecificSettings = original.InstanceTypeBasicReplicationProviderSpecificSettingsInstanceTypeReplicationProviderSpecificSettings
)
// InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput
const (
InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputInstanceTypeA2A InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputInstanceTypeA2A
InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputInstanceTypeReplicationProviderSpecificUpdateContainerMappingInput InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput = original.InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputInstanceTypeReplicationProviderSpecificUpdateContainerMappingInput
)
// InstanceTypeBasicReverseReplicationProviderSpecificInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInput
const (
InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeA2A InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeA2A
InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeInMage InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeInMage
InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeInMageAzureV2 InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeInMageAzureV2
InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeReverseReplicationProviderSpecificInput InstanceTypeBasicReverseReplicationProviderSpecificInput = original.InstanceTypeBasicReverseReplicationProviderSpecificInputInstanceTypeReverseReplicationProviderSpecificInput
)
// InstanceTypeBasicSwitchProtectionProviderSpecificInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicSwitchProtectionProviderSpecificInput = original.InstanceTypeBasicSwitchProtectionProviderSpecificInput
const (
InstanceTypeBasicSwitchProtectionProviderSpecificInputInstanceTypeA2A InstanceTypeBasicSwitchProtectionProviderSpecificInput = original.InstanceTypeBasicSwitchProtectionProviderSpecificInputInstanceTypeA2A
InstanceTypeBasicSwitchProtectionProviderSpecificInputInstanceTypeSwitchProtectionProviderSpecificInput InstanceTypeBasicSwitchProtectionProviderSpecificInput = original.InstanceTypeBasicSwitchProtectionProviderSpecificInputInstanceTypeSwitchProtectionProviderSpecificInput
)
// InstanceTypeBasicTaskTypeDetails aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicTaskTypeDetails = original.InstanceTypeBasicTaskTypeDetails
const (
InstanceTypeAutomationRunbookTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeAutomationRunbookTaskDetails
InstanceTypeConsistencyCheckTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeConsistencyCheckTaskDetails
InstanceTypeFabricReplicationGroupTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeFabricReplicationGroupTaskDetails
InstanceTypeJobTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeJobTaskDetails
InstanceTypeManualActionTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeManualActionTaskDetails
InstanceTypeScriptActionTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeScriptActionTaskDetails
InstanceTypeTaskTypeDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeTaskTypeDetails
InstanceTypeVirtualMachineTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeVirtualMachineTaskDetails
InstanceTypeVMNicUpdatesTaskDetails InstanceTypeBasicTaskTypeDetails = original.InstanceTypeVMNicUpdatesTaskDetails
)
// InstanceTypeBasicUpdateReplicationProtectedItemProviderInput aliases the versioned SDK discriminator enum; the constants below re-export its values.
type InstanceTypeBasicUpdateReplicationProtectedItemProviderInput = original.InstanceTypeBasicUpdateReplicationProtectedItemProviderInput
const (
InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeA2A InstanceTypeBasicUpdateReplicationProtectedItemProviderInput = original.InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeA2A
InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeHyperVReplicaAzure InstanceTypeBasicUpdateReplicationProtectedItemProviderInput = original.InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeHyperVReplicaAzure
InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeInMageAzureV2 InstanceTypeBasicUpdateReplicationProtectedItemProviderInput = original.InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeInMageAzureV2
InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeUpdateReplicationProtectedItemProviderInput InstanceTypeBasicUpdateReplicationProtectedItemProviderInput = original.InstanceTypeBasicUpdateReplicationProtectedItemProviderInputInstanceTypeUpdateReplicationProtectedItemProviderInput
)
// LicenseType aliases the versioned SDK enum; the constants below re-export its values.
type LicenseType = original.LicenseType
const (
LicenseTypeNoLicenseType LicenseType = original.LicenseTypeNoLicenseType
LicenseTypeNotSpecified LicenseType = original.LicenseTypeNotSpecified
LicenseTypeWindowsServer LicenseType = original.LicenseTypeWindowsServer
)
// MultiVMGroupCreateOption aliases the versioned SDK enum; the constants below re-export its values.
type MultiVMGroupCreateOption = original.MultiVMGroupCreateOption
const (
AutoCreated MultiVMGroupCreateOption = original.AutoCreated
UserSpecified MultiVMGroupCreateOption = original.UserSpecified
)
// MultiVMSyncPointOption aliases the versioned SDK enum; the constants below re-export its values.
type MultiVMSyncPointOption = original.MultiVMSyncPointOption
const (
UseMultiVMSyncRecoveryPoint MultiVMSyncPointOption = original.UseMultiVMSyncRecoveryPoint
UsePerVMRecoveryPoint MultiVMSyncPointOption = original.UsePerVMRecoveryPoint
)
// MultiVMSyncStatus aliases the versioned SDK enum; the constants below re-export its values.
type MultiVMSyncStatus = original.MultiVMSyncStatus
const (
MultiVMSyncStatusDisabled MultiVMSyncStatus = original.MultiVMSyncStatusDisabled
MultiVMSyncStatusEnabled MultiVMSyncStatus = original.MultiVMSyncStatusEnabled
)
// PossibleOperationsDirections aliases the versioned SDK enum; the constants below re-export its values.
type PossibleOperationsDirections = original.PossibleOperationsDirections
const (
PrimaryToRecovery PossibleOperationsDirections = original.PrimaryToRecovery
RecoveryToPrimary PossibleOperationsDirections = original.RecoveryToPrimary
)
// PresenceStatus aliases the versioned SDK enum; the constants below re-export its values.
type PresenceStatus = original.PresenceStatus
const (
NotPresent PresenceStatus = original.NotPresent
Present PresenceStatus = original.Present
Unknown PresenceStatus = original.Unknown
)
// RecoveryPlanActionLocation aliases the versioned SDK enum; the constants below re-export its values.
type RecoveryPlanActionLocation = original.RecoveryPlanActionLocation
const (
Primary RecoveryPlanActionLocation = original.Primary
Recovery RecoveryPlanActionLocation = original.Recovery
)
// RecoveryPlanGroupType aliases the versioned SDK enum; the constants below re-export its values.
type RecoveryPlanGroupType = original.RecoveryPlanGroupType
const (
Boot RecoveryPlanGroupType = original.Boot
Failover RecoveryPlanGroupType = original.Failover
Shutdown RecoveryPlanGroupType = original.Shutdown
)
// RecoveryPointSyncType aliases the versioned SDK enum; the constants below re-export its values.
type RecoveryPointSyncType = original.RecoveryPointSyncType
const (
MultiVMSyncRecoveryPoint RecoveryPointSyncType = original.MultiVMSyncRecoveryPoint
PerVMRecoveryPoint RecoveryPointSyncType = original.PerVMRecoveryPoint
)
// RecoveryPointType aliases the versioned SDK enum; the constants below re-export its values.
type RecoveryPointType = original.RecoveryPointType
const (
Custom RecoveryPointType = original.Custom
LatestTag RecoveryPointType = original.LatestTag
LatestTime RecoveryPointType = original.LatestTime
)
type ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperation
const (
ReplicationProtectedItemOperationChangePit ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationChangePit
ReplicationProtectedItemOperationCommit ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationCommit
ReplicationProtectedItemOperationCompleteMigration ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationCompleteMigration
ReplicationProtectedItemOperationDisableProtection ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationDisableProtection
ReplicationProtectedItemOperationFailback ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationFailback
ReplicationProtectedItemOperationFinalizeFailback ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationFinalizeFailback
ReplicationProtectedItemOperationPlannedFailover ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationPlannedFailover
ReplicationProtectedItemOperationRepairReplication ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationRepairReplication
ReplicationProtectedItemOperationReverseReplicate ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationReverseReplicate
ReplicationProtectedItemOperationSwitchProtection ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationSwitchProtection
ReplicationProtectedItemOperationTestFailover ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationTestFailover
ReplicationProtectedItemOperationTestFailoverCleanup ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationTestFailoverCleanup
ReplicationProtectedItemOperationUnplannedFailover ReplicationProtectedItemOperation = original.ReplicationProtectedItemOperationUnplannedFailover
)
// RpInMageRecoveryPointType is a forwarding alias for the enum type of the
// same name in the underlying versioned package (imported as "original").
type RpInMageRecoveryPointType = original.RpInMageRecoveryPointType

// Re-exported RpInMageRecoveryPointType values, forwarded unchanged from the
// underlying package.
const (
RpInMageRecoveryPointTypeCustom RpInMageRecoveryPointType = original.RpInMageRecoveryPointTypeCustom
RpInMageRecoveryPointTypeLatestTag RpInMageRecoveryPointType = original.RpInMageRecoveryPointTypeLatestTag
RpInMageRecoveryPointTypeLatestTime RpInMageRecoveryPointType = original.RpInMageRecoveryPointTypeLatestTime
)
// SetMultiVMSyncStatus is a forwarding alias for the enum type of the same
// name in the underlying versioned package (imported as "original").
type SetMultiVMSyncStatus = original.SetMultiVMSyncStatus

// Re-exported SetMultiVMSyncStatus values, forwarded unchanged from the
// underlying package.
const (
Disable SetMultiVMSyncStatus = original.Disable
Enable SetMultiVMSyncStatus = original.Enable
)
// Severity is a forwarding alias for the enum type of the same name in the
// underlying versioned package (imported as "original").
type Severity = original.Severity

// Re-exported Severity values, forwarded unchanged from the underlying
// package.
const (
Error Severity = original.Error
Info Severity = original.Info
NONE Severity = original.NONE
Warning Severity = original.Warning
)
// SourceSiteOperations is a forwarding alias for the enum type of the same
// name in the underlying versioned package (imported as "original").
type SourceSiteOperations = original.SourceSiteOperations

// Re-exported SourceSiteOperations values, forwarded unchanged from the
// underlying package.
const (
NotRequired SourceSiteOperations = original.NotRequired
Required SourceSiteOperations = original.Required
)
// The following declarations are one-for-one type aliases that forward every
// model, collection, iterator/page, input, details, and long-running-operation
// future type from the underlying versioned package (imported as "original")
// into this profile package. No behavior is defined here; each alias makes the
// identical type available under this package path.

// A2A (Azure-to-Azure) provider-specific types.
type A2AApplyRecoveryPointInput = original.A2AApplyRecoveryPointInput
type A2AContainerCreationInput = original.A2AContainerCreationInput
type A2AContainerMappingInput = original.A2AContainerMappingInput
type A2AEnableProtectionInput = original.A2AEnableProtectionInput
type A2AEventDetails = original.A2AEventDetails
type A2AFailoverProviderInput = original.A2AFailoverProviderInput
type A2APolicyCreationInput = original.A2APolicyCreationInput
type A2APolicyDetails = original.A2APolicyDetails
type A2AProtectedDiskDetails = original.A2AProtectedDiskDetails
type A2AProtectedManagedDiskDetails = original.A2AProtectedManagedDiskDetails
type A2AProtectionContainerMappingDetails = original.A2AProtectionContainerMappingDetails
type A2ARecoveryPointDetails = original.A2ARecoveryPointDetails
type A2AReplicationDetails = original.A2AReplicationDetails
type A2AReprotectInput = original.A2AReprotectInput
type A2ASwitchProtectionInput = original.A2ASwitchProtectionInput
type A2AUpdateContainerMappingInput = original.A2AUpdateContainerMappingInput
type A2AUpdateReplicationProtectedItemInput = original.A2AUpdateReplicationProtectedItemInput
type A2AVMDiskInputDetails = original.A2AVMDiskInputDetails
type A2AVMManagedDiskInputDetails = original.A2AVMManagedDiskInputDetails
type A2AVMManagedDiskUpdateDetails = original.A2AVMManagedDiskUpdateDetails
type AddVCenterRequest = original.AddVCenterRequest
type AddVCenterRequestProperties = original.AddVCenterRequestProperties
// Alert, event, and general model types (the "Basic*" aliases forward the
// polymorphic interface counterparts of the concrete base types).
type Alert = original.Alert
type AlertCollection = original.AlertCollection
type AlertCollectionIterator = original.AlertCollectionIterator
type AlertCollectionPage = original.AlertCollectionPage
type AlertProperties = original.AlertProperties
type ApplyRecoveryPointInput = original.ApplyRecoveryPointInput
type ApplyRecoveryPointInputProperties = original.ApplyRecoveryPointInputProperties
type BasicApplyRecoveryPointProviderSpecificInput = original.BasicApplyRecoveryPointProviderSpecificInput
type ApplyRecoveryPointProviderSpecificInput = original.ApplyRecoveryPointProviderSpecificInput
type AsrJobDetails = original.AsrJobDetails
type ASRTask = original.ASRTask
type AutomationRunbookTaskDetails = original.AutomationRunbookTaskDetails
type AzureFabricCreationInput = original.AzureFabricCreationInput
type AzureFabricSpecificDetails = original.AzureFabricSpecificDetails
type AzureToAzureCreateNetworkMappingInput = original.AzureToAzureCreateNetworkMappingInput
type AzureToAzureNetworkMappingSettings = original.AzureToAzureNetworkMappingSettings
type AzureToAzureUpdateNetworkMappingInput = original.AzureToAzureUpdateNetworkMappingInput
type AzureToAzureVMSyncedConfigDetails = original.AzureToAzureVMSyncedConfigDetails
type AzureVMDiskDetails = original.AzureVMDiskDetails
type ComputeSizeErrorDetails = original.ComputeSizeErrorDetails
type BasicConfigurationSettings = original.BasicConfigurationSettings
type ConfigurationSettings = original.ConfigurationSettings
type ConfigureAlertRequest = original.ConfigureAlertRequest
type ConfigureAlertRequestProperties = original.ConfigureAlertRequestProperties
type ConsistencyCheckTaskDetails = original.ConsistencyCheckTaskDetails
type CreateNetworkMappingInput = original.CreateNetworkMappingInput
type CreateNetworkMappingInputProperties = original.CreateNetworkMappingInputProperties
type CreatePolicyInput = original.CreatePolicyInput
type CreatePolicyInputProperties = original.CreatePolicyInputProperties
type CreateProtectionContainerInput = original.CreateProtectionContainerInput
type CreateProtectionContainerInputProperties = original.CreateProtectionContainerInputProperties
type CreateProtectionContainerMappingInput = original.CreateProtectionContainerMappingInput
type CreateProtectionContainerMappingInputProperties = original.CreateProtectionContainerMappingInputProperties
type CreateRecoveryPlanInput = original.CreateRecoveryPlanInput
type CreateRecoveryPlanInputProperties = original.CreateRecoveryPlanInputProperties
type CurrentScenarioDetails = original.CurrentScenarioDetails
type DataStore = original.DataStore
type DisableProtectionInput = original.DisableProtectionInput
type DisableProtectionInputProperties = original.DisableProtectionInputProperties
type BasicDisableProtectionProviderSpecificInput = original.BasicDisableProtectionProviderSpecificInput
type DisableProtectionProviderSpecificInput = original.DisableProtectionProviderSpecificInput
type DiscoverProtectableItemRequest = original.DiscoverProtectableItemRequest
type DiscoverProtectableItemRequestProperties = original.DiscoverProtectableItemRequestProperties
type DiskDetails = original.DiskDetails
type DiskEncryptionInfo = original.DiskEncryptionInfo
type DiskEncryptionKeyInfo = original.DiskEncryptionKeyInfo
type DiskVolumeDetails = original.DiskVolumeDetails
type Display = original.Display
type EnableProtectionInput = original.EnableProtectionInput
type EnableProtectionInputProperties = original.EnableProtectionInputProperties
type BasicEnableProtectionProviderSpecificInput = original.BasicEnableProtectionProviderSpecificInput
type EnableProtectionProviderSpecificInput = original.EnableProtectionProviderSpecificInput
type EncryptionDetails = original.EncryptionDetails
type Event = original.Event
type EventCollection = original.EventCollection
type EventCollectionIterator = original.EventCollectionIterator
type EventCollectionPage = original.EventCollectionPage
type EventProperties = original.EventProperties
type BasicEventProviderSpecificDetails = original.BasicEventProviderSpecificDetails
type EventProviderSpecificDetails = original.EventProviderSpecificDetails
type EventQueryParameter = original.EventQueryParameter
type BasicEventSpecificDetails = original.BasicEventSpecificDetails
type EventSpecificDetails = original.EventSpecificDetails
type ExportJobDetails = original.ExportJobDetails
// Fabric and fabric-specific setting types.
type Fabric = original.Fabric
type FabricCollection = original.FabricCollection
type FabricCollectionIterator = original.FabricCollectionIterator
type FabricCollectionPage = original.FabricCollectionPage
type FabricCreationInput = original.FabricCreationInput
type FabricCreationInputProperties = original.FabricCreationInputProperties
type FabricProperties = original.FabricProperties
type FabricReplicationGroupTaskDetails = original.FabricReplicationGroupTaskDetails
type BasicFabricSpecificCreateNetworkMappingInput = original.BasicFabricSpecificCreateNetworkMappingInput
type FabricSpecificCreateNetworkMappingInput = original.FabricSpecificCreateNetworkMappingInput
type BasicFabricSpecificCreationInput = original.BasicFabricSpecificCreationInput
type FabricSpecificCreationInput = original.FabricSpecificCreationInput
type BasicFabricSpecificDetails = original.BasicFabricSpecificDetails
type FabricSpecificDetails = original.FabricSpecificDetails
type BasicFabricSpecificUpdateNetworkMappingInput = original.BasicFabricSpecificUpdateNetworkMappingInput
type FabricSpecificUpdateNetworkMappingInput = original.FabricSpecificUpdateNetworkMappingInput
type FailoverJobDetails = original.FailoverJobDetails
type FailoverProcessServerRequest = original.FailoverProcessServerRequest
type FailoverProcessServerRequestProperties = original.FailoverProcessServerRequestProperties
type FailoverReplicationProtectedItemDetails = original.FailoverReplicationProtectedItemDetails
type BasicGroupTaskDetails = original.BasicGroupTaskDetails
type GroupTaskDetails = original.GroupTaskDetails
type HealthError = original.HealthError
type HealthErrorSummary = original.HealthErrorSummary
// Hyper-V Replica provider-specific types.
type HyperVReplica2012EventDetails = original.HyperVReplica2012EventDetails
type HyperVReplica2012R2EventDetails = original.HyperVReplica2012R2EventDetails
type HyperVReplicaAzureApplyRecoveryPointInput = original.HyperVReplicaAzureApplyRecoveryPointInput
type HyperVReplicaAzureEnableProtectionInput = original.HyperVReplicaAzureEnableProtectionInput
type HyperVReplicaAzureEventDetails = original.HyperVReplicaAzureEventDetails
type HyperVReplicaAzureFailbackProviderInput = original.HyperVReplicaAzureFailbackProviderInput
type HyperVReplicaAzureFailoverProviderInput = original.HyperVReplicaAzureFailoverProviderInput
type HyperVReplicaAzurePolicyDetails = original.HyperVReplicaAzurePolicyDetails
type HyperVReplicaAzurePolicyInput = original.HyperVReplicaAzurePolicyInput
type HyperVReplicaAzureReplicationDetails = original.HyperVReplicaAzureReplicationDetails
type HyperVReplicaAzureReprotectInput = original.HyperVReplicaAzureReprotectInput
type HyperVReplicaAzureUpdateReplicationProtectedItemInput = original.HyperVReplicaAzureUpdateReplicationProtectedItemInput
type HyperVReplicaBaseEventDetails = original.HyperVReplicaBaseEventDetails
type HyperVReplicaBasePolicyDetails = original.HyperVReplicaBasePolicyDetails
type HyperVReplicaBaseReplicationDetails = original.HyperVReplicaBaseReplicationDetails
type HyperVReplicaBluePolicyDetails = original.HyperVReplicaBluePolicyDetails
type HyperVReplicaBluePolicyInput = original.HyperVReplicaBluePolicyInput
type HyperVReplicaBlueReplicationDetails = original.HyperVReplicaBlueReplicationDetails
type HyperVReplicaPolicyDetails = original.HyperVReplicaPolicyDetails
type HyperVReplicaPolicyInput = original.HyperVReplicaPolicyInput
type HyperVReplicaReplicationDetails = original.HyperVReplicaReplicationDetails
type HyperVSiteDetails = original.HyperVSiteDetails
type HyperVVirtualMachineDetails = original.HyperVVirtualMachineDetails
type IdentityInformation = original.IdentityInformation
type InconsistentVMDetails = original.InconsistentVMDetails
type InitialReplicationDetails = original.InitialReplicationDetails
type InlineWorkflowTaskDetails = original.InlineWorkflowTaskDetails
// InMage provider-specific types.
type InMageAgentDetails = original.InMageAgentDetails
type InMageAgentVersionDetails = original.InMageAgentVersionDetails
type InMageAzureV2ApplyRecoveryPointInput = original.InMageAzureV2ApplyRecoveryPointInput
type InMageAzureV2EnableProtectionInput = original.InMageAzureV2EnableProtectionInput
type InMageAzureV2EventDetails = original.InMageAzureV2EventDetails
type InMageAzureV2FailoverProviderInput = original.InMageAzureV2FailoverProviderInput
type InMageAzureV2PolicyDetails = original.InMageAzureV2PolicyDetails
type InMageAzureV2PolicyInput = original.InMageAzureV2PolicyInput
type InMageAzureV2ProtectedDiskDetails = original.InMageAzureV2ProtectedDiskDetails
type InMageAzureV2RecoveryPointDetails = original.InMageAzureV2RecoveryPointDetails
type InMageAzureV2ReplicationDetails = original.InMageAzureV2ReplicationDetails
type InMageAzureV2ReprotectInput = original.InMageAzureV2ReprotectInput
type InMageAzureV2UpdateReplicationProtectedItemInput = original.InMageAzureV2UpdateReplicationProtectedItemInput
type InMageBasePolicyDetails = original.InMageBasePolicyDetails
type InMageDisableProtectionProviderSpecificInput = original.InMageDisableProtectionProviderSpecificInput
type InMageDiskDetails = original.InMageDiskDetails
type InMageDiskExclusionInput = original.InMageDiskExclusionInput
type InMageDiskSignatureExclusionOptions = original.InMageDiskSignatureExclusionOptions
type InMageEnableProtectionInput = original.InMageEnableProtectionInput
type InMageFailoverProviderInput = original.InMageFailoverProviderInput
type InMagePolicyDetails = original.InMagePolicyDetails
type InMagePolicyInput = original.InMagePolicyInput
type InMageProtectedDiskDetails = original.InMageProtectedDiskDetails
type InMageReplicationDetails = original.InMageReplicationDetails
type InMageReprotectInput = original.InMageReprotectInput
type InMageVolumeExclusionOptions = original.InMageVolumeExclusionOptions
type InnerHealthError = original.InnerHealthError
type InputEndpoint = original.InputEndpoint
// Job and task types.
type Job = original.Job
type JobCollection = original.JobCollection
type JobCollectionIterator = original.JobCollectionIterator
type JobCollectionPage = original.JobCollectionPage
type BasicJobDetails = original.BasicJobDetails
type JobDetails = original.JobDetails
type JobEntity = original.JobEntity
type JobErrorDetails = original.JobErrorDetails
type JobProperties = original.JobProperties
type JobQueryParameter = original.JobQueryParameter
type JobStatusEventDetails = original.JobStatusEventDetails
type JobTaskDetails = original.JobTaskDetails
type KeyEncryptionKeyInfo = original.KeyEncryptionKeyInfo
type LogicalNetwork = original.LogicalNetwork
type LogicalNetworkCollection = original.LogicalNetworkCollection
type LogicalNetworkCollectionIterator = original.LogicalNetworkCollectionIterator
type LogicalNetworkCollectionPage = original.LogicalNetworkCollectionPage
type LogicalNetworkProperties = original.LogicalNetworkProperties
type ManualActionTaskDetails = original.ManualActionTaskDetails
type MasterTargetServer = original.MasterTargetServer
type MobilityServiceUpdate = original.MobilityServiceUpdate
// Network and network-mapping types.
type Network = original.Network
type NetworkCollection = original.NetworkCollection
type NetworkCollectionIterator = original.NetworkCollectionIterator
type NetworkCollectionPage = original.NetworkCollectionPage
type NetworkMapping = original.NetworkMapping
type NetworkMappingCollection = original.NetworkMappingCollection
type NetworkMappingCollectionIterator = original.NetworkMappingCollectionIterator
type NetworkMappingCollectionPage = original.NetworkMappingCollectionPage
type BasicNetworkMappingFabricSpecificSettings = original.BasicNetworkMappingFabricSpecificSettings
type NetworkMappingFabricSpecificSettings = original.NetworkMappingFabricSpecificSettings
type NetworkMappingProperties = original.NetworkMappingProperties
type NetworkProperties = original.NetworkProperties
type OperationsDiscovery = original.OperationsDiscovery
type OperationsDiscoveryCollection = original.OperationsDiscoveryCollection
type OperationsDiscoveryCollectionIterator = original.OperationsDiscoveryCollectionIterator
type OperationsDiscoveryCollectionPage = original.OperationsDiscoveryCollectionPage
type OSDetails = original.OSDetails
type OSDiskDetails = original.OSDiskDetails
type PlannedFailoverInput = original.PlannedFailoverInput
type PlannedFailoverInputProperties = original.PlannedFailoverInputProperties
// Policy types.
type Policy = original.Policy
type PolicyCollection = original.PolicyCollection
type PolicyCollectionIterator = original.PolicyCollectionIterator
type PolicyCollectionPage = original.PolicyCollectionPage
type PolicyProperties = original.PolicyProperties
type BasicPolicyProviderSpecificDetails = original.BasicPolicyProviderSpecificDetails
type PolicyProviderSpecificDetails = original.PolicyProviderSpecificDetails
type BasicPolicyProviderSpecificInput = original.BasicPolicyProviderSpecificInput
type PolicyProviderSpecificInput = original.PolicyProviderSpecificInput
type ProcessServer = original.ProcessServer
type ProtectableItem = original.ProtectableItem
type ProtectableItemCollection = original.ProtectableItemCollection
type ProtectableItemCollectionIterator = original.ProtectableItemCollectionIterator
type ProtectableItemCollectionPage = original.ProtectableItemCollectionPage
type ProtectableItemProperties = original.ProtectableItemProperties
type ProtectableItemQueryParameter = original.ProtectableItemQueryParameter
type ProtectedItemsQueryParameter = original.ProtectedItemsQueryParameter
type ProtectionContainer = original.ProtectionContainer
type ProtectionContainerCollection = original.ProtectionContainerCollection
type ProtectionContainerCollectionIterator = original.ProtectionContainerCollectionIterator
type ProtectionContainerCollectionPage = original.ProtectionContainerCollectionPage
type ProtectionContainerFabricSpecificDetails = original.ProtectionContainerFabricSpecificDetails
type ProtectionContainerMapping = original.ProtectionContainerMapping
type ProtectionContainerMappingCollection = original.ProtectionContainerMappingCollection
type ProtectionContainerMappingCollectionIterator = original.ProtectionContainerMappingCollectionIterator
type ProtectionContainerMappingCollectionPage = original.ProtectionContainerMappingCollectionPage
type ProtectionContainerMappingProperties = original.ProtectionContainerMappingProperties
type BasicProtectionContainerMappingProviderSpecificDetails = original.BasicProtectionContainerMappingProviderSpecificDetails
type ProtectionContainerMappingProviderSpecificDetails = original.ProtectionContainerMappingProviderSpecificDetails
type ProtectionContainerProperties = original.ProtectionContainerProperties
type ProviderError = original.ProviderError
type BasicProviderSpecificFailoverInput = original.BasicProviderSpecificFailoverInput
type ProviderSpecificFailoverInput = original.ProviderSpecificFailoverInput
type BasicProviderSpecificRecoveryPointDetails = original.BasicProviderSpecificRecoveryPointDetails
type ProviderSpecificRecoveryPointDetails = original.ProviderSpecificRecoveryPointDetails
type RcmAzureMigrationPolicyDetails = original.RcmAzureMigrationPolicyDetails
// Recovery plan and recovery point types.
type RecoveryPlan = original.RecoveryPlan
type RecoveryPlanA2AFailoverInput = original.RecoveryPlanA2AFailoverInput
type RecoveryPlanAction = original.RecoveryPlanAction
type BasicRecoveryPlanActionDetails = original.BasicRecoveryPlanActionDetails
type RecoveryPlanActionDetails = original.RecoveryPlanActionDetails
type RecoveryPlanAutomationRunbookActionDetails = original.RecoveryPlanAutomationRunbookActionDetails
type RecoveryPlanCollection = original.RecoveryPlanCollection
type RecoveryPlanCollectionIterator = original.RecoveryPlanCollectionIterator
type RecoveryPlanCollectionPage = original.RecoveryPlanCollectionPage
type RecoveryPlanGroup = original.RecoveryPlanGroup
type RecoveryPlanGroupTaskDetails = original.RecoveryPlanGroupTaskDetails
type RecoveryPlanHyperVReplicaAzureFailbackInput = original.RecoveryPlanHyperVReplicaAzureFailbackInput
type RecoveryPlanHyperVReplicaAzureFailoverInput = original.RecoveryPlanHyperVReplicaAzureFailoverInput
type RecoveryPlanInMageAzureV2FailoverInput = original.RecoveryPlanInMageAzureV2FailoverInput
type RecoveryPlanInMageFailoverInput = original.RecoveryPlanInMageFailoverInput
type RecoveryPlanManualActionDetails = original.RecoveryPlanManualActionDetails
type RecoveryPlanPlannedFailoverInput = original.RecoveryPlanPlannedFailoverInput
type RecoveryPlanPlannedFailoverInputProperties = original.RecoveryPlanPlannedFailoverInputProperties
type RecoveryPlanProperties = original.RecoveryPlanProperties
type RecoveryPlanProtectedItem = original.RecoveryPlanProtectedItem
type BasicRecoveryPlanProviderSpecificFailoverInput = original.BasicRecoveryPlanProviderSpecificFailoverInput
type RecoveryPlanProviderSpecificFailoverInput = original.RecoveryPlanProviderSpecificFailoverInput
type RecoveryPlanScriptActionDetails = original.RecoveryPlanScriptActionDetails
type RecoveryPlanShutdownGroupTaskDetails = original.RecoveryPlanShutdownGroupTaskDetails
type RecoveryPlanTestFailoverCleanupInput = original.RecoveryPlanTestFailoverCleanupInput
type RecoveryPlanTestFailoverCleanupInputProperties = original.RecoveryPlanTestFailoverCleanupInputProperties
type RecoveryPlanTestFailoverInput = original.RecoveryPlanTestFailoverInput
type RecoveryPlanTestFailoverInputProperties = original.RecoveryPlanTestFailoverInputProperties
type RecoveryPlanUnplannedFailoverInput = original.RecoveryPlanUnplannedFailoverInput
type RecoveryPlanUnplannedFailoverInputProperties = original.RecoveryPlanUnplannedFailoverInputProperties
type RecoveryPoint = original.RecoveryPoint
type RecoveryPointCollection = original.RecoveryPointCollection
type RecoveryPointCollectionIterator = original.RecoveryPointCollectionIterator
type RecoveryPointCollectionPage = original.RecoveryPointCollectionPage
type RecoveryPointProperties = original.RecoveryPointProperties
type RecoveryServicesProvider = original.RecoveryServicesProvider
type RecoveryServicesProviderCollection = original.RecoveryServicesProviderCollection
type RecoveryServicesProviderCollectionIterator = original.RecoveryServicesProviderCollectionIterator
type RecoveryServicesProviderCollectionPage = original.RecoveryServicesProviderCollectionPage
type RecoveryServicesProviderProperties = original.RecoveryServicesProviderProperties
type RemoveProtectionContainerMappingInput = original.RemoveProtectionContainerMappingInput
type RemoveProtectionContainerMappingInputProperties = original.RemoveProtectionContainerMappingInputProperties
type RenewCertificateInput = original.RenewCertificateInput
type RenewCertificateInputProperties = original.RenewCertificateInputProperties
// Long-running-operation future types for the Replication* client groups.
type ReplicationFabricsCheckConsistencyFuture = original.ReplicationFabricsCheckConsistencyFuture
type ReplicationFabricsCreateFuture = original.ReplicationFabricsCreateFuture
type ReplicationFabricsDeleteFuture = original.ReplicationFabricsDeleteFuture
type ReplicationFabricsMigrateToAadFuture = original.ReplicationFabricsMigrateToAadFuture
type ReplicationFabricsPurgeFuture = original.ReplicationFabricsPurgeFuture
type ReplicationFabricsReassociateGatewayFuture = original.ReplicationFabricsReassociateGatewayFuture
type ReplicationFabricsRenewCertificateFuture = original.ReplicationFabricsRenewCertificateFuture
type ReplicationGroupDetails = original.ReplicationGroupDetails
type ReplicationJobsCancelFuture = original.ReplicationJobsCancelFuture
type ReplicationJobsExportFuture = original.ReplicationJobsExportFuture
type ReplicationJobsRestartFuture = original.ReplicationJobsRestartFuture
type ReplicationJobsResumeFuture = original.ReplicationJobsResumeFuture
type ReplicationNetworkMappingsCreateFuture = original.ReplicationNetworkMappingsCreateFuture
type ReplicationNetworkMappingsDeleteFuture = original.ReplicationNetworkMappingsDeleteFuture
type ReplicationNetworkMappingsUpdateFuture = original.ReplicationNetworkMappingsUpdateFuture
type ReplicationPoliciesCreateFuture = original.ReplicationPoliciesCreateFuture
type ReplicationPoliciesDeleteFuture = original.ReplicationPoliciesDeleteFuture
type ReplicationPoliciesUpdateFuture = original.ReplicationPoliciesUpdateFuture
type ReplicationProtectedItem = original.ReplicationProtectedItem
type ReplicationProtectedItemCollection = original.ReplicationProtectedItemCollection
type ReplicationProtectedItemCollectionIterator = original.ReplicationProtectedItemCollectionIterator
type ReplicationProtectedItemCollectionPage = original.ReplicationProtectedItemCollectionPage
type ReplicationProtectedItemProperties = original.ReplicationProtectedItemProperties
type ReplicationProtectedItemsApplyRecoveryPointFuture = original.ReplicationProtectedItemsApplyRecoveryPointFuture
type ReplicationProtectedItemsCreateFuture = original.ReplicationProtectedItemsCreateFuture
type ReplicationProtectedItemsDeleteFuture = original.ReplicationProtectedItemsDeleteFuture
type ReplicationProtectedItemsFailoverCommitFuture = original.ReplicationProtectedItemsFailoverCommitFuture
type ReplicationProtectedItemsPlannedFailoverFuture = original.ReplicationProtectedItemsPlannedFailoverFuture
type ReplicationProtectedItemsPurgeFuture = original.ReplicationProtectedItemsPurgeFuture
type ReplicationProtectedItemsRepairReplicationFuture = original.ReplicationProtectedItemsRepairReplicationFuture
type ReplicationProtectedItemsReprotectFuture = original.ReplicationProtectedItemsReprotectFuture
type ReplicationProtectedItemsTestFailoverCleanupFuture = original.ReplicationProtectedItemsTestFailoverCleanupFuture
type ReplicationProtectedItemsTestFailoverFuture = original.ReplicationProtectedItemsTestFailoverFuture
type ReplicationProtectedItemsUnplannedFailoverFuture = original.ReplicationProtectedItemsUnplannedFailoverFuture
type ReplicationProtectedItemsUpdateFuture = original.ReplicationProtectedItemsUpdateFuture
type ReplicationProtectedItemsUpdateMobilityServiceFuture = original.ReplicationProtectedItemsUpdateMobilityServiceFuture
type ReplicationProtectionContainerMappingsCreateFuture = original.ReplicationProtectionContainerMappingsCreateFuture
type ReplicationProtectionContainerMappingsDeleteFuture = original.ReplicationProtectionContainerMappingsDeleteFuture
type ReplicationProtectionContainerMappingsPurgeFuture = original.ReplicationProtectionContainerMappingsPurgeFuture
type ReplicationProtectionContainerMappingsUpdateFuture = original.ReplicationProtectionContainerMappingsUpdateFuture
type ReplicationProtectionContainersCreateFuture = original.ReplicationProtectionContainersCreateFuture
type ReplicationProtectionContainersDeleteFuture = original.ReplicationProtectionContainersDeleteFuture
type ReplicationProtectionContainersDiscoverProtectableItemFuture = original.ReplicationProtectionContainersDiscoverProtectableItemFuture
type ReplicationProtectionContainersSwitchProtectionFuture = original.ReplicationProtectionContainersSwitchProtectionFuture
type ReplicationProviderContainerUnmappingInput = original.ReplicationProviderContainerUnmappingInput
type BasicReplicationProviderSpecificContainerCreationInput = original.BasicReplicationProviderSpecificContainerCreationInput
type ReplicationProviderSpecificContainerCreationInput = original.ReplicationProviderSpecificContainerCreationInput
type BasicReplicationProviderSpecificContainerMappingInput = original.BasicReplicationProviderSpecificContainerMappingInput
type ReplicationProviderSpecificContainerMappingInput = original.ReplicationProviderSpecificContainerMappingInput
type BasicReplicationProviderSpecificSettings = original.BasicReplicationProviderSpecificSettings
type ReplicationProviderSpecificSettings = original.ReplicationProviderSpecificSettings
type BasicReplicationProviderSpecificUpdateContainerMappingInput = original.BasicReplicationProviderSpecificUpdateContainerMappingInput
type ReplicationProviderSpecificUpdateContainerMappingInput = original.ReplicationProviderSpecificUpdateContainerMappingInput
type ReplicationRecoveryPlansCreateFuture = original.ReplicationRecoveryPlansCreateFuture
type ReplicationRecoveryPlansDeleteFuture = original.ReplicationRecoveryPlansDeleteFuture
type ReplicationRecoveryPlansFailoverCommitFuture = original.ReplicationRecoveryPlansFailoverCommitFuture
type ReplicationRecoveryPlansPlannedFailoverFuture = original.ReplicationRecoveryPlansPlannedFailoverFuture
type ReplicationRecoveryPlansReprotectFuture = original.ReplicationRecoveryPlansReprotectFuture
type ReplicationRecoveryPlansTestFailoverCleanupFuture = original.ReplicationRecoveryPlansTestFailoverCleanupFuture
type ReplicationRecoveryPlansTestFailoverFuture = original.ReplicationRecoveryPlansTestFailoverFuture
type ReplicationRecoveryPlansUnplannedFailoverFuture = original.ReplicationRecoveryPlansUnplannedFailoverFuture
type ReplicationRecoveryPlansUpdateFuture = original.ReplicationRecoveryPlansUpdateFuture
type ReplicationRecoveryServicesProvidersDeleteFuture = original.ReplicationRecoveryServicesProvidersDeleteFuture
type ReplicationRecoveryServicesProvidersPurgeFuture = original.ReplicationRecoveryServicesProvidersPurgeFuture
type ReplicationRecoveryServicesProvidersRefreshProviderFuture = original.ReplicationRecoveryServicesProvidersRefreshProviderFuture
type ReplicationStorageClassificationMappingsCreateFuture = original.ReplicationStorageClassificationMappingsCreateFuture
type ReplicationStorageClassificationMappingsDeleteFuture = original.ReplicationStorageClassificationMappingsDeleteFuture
type ReplicationVaultHealthRefreshFuture = original.ReplicationVaultHealthRefreshFuture
type ReplicationvCentersCreateFuture = original.ReplicationvCentersCreateFuture
type ReplicationvCentersDeleteFuture = original.ReplicationvCentersDeleteFuture
type ReplicationvCentersUpdateFuture = original.ReplicationvCentersUpdateFuture
// Remaining model, storage-classification, vCenter, and VMM types.
type Resource = original.Resource
type ResourceHealthSummary = original.ResourceHealthSummary
type ResumeJobParams = original.ResumeJobParams
type ResumeJobParamsProperties = original.ResumeJobParamsProperties
type RetentionVolume = original.RetentionVolume
type ReverseReplicationInput = original.ReverseReplicationInput
type ReverseReplicationInputProperties = original.ReverseReplicationInputProperties
type BasicReverseReplicationProviderSpecificInput = original.BasicReverseReplicationProviderSpecificInput
type ReverseReplicationProviderSpecificInput = original.ReverseReplicationProviderSpecificInput
type RoleAssignment = original.RoleAssignment
type RunAsAccount = original.RunAsAccount
type SanEnableProtectionInput = original.SanEnableProtectionInput
type ScriptActionTaskDetails = original.ScriptActionTaskDetails
type ServiceError = original.ServiceError
type StorageClassification = original.StorageClassification
type StorageClassificationCollection = original.StorageClassificationCollection
type StorageClassificationCollectionIterator = original.StorageClassificationCollectionIterator
type StorageClassificationCollectionPage = original.StorageClassificationCollectionPage
type StorageClassificationMapping = original.StorageClassificationMapping
type StorageClassificationMappingCollection = original.StorageClassificationMappingCollection
type StorageClassificationMappingCollectionIterator = original.StorageClassificationMappingCollectionIterator
type StorageClassificationMappingCollectionPage = original.StorageClassificationMappingCollectionPage
type StorageClassificationMappingInput = original.StorageClassificationMappingInput
type StorageClassificationMappingProperties = original.StorageClassificationMappingProperties
type StorageClassificationProperties = original.StorageClassificationProperties
type StorageMappingInputProperties = original.StorageMappingInputProperties
type Subnet = original.Subnet
type SwitchProtectionInput = original.SwitchProtectionInput
type SwitchProtectionInputProperties = original.SwitchProtectionInputProperties
type SwitchProtectionJobDetails = original.SwitchProtectionJobDetails
type BasicSwitchProtectionProviderSpecificInput = original.BasicSwitchProtectionProviderSpecificInput
type SwitchProtectionProviderSpecificInput = original.SwitchProtectionProviderSpecificInput
type TargetComputeSize = original.TargetComputeSize
type TargetComputeSizeCollection = original.TargetComputeSizeCollection
type TargetComputeSizeCollectionIterator = original.TargetComputeSizeCollectionIterator
type TargetComputeSizeCollectionPage = original.TargetComputeSizeCollectionPage
type TargetComputeSizeProperties = original.TargetComputeSizeProperties
type BasicTaskTypeDetails = original.BasicTaskTypeDetails
type TaskTypeDetails = original.TaskTypeDetails
type TestFailoverCleanupInput = original.TestFailoverCleanupInput
type TestFailoverCleanupInputProperties = original.TestFailoverCleanupInputProperties
type TestFailoverInput = original.TestFailoverInput
type TestFailoverInputProperties = original.TestFailoverInputProperties
type TestFailoverJobDetails = original.TestFailoverJobDetails
type UnplannedFailoverInput = original.UnplannedFailoverInput
type UnplannedFailoverInputProperties = original.UnplannedFailoverInputProperties
type UpdateMobilityServiceRequest = original.UpdateMobilityServiceRequest
type UpdateMobilityServiceRequestProperties = original.UpdateMobilityServiceRequestProperties
type UpdateNetworkMappingInput = original.UpdateNetworkMappingInput
type UpdateNetworkMappingInputProperties = original.UpdateNetworkMappingInputProperties
type UpdatePolicyInput = original.UpdatePolicyInput
type UpdatePolicyInputProperties = original.UpdatePolicyInputProperties
type UpdateProtectionContainerMappingInput = original.UpdateProtectionContainerMappingInput
type UpdateProtectionContainerMappingInputProperties = original.UpdateProtectionContainerMappingInputProperties
type UpdateRecoveryPlanInput = original.UpdateRecoveryPlanInput
type UpdateRecoveryPlanInputProperties = original.UpdateRecoveryPlanInputProperties
type UpdateReplicationProtectedItemInput = original.UpdateReplicationProtectedItemInput
type UpdateReplicationProtectedItemInputProperties = original.UpdateReplicationProtectedItemInputProperties
type BasicUpdateReplicationProtectedItemProviderInput = original.BasicUpdateReplicationProtectedItemProviderInput
type UpdateReplicationProtectedItemProviderInput = original.UpdateReplicationProtectedItemProviderInput
type UpdateVCenterRequest = original.UpdateVCenterRequest
type UpdateVCenterRequestProperties = original.UpdateVCenterRequestProperties
type VaultHealthDetails = original.VaultHealthDetails
type VaultHealthProperties = original.VaultHealthProperties
type VCenter = original.VCenter
type VCenterCollection = original.VCenterCollection
type VCenterCollectionIterator = original.VCenterCollectionIterator
type VCenterCollectionPage = original.VCenterCollectionPage
type VCenterProperties = original.VCenterProperties
type VersionDetails = original.VersionDetails
type VirtualMachineTaskDetails = original.VirtualMachineTaskDetails
type VmmDetails = original.VmmDetails
type VmmToAzureCreateNetworkMappingInput = original.VmmToAzureCreateNetworkMappingInput
type VmmToAzureNetworkMappingSettings = original.VmmToAzureNetworkMappingSettings
type VmmToAzureUpdateNetworkMappingInput = original.VmmToAzureUpdateNetworkMappingInput
type VmmToVmmCreateNetworkMappingInput = original.VmmToVmmCreateNetworkMappingInput
type VmmToVmmNetworkMappingSettings = original.VmmToVmmNetworkMappingSettings
type VmmToVmmUpdateNetworkMappingInput = original.VmmToVmmUpdateNetworkMappingInput
type VmmVirtualMachineDetails = original.VmmVirtualMachineDetails
type VMNicDetails = original.VMNicDetails
type VMNicInputDetails = original.VMNicInputDetails
type VMNicUpdatesTaskDetails = original.VMNicUpdatesTaskDetails
type VMwareCbtPolicyCreationInput = original.VMwareCbtPolicyCreationInput
type VmwareCbtPolicyDetails = original.VmwareCbtPolicyDetails
type VMwareDetails = original.VMwareDetails
type VMwareV2FabricCreationInput = original.VMwareV2FabricCreationInput
type VMwareV2FabricSpecificDetails = original.VMwareV2FabricSpecificDetails
type VMwareVirtualMachineDetails = original.VMwareVirtualMachineDetails
type OperationsClient = original.OperationsClient
type RecoveryPointsClient = original.RecoveryPointsClient
type ReplicationAlertSettingsClient = original.ReplicationAlertSettingsClient
type ReplicationEventsClient = original.ReplicationEventsClient
type ReplicationFabricsClient = original.ReplicationFabricsClient
type ReplicationJobsClient = original.ReplicationJobsClient
type ReplicationLogicalNetworksClient = original.ReplicationLogicalNetworksClient
type ReplicationNetworkMappingsClient = original.ReplicationNetworkMappingsClient
type ReplicationNetworksClient = original.ReplicationNetworksClient
type ReplicationPoliciesClient = original.ReplicationPoliciesClient
type ReplicationProtectableItemsClient = original.ReplicationProtectableItemsClient
type ReplicationProtectedItemsClient = original.ReplicationProtectedItemsClient
type ReplicationProtectionContainerMappingsClient = original.ReplicationProtectionContainerMappingsClient
type ReplicationProtectionContainersClient = original.ReplicationProtectionContainersClient
type ReplicationRecoveryPlansClient = original.ReplicationRecoveryPlansClient
type ReplicationRecoveryServicesProvidersClient = original.ReplicationRecoveryServicesProvidersClient
type ReplicationStorageClassificationMappingsClient = original.ReplicationStorageClassificationMappingsClient
type ReplicationStorageClassificationsClient = original.ReplicationStorageClassificationsClient
type ReplicationVaultHealthClient = original.ReplicationVaultHealthClient
type ReplicationvCentersClient = original.ReplicationvCentersClient
type TargetComputeSizesClient = original.TargetComputeSizesClient
func New(subscriptionID string, resourceGroupName string, resourceName string) BaseClient {
return original.New(subscriptionID, resourceGroupName, resourceName)
}
func NewWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) BaseClient {
return original.NewWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func PossibleA2ARpRecoveryPointTypeValues() []A2ARpRecoveryPointType {
return original.PossibleA2ARpRecoveryPointTypeValues()
}
func PossibleAgentAutoUpdateStatusValues() []AgentAutoUpdateStatus {
return original.PossibleAgentAutoUpdateStatusValues()
}
func PossibleAgentVersionStatusValues() []AgentVersionStatus {
return original.PossibleAgentVersionStatusValues()
}
func PossibleAlternateLocationRecoveryOptionValues() []AlternateLocationRecoveryOption {
return original.PossibleAlternateLocationRecoveryOptionValues()
}
func PossibleDataSyncStatusValues() []DataSyncStatus {
return original.PossibleDataSyncStatusValues()
}
func PossibleDisableProtectionReasonValues() []DisableProtectionReason {
return original.PossibleDisableProtectionReasonValues()
}
func PossibleFailoverDeploymentModelValues() []FailoverDeploymentModel {
return original.PossibleFailoverDeploymentModelValues()
}
func PossibleHealthErrorCategoryValues() []HealthErrorCategory {
return original.PossibleHealthErrorCategoryValues()
}
func PossibleHyperVReplicaAzureRpRecoveryPointTypeValues() []HyperVReplicaAzureRpRecoveryPointType {
return original.PossibleHyperVReplicaAzureRpRecoveryPointTypeValues()
}
func PossibleIdentityProviderTypeValues() []IdentityProviderType {
return original.PossibleIdentityProviderTypeValues()
}
func PossibleInMageV2RpRecoveryPointTypeValues() []InMageV2RpRecoveryPointType {
return original.PossibleInMageV2RpRecoveryPointTypeValues()
}
func PossibleInstanceTypeValues() []InstanceType {
return original.PossibleInstanceTypeValues()
}
func PossibleInstanceTypeBasicConfigurationSettingsValues() []InstanceTypeBasicConfigurationSettings {
return original.PossibleInstanceTypeBasicConfigurationSettingsValues()
}
func PossibleInstanceTypeBasicDisableProtectionProviderSpecificInputValues() []InstanceTypeBasicDisableProtectionProviderSpecificInput {
return original.PossibleInstanceTypeBasicDisableProtectionProviderSpecificInputValues()
}
func PossibleInstanceTypeBasicEnableProtectionProviderSpecificInputValues() []InstanceTypeBasicEnableProtectionProviderSpecificInput {
return original.PossibleInstanceTypeBasicEnableProtectionProviderSpecificInputValues()
}
func PossibleInstanceTypeBasicEventProviderSpecificDetailsValues() []InstanceTypeBasicEventProviderSpecificDetails {
return original.PossibleInstanceTypeBasicEventProviderSpecificDetailsValues()
}
func PossibleInstanceTypeBasicEventSpecificDetailsValues() []InstanceTypeBasicEventSpecificDetails {
return original.PossibleInstanceTypeBasicEventSpecificDetailsValues()
}
func PossibleInstanceTypeBasicFabricSpecificCreateNetworkMappingInputValues() []InstanceTypeBasicFabricSpecificCreateNetworkMappingInput {
return original.PossibleInstanceTypeBasicFabricSpecificCreateNetworkMappingInputValues()
}
func PossibleInstanceTypeBasicFabricSpecificCreationInputValues() []InstanceTypeBasicFabricSpecificCreationInput {
return original.PossibleInstanceTypeBasicFabricSpecificCreationInputValues()
}
func PossibleInstanceTypeBasicFabricSpecificDetailsValues() []InstanceTypeBasicFabricSpecificDetails {
return original.PossibleInstanceTypeBasicFabricSpecificDetailsValues()
}
func PossibleInstanceTypeBasicFabricSpecificUpdateNetworkMappingInputValues() []InstanceTypeBasicFabricSpecificUpdateNetworkMappingInput {
return original.PossibleInstanceTypeBasicFabricSpecificUpdateNetworkMappingInputValues()
}
func PossibleInstanceTypeBasicGroupTaskDetailsValues() []InstanceTypeBasicGroupTaskDetails {
return original.PossibleInstanceTypeBasicGroupTaskDetailsValues()
}
func PossibleInstanceTypeBasicJobDetailsValues() []InstanceTypeBasicJobDetails {
return original.PossibleInstanceTypeBasicJobDetailsValues()
}
func PossibleInstanceTypeBasicNetworkMappingFabricSpecificSettingsValues() []InstanceTypeBasicNetworkMappingFabricSpecificSettings {
return original.PossibleInstanceTypeBasicNetworkMappingFabricSpecificSettingsValues()
}
func PossibleInstanceTypeBasicPolicyProviderSpecificDetailsValues() []InstanceTypeBasicPolicyProviderSpecificDetails {
return original.PossibleInstanceTypeBasicPolicyProviderSpecificDetailsValues()
}
func PossibleInstanceTypeBasicPolicyProviderSpecificInputValues() []InstanceTypeBasicPolicyProviderSpecificInput {
return original.PossibleInstanceTypeBasicPolicyProviderSpecificInputValues()
}
func PossibleInstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsValues() []InstanceTypeBasicProtectionContainerMappingProviderSpecificDetails {
return original.PossibleInstanceTypeBasicProtectionContainerMappingProviderSpecificDetailsValues()
}
func PossibleInstanceTypeBasicProviderSpecificFailoverInputValues() []InstanceTypeBasicProviderSpecificFailoverInput {
return original.PossibleInstanceTypeBasicProviderSpecificFailoverInputValues()
}
func PossibleInstanceTypeBasicProviderSpecificRecoveryPointDetailsValues() []InstanceTypeBasicProviderSpecificRecoveryPointDetails {
return original.PossibleInstanceTypeBasicProviderSpecificRecoveryPointDetailsValues()
}
func PossibleInstanceTypeBasicRecoveryPlanActionDetailsValues() []InstanceTypeBasicRecoveryPlanActionDetails {
return original.PossibleInstanceTypeBasicRecoveryPlanActionDetailsValues()
}
func PossibleInstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputValues() []InstanceTypeBasicRecoveryPlanProviderSpecificFailoverInput {
return original.PossibleInstanceTypeBasicRecoveryPlanProviderSpecificFailoverInputValues()
}
func PossibleInstanceTypeBasicReplicationProviderSpecificContainerCreationInputValues() []InstanceTypeBasicReplicationProviderSpecificContainerCreationInput {
return original.PossibleInstanceTypeBasicReplicationProviderSpecificContainerCreationInputValues()
}
func PossibleInstanceTypeBasicReplicationProviderSpecificContainerMappingInputValues() []InstanceTypeBasicReplicationProviderSpecificContainerMappingInput {
return original.PossibleInstanceTypeBasicReplicationProviderSpecificContainerMappingInputValues()
}
func PossibleInstanceTypeBasicReplicationProviderSpecificSettingsValues() []InstanceTypeBasicReplicationProviderSpecificSettings {
return original.PossibleInstanceTypeBasicReplicationProviderSpecificSettingsValues()
}
func PossibleInstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputValues() []InstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInput {
return original.PossibleInstanceTypeBasicReplicationProviderSpecificUpdateContainerMappingInputValues()
}
func PossibleInstanceTypeBasicReverseReplicationProviderSpecificInputValues() []InstanceTypeBasicReverseReplicationProviderSpecificInput {
return original.PossibleInstanceTypeBasicReverseReplicationProviderSpecificInputValues()
}
func PossibleInstanceTypeBasicSwitchProtectionProviderSpecificInputValues() []InstanceTypeBasicSwitchProtectionProviderSpecificInput {
return original.PossibleInstanceTypeBasicSwitchProtectionProviderSpecificInputValues()
}
func PossibleInstanceTypeBasicTaskTypeDetailsValues() []InstanceTypeBasicTaskTypeDetails {
return original.PossibleInstanceTypeBasicTaskTypeDetailsValues()
}
func PossibleInstanceTypeBasicUpdateReplicationProtectedItemProviderInputValues() []InstanceTypeBasicUpdateReplicationProtectedItemProviderInput {
return original.PossibleInstanceTypeBasicUpdateReplicationProtectedItemProviderInputValues()
}
func PossibleLicenseTypeValues() []LicenseType {
return original.PossibleLicenseTypeValues()
}
func PossibleMultiVMGroupCreateOptionValues() []MultiVMGroupCreateOption {
return original.PossibleMultiVMGroupCreateOptionValues()
}
func PossibleMultiVMSyncPointOptionValues() []MultiVMSyncPointOption {
return original.PossibleMultiVMSyncPointOptionValues()
}
func PossibleMultiVMSyncStatusValues() []MultiVMSyncStatus {
return original.PossibleMultiVMSyncStatusValues()
}
func PossiblePossibleOperationsDirectionsValues() []PossibleOperationsDirections {
return original.PossiblePossibleOperationsDirectionsValues()
}
func PossiblePresenceStatusValues() []PresenceStatus {
return original.PossiblePresenceStatusValues()
}
func PossibleRecoveryPlanActionLocationValues() []RecoveryPlanActionLocation {
return original.PossibleRecoveryPlanActionLocationValues()
}
func PossibleRecoveryPlanGroupTypeValues() []RecoveryPlanGroupType {
return original.PossibleRecoveryPlanGroupTypeValues()
}
func PossibleRecoveryPointSyncTypeValues() []RecoveryPointSyncType {
return original.PossibleRecoveryPointSyncTypeValues()
}
func PossibleRecoveryPointTypeValues() []RecoveryPointType {
return original.PossibleRecoveryPointTypeValues()
}
func PossibleReplicationProtectedItemOperationValues() []ReplicationProtectedItemOperation {
return original.PossibleReplicationProtectedItemOperationValues()
}
func PossibleRpInMageRecoveryPointTypeValues() []RpInMageRecoveryPointType {
return original.PossibleRpInMageRecoveryPointTypeValues()
}
func PossibleSetMultiVMSyncStatusValues() []SetMultiVMSyncStatus {
return original.PossibleSetMultiVMSyncStatusValues()
}
func PossibleSeverityValues() []Severity {
return original.PossibleSeverityValues()
}
func PossibleSourceSiteOperationsValues() []SourceSiteOperations {
return original.PossibleSourceSiteOperationsValues()
}
func NewOperationsClient(subscriptionID string, resourceGroupName string, resourceName string) OperationsClient {
return original.NewOperationsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewOperationsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) OperationsClient {
return original.NewOperationsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewRecoveryPointsClient(subscriptionID string, resourceGroupName string, resourceName string) RecoveryPointsClient {
return original.NewRecoveryPointsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewRecoveryPointsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) RecoveryPointsClient {
return original.NewRecoveryPointsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationAlertSettingsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationAlertSettingsClient {
return original.NewReplicationAlertSettingsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationAlertSettingsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationAlertSettingsClient {
return original.NewReplicationAlertSettingsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationEventsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationEventsClient {
return original.NewReplicationEventsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationEventsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationEventsClient {
return original.NewReplicationEventsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationFabricsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationFabricsClient {
return original.NewReplicationFabricsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationFabricsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationFabricsClient {
return original.NewReplicationFabricsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationJobsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationJobsClient {
return original.NewReplicationJobsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationJobsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationJobsClient {
return original.NewReplicationJobsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationLogicalNetworksClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationLogicalNetworksClient {
return original.NewReplicationLogicalNetworksClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationLogicalNetworksClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationLogicalNetworksClient {
return original.NewReplicationLogicalNetworksClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationNetworkMappingsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationNetworkMappingsClient {
return original.NewReplicationNetworkMappingsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationNetworkMappingsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationNetworkMappingsClient {
return original.NewReplicationNetworkMappingsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationNetworksClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationNetworksClient {
return original.NewReplicationNetworksClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationNetworksClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationNetworksClient {
return original.NewReplicationNetworksClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationPoliciesClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationPoliciesClient {
return original.NewReplicationPoliciesClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationPoliciesClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationPoliciesClient {
return original.NewReplicationPoliciesClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectableItemsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectableItemsClient {
return original.NewReplicationProtectableItemsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectableItemsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectableItemsClient {
return original.NewReplicationProtectableItemsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectedItemsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectedItemsClient {
return original.NewReplicationProtectedItemsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectedItemsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectedItemsClient {
return original.NewReplicationProtectedItemsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectionContainerMappingsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectionContainerMappingsClient {
return original.NewReplicationProtectionContainerMappingsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectionContainerMappingsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectionContainerMappingsClient {
return original.NewReplicationProtectionContainerMappingsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectionContainersClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectionContainersClient {
return original.NewReplicationProtectionContainersClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationProtectionContainersClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationProtectionContainersClient {
return original.NewReplicationProtectionContainersClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationRecoveryPlansClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationRecoveryPlansClient {
return original.NewReplicationRecoveryPlansClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationRecoveryPlansClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationRecoveryPlansClient {
return original.NewReplicationRecoveryPlansClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationRecoveryServicesProvidersClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationRecoveryServicesProvidersClient {
return original.NewReplicationRecoveryServicesProvidersClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationRecoveryServicesProvidersClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationRecoveryServicesProvidersClient {
return original.NewReplicationRecoveryServicesProvidersClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationStorageClassificationMappingsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationStorageClassificationMappingsClient {
return original.NewReplicationStorageClassificationMappingsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationStorageClassificationMappingsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationStorageClassificationMappingsClient {
return original.NewReplicationStorageClassificationMappingsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationStorageClassificationsClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationStorageClassificationsClient {
return original.NewReplicationStorageClassificationsClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationStorageClassificationsClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationStorageClassificationsClient {
return original.NewReplicationStorageClassificationsClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationVaultHealthClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationVaultHealthClient {
return original.NewReplicationVaultHealthClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationVaultHealthClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationVaultHealthClient {
return original.NewReplicationVaultHealthClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationvCentersClient(subscriptionID string, resourceGroupName string, resourceName string) ReplicationvCentersClient {
return original.NewReplicationvCentersClient(subscriptionID, resourceGroupName, resourceName)
}
func NewReplicationvCentersClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) ReplicationvCentersClient {
return original.NewReplicationvCentersClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func NewTargetComputeSizesClient(subscriptionID string, resourceGroupName string, resourceName string) TargetComputeSizesClient {
return original.NewTargetComputeSizesClient(subscriptionID, resourceGroupName, resourceName)
}
func NewTargetComputeSizesClientWithBaseURI(baseURI string, subscriptionID string, resourceGroupName string, resourceName string) TargetComputeSizesClient {
return original.NewTargetComputeSizesClientWithBaseURI(baseURI, subscriptionID, resourceGroupName, resourceName)
}
func UserAgent() string {
return original.UserAgent() + " profiles/preview"
}
func Version() string {
return original.Version()
}<|fim▁end|>
|
)
type AgentVersionStatus = original.AgentVersionStatus
|
<|file_name|>removebucketencryption.go<|end_file_name|><|fim▁begin|>//go:build example
// +build example
/*
* MinIO Go Library for Amazon S3 Compatible Cloud Storage
* Copyright 2020 MinIO, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main
import (
"context"
"log"
"github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials"
)
func main() {
// Note: YOUR-ACCESSKEYID, YOUR-SECRETACCESSKEY and my-bucketname are
// dummy values, please replace them with original values.
// Requests are always secure (HTTPS) by default. Set secure=false to enable insecure (HTTP) access.
// This boolean value is the last argument for New().
<|fim▁hole|> // New returns an Amazon S3 compatible client object. API compatibility (v2 or v4) is automatically
// determined based on the Endpoint value.
s3Client, err := minio.New("s3.amazonaws.com", &minio.Options{
Creds: credentials.NewStaticV4("YOUR-ACCESSKEYID", "YOUR-SECRETACCESSKEY", ""),
Secure: true,
})
if err != nil {
log.Fatalln(err)
}
// s3Client.TraceOn(os.Stderr)
// Get default encryption configuration set on a S3 bucket
err = s3Client.RemoveBucketEncryption(context.Background(), "my-bucketname")
if err != nil {
log.Fatalln(err)
}
}<|fim▁end|>
| |
<|file_name|>scarf.py<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
|
# scarf1{background-image: url('https://rollforfantasy.com/images/clothing/nmale/scarf1.png');}
scarf = ["scarf{}.png".format(i) for i in range(1, 31)]
|
<|file_name|>baseline.py<|end_file_name|><|fim▁begin|># Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""FixMatch with Distribution Alignment and Adaptative Confidence Ratio.
"""
import os
import sys
from typing import Callable
import jax
import jax.numpy as jn
import objax
from absl import app
from absl import flags
from absl.flags import FLAGS
from objax.typing import JaxArray
from semi_supervised_domain_adaptation.lib.data import MixData, CTAData
from semi_supervised_domain_adaptation.lib.train import TrainableSSDAModule
from shared.data.fsl import DATASETS as FSL_DATASETS
from shared.data.ssl import DATASETS as SSL_DATASETS, DataSetSSL
from shared.train import ScheduleCos
from shared.util import setup_tf, MyParallel
from shared.zoo.models import network, ARCHS
class Baseline(TrainableSSDAModule):
def __init__(self, nclass: int, model: Callable, **kwargs):
super().__init__(nclass, kwargs)
self.model: objax.Module = model(colors=3, nclass=nclass, **kwargs)
self.model_ema = objax.optimizer.ExponentialMovingAverageModule(self.model, momentum=0.999)
if FLAGS.arch.endswith('pretrain'):
# Initialize weights of EMA with pretrained model's weights.
self.model_ema.ema.momentum = 0
self.model_ema.update_ema()
self.model_ema.ema.momentum = 0.999
self.stats = objax.Module()
self.stats.keygen = objax.random.DEFAULT_GENERATOR
self.stats.p_labeled = objax.nn.ExponentialMovingAverage((nclass,), init_value=1 / nclass)
self.stats.p_unlabeled = objax.nn.MovingAverage((nclass,), buffer_size=128, init_value=1 / nclass)
train_vars = self.model.vars() + self.stats.vars()
self.opt = objax.optimizer.Momentum(train_vars)
self.lr = ScheduleCos(self.params.lr, self.params.lr_decay)
@objax.Function.with_vars(self.model_ema.vars())
def eval_op(x: JaxArray, domain: int) -> JaxArray:
return objax.functional.softmax(self.model_ema(x, training=False, domain=domain))
def loss_function(sx, sy, tu):
c, h, w = sx.shape[-3:]
xu = jn.concatenate((sx, tu)).reshape((-1, c, h, w))
logit = self.model(xu, training=True)
logit_sx = jn.split(logit, (2 * sx.shape[0],))[0]
logit_sx_weak, logit_sx_strong = logit_sx[::2], logit_sx[1::2]<|fim▁hole|>
xe = 0.5 * (objax.functional.loss.cross_entropy_logits(logit_sx_weak, sy).mean() +
objax.functional.loss.cross_entropy_logits(logit_sx_strong, sy).mean())
wd = 0.5 * sum((v.value ** 2).sum() for k, v in train_vars.items() if k.endswith('.w'))
loss = xe + self.params.wd * wd
return loss, {'losses/xe': xe, 'losses/wd': wd}
gv = objax.GradValues(loss_function, train_vars)
@objax.Function.with_vars(self.vars())
def train_op(step, sx, sy, tx, ty, tu, probe=None):
y_probe = eval_op(probe, 1) if probe is not None else None
p = step / (FLAGS.train_mimg << 20)
lr = self.lr(p)
g, v = gv(jn.concatenate((sx, tx)), jn.concatenate((sy, ty)), tu)
self.opt(lr, objax.functional.parallel.pmean(g))
self.model_ema.update_ema()
return objax.functional.parallel.pmean({'monitors/lr': lr, **v[1]}), y_probe
self.train_op = MyParallel(train_op, reduce=lambda x: x)
self.eval_op = MyParallel(eval_op, static_argnums=(1,))
def main(argv):
del argv
print('JAX host: %d / %d' % (jax.host_id(), jax.host_count()))
print('JAX devices:\n%s' % '\n'.join(str(d) for d in jax.devices()), flush=True)
setup_tf()
source = FSL_DATASETS()[f'{FLAGS.dataset}_{FLAGS.source}-0']()
target_name, target_samples_per_class, target_seed = DataSetSSL.parse_name(f'{FLAGS.dataset}_{FLAGS.target}')
target_labeled = SSL_DATASETS()[target_name](target_samples_per_class, target_seed)
target_unlabeled = FSL_DATASETS()[f'{target_name}-0']()
testsets = [target_unlabeled.test, source.test] # Ordered by domain (unlabeled always first)
module = Baseline(source.nclass, network(FLAGS.arch),
lr=FLAGS.lr,
lr_decay=FLAGS.lr_decay,
wd=FLAGS.wd,
arch=FLAGS.arch,
batch=FLAGS.batch,
uratio=FLAGS.uratio)
logdir = f'SSDA/{FLAGS.dataset}/{FLAGS.source}/{FLAGS.target}/{FLAGS.augment}/{module.__class__.__name__}/'
logdir += '_'.join(sorted('%s%s' % k for k in module.params.items()))
logdir = os.path.join(FLAGS.logdir, logdir)
test = {}
for domain, testset in enumerate(testsets):
test.update((k, v.parse().batch(FLAGS.batch).nchw().map(lambda d: {**d, 'domain': domain}).prefetch(16))
for k, v in testset.items())
if FLAGS.augment.startswith('('):
train = MixData(source.train, target_labeled.train, target_unlabeled.train, source.nclass, FLAGS.batch,
FLAGS.uratio)
elif FLAGS.augment.startswith('CTA('):
train = CTAData(source.train, target_labeled.train, target_unlabeled.train, source.nclass, FLAGS.batch,
FLAGS.uratio)
else:
raise ValueError(f'Augment flag value {FLAGS.augment} not supported.')
module.train(FLAGS.train_mimg << 10, FLAGS.report_kimg, train, test, logdir, FLAGS.keep_ckpts)
train.stop()
if __name__ == '__main__':
flags.DEFINE_enum('arch', 'wrn28-2', ARCHS, 'Model architecture.')
flags.DEFINE_float('lr', 0.03, 'Learning rate.')
flags.DEFINE_float('lr_decay', 0.25, 'Learning rate decay.')
flags.DEFINE_float('wd', 0.001, 'Weight decay.')
flags.DEFINE_integer('batch', 64, 'Batch size')
flags.DEFINE_integer('uratio', 3, 'Unlabeled batch size ratio')
flags.DEFINE_integer('report_kimg', 64, 'Reporting period in kibi-images.')
flags.DEFINE_integer('train_mimg', 8, 'Training duration in mega-images.')
flags.DEFINE_integer('keep_ckpts', 5, 'Number of checkpoints to keep (0 for all).')
flags.DEFINE_string('logdir', 'experiments', 'Directory where to save checkpoints and tensorboard data.')
flags.DEFINE_string('dataset', 'domainnet32', 'Source data to train on.')
flags.DEFINE_string('source', 'clipart', 'Source data to train on.')
flags.DEFINE_string('target', 'infograph(10,seed=1)', 'Target data to train on.')
FLAGS.set_default('augment', 'CTA(sm,sm,probe=1)')
FLAGS.set_default('para_augment', 8)
app.run(main)<|fim▁end|>
| |
<|file_name|>LimitSet.py<|end_file_name|><|fim▁begin|># Copyright (C) 2010-2011 Richard Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from CIM14.IEC61970.Core.IdentifiedObject import IdentifiedObject
class LimitSet(IdentifiedObject):
"""Specifies a set of Limits that are associated with a Measurement. A Measurement may have several LimitSets corresponding to seasonal or other changing conditions. The condition is captured in the name and description attributes. The same LimitSet may be used for several Measurements. In particular percentage limits are used this way.
"""<|fim▁hole|>
def __init__(self, isPercentageLimits=False, *args, **kw_args):
"""Initialises a new 'LimitSet' instance.
@param isPercentageLimits: Tells if the limit values are in percentage of normalValue or the specified Unit for Measurements and Controls.
"""
#: Tells if the limit values are in percentage of normalValue or the specified Unit for Measurements and Controls.
self.isPercentageLimits = isPercentageLimits
super(LimitSet, self).__init__(*args, **kw_args)
_attrs = ["isPercentageLimits"]
_attr_types = {"isPercentageLimits": bool}
_defaults = {"isPercentageLimits": False}
_enums = {}
_refs = []
_many_refs = []<|fim▁end|>
| |
<|file_name|>available-demos.js<|end_file_name|><|fim▁begin|>export default [
{
name: 'hello-wold-functional',
displayName: 'Hello World (functional)',
directory: 'hello-world',
files: ['hello-world-functional.js']
},
{
name: 'hello-wold-class',
displayName: 'Hello World (class component)',
directory: 'hello-world',
files: ['hello-world-class.js']
},
{
name: 'clock',<|fim▁hole|> directory: 'clock',
files: ['clock.js']
},
{
name: 'simple-counter',
displayName: 'Simple Counter',
directory: 'counter',
files: ['simple-counter.js']
},
{
name: 'complex-counter',
displayName: 'Complex Counter',
directory: 'counter',
files: ['complex-counter.js']
},
{
name: 'injection',
displayName: 'Injection',
directory: 'injection',
files: ['injection.js']
},
{
name: 'i18n',
displayName: 'Internationalization',
directory: 'i18n',
files: ['i18n.js']
}
];<|fim▁end|>
|
displayName: 'Clock',
|
<|file_name|>JavaCSVTransform.java<|end_file_name|><|fim▁begin|>package com.igonics.transformers.simple;
import java.io.PrintStream;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException;
import org.relique.jdbc.csv.CsvDriver;
import com.igonics.transformers.simple.helpers.CSVDirWalker;
import com.igonics.transformers.simple.helpers.CSVLogger;
/**
* @author gggordon <https://github.com/gggordon>
* @version 1.0.0
* @description Transforms sub-directories of similar CSV files into one database file
* @created 1.11.2015
*
* */
public class JavaCSVTransform {
/**
* @param baseDir Base Directory to Check for CSV Files
* @param dbFile Database File Name or Path
* @param subDirectoryDepth Recursive Depth to check for CSV Files. -1 will recurse indefinitely
* @param keepTemporaryFile Keep Temporary Buffer File or Delete
* */
public void createCSVDatabase(String baseDir, String dbFile,int subDirectoryDepth,boolean keepTemporaryFile){
final String BASE_DIR =baseDir==null? System.getProperty("user.dir") + "\\dataFiles" : baseDir;
final String DB_FILE = dbFile==null?"DB-" + System.currentTimeMillis() + ".csv":dbFile;
long startTime = System.currentTimeMillis();
CSVLogger.info("Base Dir : " + BASE_DIR);
try {
CSVDirWalker dirWalker = new CSVDirWalker(BASE_DIR, subDirectoryDepth);
//Process Directories
dirWalker.start();
CSVLogger.debug("Column Names : " + dirWalker.getHeader());
CSVLogger.info("Temporary Buffer File Complete. Starting Database Queries");
// Writing to database
// Load the driver.
Class.forName("org.relique.jdbc.csv.CsvDriver");
// Create a connection. The first command line parameter is the directory containing the .csv files.
Connection conn = DriverManager.getConnection("jdbc:relique:csv:"
+ System.getProperty("user.dir"));
<|fim▁hole|> // Create a Statement object to execute the query with.
Statement stmt = conn.createStatement();
ResultSet results = stmt.executeQuery("SELECT * FROM "
+ dirWalker.getTempBufferPath().replaceAll(".csv", ""));
CSVLogger.info("Retrieved Records From Temporary File");
// Dump out the results to a CSV file with the same format
// using CsvJdbc helper function
CSVLogger.info("Writing Records to database file");
long databaseSaveStartTime = System.currentTimeMillis();
//Create redirect stream to database file
PrintStream printStream = new PrintStream(DB_FILE);
//print column headings
printStream.print(dirWalker.getHeader()+System.lineSeparator());
CsvDriver.writeToCsv(results, printStream, false);
CSVLogger.info("Time taken to save records to database (ms): "+(System.currentTimeMillis() - databaseSaveStartTime));
//delete temporary file
if(!keepTemporaryFile){
CSVLogger.info("Removing Temporary File");
dirWalker.removeTemporaryFile();
}
//Output Program Execution Completed
CSVLogger.info("Total execution time (ms) : "
+ (System.currentTimeMillis() - startTime)
+ " | Approx Size (bytes) : "
+ dirWalker.getTotalBytesRead());
} catch (Exception ioe) {
CSVLogger.error(ioe.getMessage(), ioe);
}
}
// TODO: Modularize Concepts
public static void main(String args[]) {
//Parse Command Line Options
Options opts = new Options();
HelpFormatter formatter = new HelpFormatter();
opts.addOption("d", "dir", false, "Base Directory of CSV files. Default : Current Directory");
opts.addOption("db", "database", false, "Database File Name. Default DB-{timestamp}.csv");
opts.addOption("depth", "depth", false, "Recursive Depth. Set -1 to recurse indefintely. Default : -1");
opts.addOption("keepTemp",false,"Keeps Temporary file. Default : false");
opts.addOption("h", "help", false, "Display Help");
try {
CommandLine cmd = new DefaultParser().parse(opts,args);
if(cmd.hasOption("h") || cmd.hasOption("help")){
formatter.printHelp( "javacsvtransform", opts );
return;
}
//Create CSV Database With Command Line Options or Defaults
new JavaCSVTransform().createCSVDatabase(cmd.getOptionValue("d"), cmd.getOptionValue("db"),Integer.parseInt(cmd.getOptionValue("depth", "-1")), cmd.hasOption("keepTemp"));
} catch (ParseException e) {
formatter.printHelp( "javacsvtransform", opts );
}
}
}<|fim▁end|>
| |
<|file_name|>TestUtils.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2015 IBM Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*******************************************************************************/
package com.ibm.ws.lars.rest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.HashMap;
import java.util.Map;
<|fim▁hole|> *
*/
public class TestUtils {
/**
* Reads the specified InputStream and returns a byte array containing all the bytes read.
*/
public static byte[] slurp(InputStream is) throws IOException {
byte[] buffer = new byte[1024];
int length;
ByteArrayOutputStream baos = new ByteArrayOutputStream();
while ((length = is.read(buffer)) != -1) {
baos.write(buffer, 0, length);
}
return baos.toByteArray();
}
/**
* Assert that an AssetList contains exactly the given list of assets
* <p>
* This method assumes that all assets have an ID and there are no duplicates in the asset list.
*/
public static void assertAssetList(AssetList list, Asset... assets) {
Map<String, Asset> assetIdMap = new HashMap<>();
for (Asset asset : assets) {
if (assetIdMap.put(asset.get_id(), asset) != null) {
throw new AssertionError("Duplicate found in list of expected assets:\n" + asset.toJson());
}
}
for (Asset asset : list) {
if (assetIdMap.remove(asset.get_id()) == null) {
throw new AssertionError("Unexpected asset found in the asset list:\n" + asset.toJson());
}
}
if (!assetIdMap.isEmpty()) {
StringBuilder message = new StringBuilder("Assets missing from asset list:\n");
for (Asset asset : assetIdMap.values()) {
message.append(asset.toJson());
message.append("\n");
}
throw new AssertionError(message.toString());
}
}
}<|fim▁end|>
|
import com.ibm.ws.lars.rest.model.Asset;
import com.ibm.ws.lars.rest.model.AssetList;
/**
|
<|file_name|>RGA.py<|end_file_name|><|fim▁begin|>import time
import numpy
from rga_telnet import *
# Connecting to RGA - RGA(HOST,PORT)
class RGA:
scan = True # This is used for stop of peak scan - if set to False
status = [0 for col in range(4)] # Status of the device, look in rga_status method
showReadout = True # This one is responsible for the text output from RGA
# Class constructor
def __init__(self, host, port):
print("Starting connection with RGA: ")
self.rga_id03 = RgaTelnet(host, port)
out = self.rga_readout(1, True)
if out.find("MKSRGA Single") > -1:
self.status[0] = 1
print("List of available sensors: ")
self.rga_id03.write("Sensors\n")
out = self.rga_readout(1, True)
out = out.replace("\r", "") # Removing \r, \n form the output
out = out.replace("\n", "")
out = out.split(' ')
out[:] = (i for i in out if i != '') # Removing empty fields
print("Status of sensors with RGA: ")
self.rga_id03.write("SensorState\n")
self.rga_readout(1, True)
print("Selecting sensor: ")
sensor = "Select " + str(out[7]) + "\n"
self.rga_id03.write(sensor)
self.rga_readout(1, True)
<|fim▁hole|> print("Taking control over the sensor: ")
self.rga_id03.write("Control \"RGA python server\" \"1.0\" \n")
out = self.rga_readout(1, True)
if out.find("Control OK") > -1:
self.status[1] = 1
# Read output
def rga_readout(self, timeout, show):
out = "Nothing"
print_output = self.showReadout and show
if print_output:
out = self.rga_id03.read("\r\r", timeout)
print(out)
elif print_output:
out = self.rga_id03.read("\r\r", timeout)
return out
# Release
def rga_release(self):
print("Release of the sensor: ")
self.rga_id03.write("Release\n")
self.rga_readout(1, True)
self.status[1] = 0
self.status[0] = 0
# Filament control
def rga_filament(self, state):
if state == "On":
self.rga_id03.write("FilamentControl On\n")
time.sleep(5)
for i in range(3):
self.rga_readout(1, True) # Little bit robust but works
self.status[2] = 1
elif state == "Off":
self.rga_id03.write("FilamentControl Off\n")
time.sleep(5)
for i in range(3):
self.rga_readout(1, True)
self.status[2] = 0
else:
print("Wrong filament input")
# Single peaks scan
def rga_peakscan(self, mass_selected):
global mass_read
mass_read = numpy.array([0, 0, 0])
# Here we convert string to numbers- selecting masses to scan from input
mass_selected = [int(i) for i in mass_selected]
print("Masses selected for scan :", mass_selected, "\n")
# Defining peak jump scan
print("Add peak jump measurement: ")
self.rga_id03.write("AddPeakJump Peak1 PeakCenter 2 0 0 0\n")
self.rga_readout(1, True)
# Adding masses to scan
for i in range(len(mass_selected)):
self.rga_id03.write("MeasurementAddMass " + str(mass_selected[i]) + "\n") # Here we again convert number to string - just for training
self.rga_readout(1, True)
time.sleep(1)
# Adding scan to scan list
self.rga_id03.write("ScanAdd Peak1\n")
self.rga_readout(1, True)
# Starting scan
self.rga_id03.write("ScanStart 1\n")
self.status[3] = 1
while self.scan:
# Processing output string
# out = self.rga_id03.read_until("\r\r", 1)
out = self.rga_readout(1, True)
out = out.split(' ')
out[:] = (i for i in out if i != '')
# If the list length is 3, it corresponds to one of measured masses
if len(out) == 3 and out[0] == "MassReading":
new_row = [time.time(), float(out[1]), float(out[2])] # The row is : time, mass number, mass pressure
mass_read = numpy.vstack([mass_read, new_row]) # Adding measured value to array
if float(out[1]) == mass_selected[-1]: # When last mass value of scan is read , restart scan
self.rga_id03.write("ScanResume 1\n")
# Stop scan
self.rga_id03.write("ScanStop\n")
print(self.rga_id03.read("never", 1)) # Collect all garbage output
print("Mass read stop...")
self.status[3] = 0
self.scan = True
# Stop scan
def rga_peakscan_stop(self):
if self.scan:
self.scan = False
else:
print("Rga is not scanning, nothing to stop")
# Read one mass
def rga_onemass(self, one_mass):
find_mass = numpy.nonzero(mass_read == one_mass)
mass_found = mass_read[find_mass[0], :]
out = [int(mass_found[-1, 0]), int(mass_found[-1, 1]), mass_found[-1, 2]]
return out
def rga_status(self):
status_str = []
status_str.append([["not connected"], ["connected"], ["RGA connection : "]])
status_str.append([["not controlled"], ["controlled"], ["RGA control : "]])
status_str.append([["off"], ["on"], ["Filament status :"]])
status_str.append([["idle"], ["running"], ["Scan status: "]])
for i in range(4):
print("".join(map(str, (status_str[i][2]))) + "".join(map(str, (status_str[i][self.status[i]]))))
if __name__ == "__main__":
rga_eh1 = RGA("rga-id03-eh1", 10014)
rga_eh1.rga_release()<|fim▁end|>
| |
<|file_name|>icontrol_driver.py<|end_file_name|><|fim▁begin|># coding=utf-8#
# Copyright (c) 2014-2018, F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import datetime
import hashlib
import json
import logging as std_logging
import os
import urllib
from eventlet import greenthread
from time import strftime
from time import time
from requests import HTTPError
from oslo_config import cfg
from oslo_log import helpers as log_helpers
from oslo_log import log as logging
from oslo_utils import importutils
from f5.bigip import ManagementRoot
from f5_openstack_agent.lbaasv2.drivers.bigip.cluster_manager import \
ClusterManager
from f5_openstack_agent.lbaasv2.drivers.bigip import constants_v2 as f5const
from f5_openstack_agent.lbaasv2.drivers.bigip.esd_filehandler import \
EsdTagProcessor
from f5_openstack_agent.lbaasv2.drivers.bigip import exceptions as f5ex
from f5_openstack_agent.lbaasv2.drivers.bigip.lbaas_builder import \
LBaaSBuilder
from f5_openstack_agent.lbaasv2.drivers.bigip.lbaas_driver import \
LBaaSBaseDriver
from f5_openstack_agent.lbaasv2.drivers.bigip import network_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.network_service import \
NetworkServiceBuilder
from f5_openstack_agent.lbaasv2.drivers.bigip import resource_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.service_adapter import \
ServiceModelAdapter
from f5_openstack_agent.lbaasv2.drivers.bigip import ssl_profile
from f5_openstack_agent.lbaasv2.drivers.bigip import stat_helper
from f5_openstack_agent.lbaasv2.drivers.bigip.system_helper import \
SystemHelper
from f5_openstack_agent.lbaasv2.drivers.bigip.tenants import \
BigipTenantManager
from f5_openstack_agent.lbaasv2.drivers.bigip.utils import serialized
from f5_openstack_agent.lbaasv2.drivers.bigip.virtual_address import \
VirtualAddress
LOG = logging.getLogger(__name__)
NS_PREFIX = 'qlbaas-'
__VERSION__ = '0.1.1'
# configuration objects specific to iControl driver
# XXX see /etc/neutron/services/f5/f5-openstack-agent.ini
OPTS = [ # XXX maybe we should make this a dictionary
cfg.StrOpt(
'bigiq_hostname',
help='The hostname (name or IP address) to use for the BIG-IQ host'
),
cfg.StrOpt(
'bigiq_admin_username',
default='admin',
help='The admin username to use for BIG-IQ authentication',
),
cfg.StrOpt(
'bigiq_admin_password',
default='[Provide password in config file]',
secret=True,
help='The admin password to use for BIG-IQ authentication'
),
cfg.StrOpt(
'openstack_keystone_uri',
default='http://192.0.2.248:5000/',
help='The admin password to use for BIG-IQ authentication'
),
cfg.StrOpt(
'openstack_admin_username',
default='admin',
help='The admin username to use for authentication '
'with the Keystone service'
),
cfg.StrOpt(
'openstack_admin_password',
default='[Provide password in config file]',
secret=True,
help='The admin password to use for authentication'
' with the Keystone service'
),
cfg.StrOpt(
'bigip_management_username',
default='admin',
help='The admin username that the BIG-IQ will use to manage '
'discovered BIG-IPs'
),
cfg.StrOpt(
'bigip_management_password',
default='[Provide password in config file]',
secret=True,
help='The admin password that the BIG-IQ will use to manage '
'discovered BIG-IPs'
),
cfg.StrOpt(
'f5_device_type', default='external',
help='What type of device onboarding'
),
cfg.StrOpt(
'f5_ha_type', default='pair',
help='Are we standalone, pair(active/standby), or scalen'
),
cfg.ListOpt(
'f5_external_physical_mappings', default=['default:1.1:True'],
help='Mapping between Neutron physical_network to interfaces'
),
cfg.StrOpt(
'f5_vtep_folder', default='Common',
help='Folder for the VTEP SelfIP'
),
cfg.StrOpt(
'f5_vtep_selfip_name', default=None,
help='Name of the VTEP SelfIP'
),
cfg.ListOpt(
'advertised_tunnel_types', default=['vxlan'],
help='tunnel types which are advertised to other VTEPs'
),
cfg.BoolOpt(
'f5_populate_static_arp', default=False,
help='create static arp entries based on service entries'
),
cfg.StrOpt(
'vlan_binding_driver',
default=None,
help='driver class for binding vlans to device ports'
),
cfg.StrOpt(
'interface_port_static_mappings',
default=None,
help='JSON encoded static mapping of'
'devices to list of '
'interface and port_id'
),
cfg.StrOpt(
'l3_binding_driver',
default=None,
help='driver class for binding l3 address to l2 ports'
),
cfg.StrOpt(
'l3_binding_static_mappings', default=None,
help='JSON encoded static mapping of'
'subnet_id to list of '
'port_id, device_id list.'
),
cfg.BoolOpt(
'f5_route_domain_strictness', default=False,
help='Strict route domain isolation'
),
cfg.BoolOpt(
'f5_common_networks', default=False,
help='All networks defined under Common partition'
),
cfg.BoolOpt(
'f5_common_external_networks', default=True,
help='Treat external networks as common'
),
cfg.BoolOpt(
'external_gateway_mode', default=False,
help='All subnets have an external l3 route on gateway'
),
cfg.StrOpt(
'icontrol_vcmp_hostname',
help='The hostname (name or IP address) to use for vCMP Host '
'iControl access'
),
cfg.StrOpt(
'icontrol_hostname',
default="10.190.5.7",
help='The hostname (name or IP address) to use for iControl access'
),
cfg.StrOpt(
'icontrol_username', default='admin',
help='The username to use for iControl access'
),
cfg.StrOpt(
'icontrol_password', default='admin', secret=True,
help='The password to use for iControl access'
),
cfg.IntOpt(
'icontrol_connection_timeout', default=30,
help='How many seconds to timeout a connection to BIG-IP'
),
cfg.IntOpt(
'icontrol_connection_retry_interval', default=10,
help='How many seconds to wait between retry connection attempts'
),
cfg.DictOpt(
'common_network_ids', default={},
help='network uuid to existing Common networks mapping'
),
cfg.StrOpt(
'icontrol_config_mode', default='objects',
help='Whether to use iapp or objects for bigip configuration'
),
cfg.IntOpt(
'max_namespaces_per_tenant', default=1,
help='How many routing tables the BIG-IP will allocate per tenant'
' in order to accommodate overlapping IP subnets'
),
cfg.StrOpt(
'cert_manager',
default=None,
help='Class name of the certificate mangager used for retrieving '
'certificates and keys.'
),
cfg.StrOpt(
'auth_version',
default=None,
help='Keystone authentication version (v2 or v3) for Barbican client.'
),
cfg.StrOpt(
'os_project_id',
default='service',
help='OpenStack project ID.'
),
cfg.StrOpt(
'os_auth_url',
default=None,
help='OpenStack authentication URL.'
),
cfg.StrOpt(
'os_username',
default=None,
help='OpenStack user name for Keystone authentication.'
),
cfg.StrOpt(
'os_user_domain_name',
default=None,
help='OpenStack user domain name for Keystone authentication.'
),
cfg.StrOpt(
'os_project_name',
default=None,
help='OpenStack project name for Keystone authentication.'
),
cfg.StrOpt(
'os_project_domain_name',
default=None,
help='OpenStack domain name for Keystone authentication.'
),
cfg.StrOpt(
'os_password',
default=None,
help='OpenStack user password for Keystone authentication.'
),
cfg.StrOpt(
'f5_network_segment_physical_network', default=None,
help='Name of physical network to use for discovery of segment ID'
),
cfg.StrOpt(
'unlegacy_setting_placeholder', default=None,
help='use this setting to separate legacy with hw/etc on agent side'
),
cfg.IntOpt(
'f5_network_segment_polling_interval', default=10,
help='Seconds between periodic scans for disconnected virtual servers'
),
cfg.IntOpt(
'f5_network_segment_gross_timeout', default=300,
help='Seconds to wait for a virtual server to become connected'
),
cfg.StrOpt(
'f5_parent_ssl_profile',
default='clientssl',
help='Parent profile used when creating client SSL profiles '
'for listeners with TERMINATED_HTTPS protocols.'
),
cfg.StrOpt(
'os_tenant_name',
default=None,
help='OpenStack tenant name for Keystone authentication (v2 only).'
),
cfg.BoolOpt(
'trace_service_requests',
default=False,
help='Log service object.'
),
cfg.BoolOpt(
'report_esd_names_in_agent',
default=False,
help='whether or not to add valid esd names during report.'
)
]
def is_operational(method):
# Decorator to check we are operational before provisioning.
def wrapper(*args, **kwargs):
instance = args[0]
if instance.operational:
try:
return method(*args, **kwargs)
except IOError as ioe:
LOG.error('IO Error detected: %s' % method.__name__)
LOG.error(str(ioe))
raise ioe
else:
LOG.error('Cannot execute %s. Not operational. Re-initializing.'
% method.__name__)
instance._init_bigips()
return wrapper
class iControlDriver(LBaaSBaseDriver):
"""Control service deployment."""
# pzhang(NOTE) here: we only sync, CRUD objs in below status
positive_plugin_const_state = \
tuple([f5const.F5_PENDING_CREATE,
f5const.F5_PENDING_UPDATE])
def __init__(self, conf, registerOpts=True):
# The registerOpts parameter allows a test to
# turn off config option handling so that it can
# set the options manually instead.
super(iControlDriver, self).__init__(conf)
self.conf = conf
if registerOpts:
self.conf.register_opts(OPTS)
self.initialized = False
self.hostnames = None
self.device_type = conf.f5_device_type
self.plugin_rpc = None # overrides base, same value
self.agent_report_state = None # overrides base, same value
self.operational = False # overrides base, same value
self.driver_name = 'f5-lbaasv2-icontrol'
#
# BIG-IP containers
#
# BIG-IPs which currectly active
self.__bigips = {}
self.__last_connect_attempt = None
# HA and traffic group validation
self.ha_validated = False
self.tg_initialized = False
# traffic groups discovered from BIG-IPs for service placement
self.__traffic_groups = []
# base configurations to report to Neutron agent state reports
self.agent_configurations = {} # overrides base, same value
self.agent_configurations['device_drivers'] = [self.driver_name]
self.agent_configurations['icontrol_endpoints'] = {}
# to store the verified esd names
self.esd_names = []
# service component managers
self.tenant_manager = None
self.cluster_manager = None
self.system_helper = None
self.lbaas_builder = None
self.service_adapter = None
self.vlan_binding = None
self.l3_binding = None
self.cert_manager = None # overrides register_OPTS
# server helpers
self.stat_helper = stat_helper.StatHelper()
self.network_helper = network_helper.NetworkHelper()
# f5-sdk helpers
self.vs_manager = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual)
self.pool_manager = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool)
try:
# debug logging of service requests recieved by driver
if self.conf.trace_service_requests:
path = '/var/log/neutron/service/'
if not os.path.exists(path):
os.makedirs(path)
self.file_name = path + strftime("%H%M%S-%m%d%Y") + '.json'
with open(self.file_name, 'w') as fp:
fp.write('[{}] ')
# driver mode settings - GRM vs L2 adjacent
if self.conf.f5_global_routed_mode:
LOG.info('WARNING - f5_global_routed_mode enabled.'
' There will be no L2 or L3 orchestration'
' or tenant isolation provisioned. All vips'
' and pool members must be routable through'
' pre-provisioned SelfIPs.')
self.conf.use_namespaces = False
self.conf.f5_snat_mode = True
self.conf.f5_snat_addresses_per_subnet = 0
self.agent_configurations['tunnel_types'] = []
self.agent_configurations['bridge_mappings'] = {}
else:
self.agent_configurations['tunnel_types'] = \
self.conf.advertised_tunnel_types
for net_id in self.conf.common_network_ids:
LOG.debug('network %s will be mapped to /Common/%s'
% (net_id, self.conf.common_network_ids[net_id]))
self.agent_configurations['common_networks'] = \
self.conf.common_network_ids
LOG.debug('Setting static ARP population to %s'
% self.conf.f5_populate_static_arp)
self.agent_configurations['f5_common_external_networks'] = \
self.conf.f5_common_external_networks
f5const.FDB_POPULATE_STATIC_ARP = \
self.conf.f5_populate_static_arp
# parse the icontrol_hostname setting
self._init_bigip_hostnames()
# instantiate the managers
self._init_bigip_managers()
self.initialized = True
LOG.debug('iControlDriver loaded successfully')
except Exception as exc:
LOG.error("exception in intializing driver %s" % str(exc))
self._set_agent_status(False)
def connect(self):
# initialize communications wiht BIG-IP via iControl
try:
self._init_bigips()
except Exception as exc:
LOG.error("exception in intializing communications to BIG-IPs %s"
% str(exc))
self._set_agent_status(False)
def get_valid_esd_names(self):
LOG.debug("verified esd names in get_valid_esd_names():")
LOG.debug(self.esd_names)
return self.esd_names
def _init_bigip_managers(self):
if self.conf.vlan_binding_driver:
try:
self.vlan_binding = importutils.import_object(
self.conf.vlan_binding_driver, self.conf, self)
except ImportError:
LOG.error('Failed to import VLAN binding driver: %s'
% self.conf.vlan_binding_driver)
if self.conf.l3_binding_driver:
try:
self.l3_binding = importutils.import_object(
self.conf.l3_binding_driver, self.conf, self)
except ImportError:
LOG.error('Failed to import L3 binding driver: %s'
% self.conf.l3_binding_driver)
else:
LOG.debug('No L3 binding driver configured.'
' No L3 binding will be done.')
if self.conf.cert_manager:
try:
self.cert_manager = importutils.import_object(
self.conf.cert_manager, self.conf)
except ImportError as import_err:
LOG.error('Failed to import CertManager: %s.' %
import_err.message)
raise
except Exception as err:
LOG.error('Failed to initialize CertManager. %s' % err.message)
# re-raise as ImportError to cause agent exit
raise ImportError(err.message)
self.service_adapter = ServiceModelAdapter(self.conf)
self.tenant_manager = BigipTenantManager(self.conf, self)
self.cluster_manager = ClusterManager()
self.system_helper = SystemHelper()
self.lbaas_builder = LBaaSBuilder(self.conf, self)
if self.conf.f5_global_routed_mode:
self.network_builder = None
else:
self.network_builder = NetworkServiceBuilder(
self.conf.f5_global_routed_mode,
self.conf,
self,
self.l3_binding)
def _init_bigip_hostnames(self):
# Validate and parse bigip credentials
if not self.conf.icontrol_hostname:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_hostname',
opt_value='valid hostname or IP address'
)
if not self.conf.icontrol_username:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_username',
opt_value='valid username'
)
if not self.conf.icontrol_password:
raise f5ex.F5InvalidConfigurationOption(
opt_name='icontrol_password',
opt_value='valid password'
)
self.hostnames = self.conf.icontrol_hostname.split(',')
self.hostnames = [item.strip() for item in self.hostnames]
self.hostnames = sorted(self.hostnames)
# initialize per host agent_configurations
for hostname in self.hostnames:
self.__bigips[hostname] = bigip = type('', (), {})()
bigip.hostname = hostname
bigip.status = 'creating'
bigip.status_message = 'creating BIG-IP from iControl hostnames'
bigip.device_interfaces = dict()
self.agent_configurations[
'icontrol_endpoints'][hostname] = {}
self.agent_configurations[
'icontrol_endpoints'][hostname]['failover_state'] = \
'undiscovered'
self.agent_configurations[
'icontrol_endpoints'][hostname]['status'] = 'unknown'
self.agent_configurations[
'icontrol_endpoints'][hostname]['status_message'] = ''
    def _init_bigips(self):
        """Connect every configured BIG-IP and drive it to 'active'.

        No-op when the driver is already operational.  Each host is
        opened, HA-validated (once), initialized, and then marked
        'active' or 'error'.  Any unexpected failure is logged and
        re-raised.
        """
        # Connect to all BIG-IPs
        if self.operational:
            LOG.debug('iControl driver reports connection is operational')
            return
        LOG.debug('initializing communications to BIG-IPs')
        try:
            # Align the urllib3 (requests) logger verbosity with the
            # agent's own debug setting.
            if not self.conf.debug:
                requests_log = std_logging.getLogger(
                    "requests.packages.urllib3")
                requests_log.setLevel(std_logging.ERROR)
                requests_log.propagate = False
            else:
                requests_log = std_logging.getLogger(
                    "requests.packages.urllib3")
                requests_log.setLevel(std_logging.DEBUG)
                requests_log.propagate = True
            self.__last_connect_attempt = datetime.datetime.now()
            for hostname in self.hostnames:
                # connect to each BIG-IP and set it status
                bigip = self._open_bigip(hostname)
                if bigip.status == 'connected':
                    # set the status down until we assure initialized
                    bigip.status = 'initializing'
                    bigip.status_message = 'initializing HA viability'
                    LOG.debug('initializing HA viability %s' % hostname)
                    device_group_name = None
                    # HA validation and traffic-group discovery only need
                    # to succeed once; subsequent hosts reuse the result.
                    if not self.ha_validated:
                        device_group_name = self._validate_ha(bigip)
                        LOG.debug('HA validated from %s with DSG %s' %
                                  (hostname, device_group_name))
                        self.ha_validated = True
                    if not self.tg_initialized:
                        self._init_traffic_groups(bigip)
                        LOG.debug('learned traffic groups from %s as %s' %
                                  (hostname, self.__traffic_groups))
                        self.tg_initialized = True
                    LOG.debug('initializing bigip %s' % hostname)
                    self._init_bigip(bigip, hostname, device_group_name)
                    LOG.debug('initializing agent configurations %s'
                              % hostname)
                    self._init_agent_config(bigip)
                    # Assure basic BIG-IP HA is operational
                    LOG.debug('validating HA state for %s' % hostname)
                    bigip.status = 'validating_HA'
                    bigip.status_message = 'validating the current HA state'
                    if self._validate_ha_operational(bigip):
                        LOG.debug('setting status to active for %s' % hostname)
                        bigip.status = 'active'
                        bigip.status_message = 'BIG-IP ready for provisioning'
                        self._post_init()
                    else:
                        LOG.debug('setting status to error for %s' % hostname)
                        bigip.status = 'error'
                        bigip.status_message = 'BIG-IP is not operational'
                        self._set_agent_status(False)
                else:
                    LOG.error('error opening BIG-IP %s - %s:%s'
                              % (hostname, bigip.status, bigip.status_message))
                    self._set_agent_status(False)
        except Exception as exc:
            LOG.error('Invalid agent configuration: %s' % exc.message)
            raise
        self._set_agent_status(force_resync=True)
def _init_errored_bigips(self):
try:
errored_bigips = self.get_errored_bigips_hostnames()
if errored_bigips:
LOG.debug('attempting to recover %s BIG-IPs' %
len(errored_bigips))
for hostname in errored_bigips:
# try to connect and set status
bigip = self._open_bigip(hostname)
if bigip.status == 'connected':
# set the status down until we assure initialized
bigip.status = 'initializing'
bigip.status_message = 'initializing HA viability'
LOG.debug('initializing HA viability %s' % hostname)
LOG.debug('proceeding to initialize %s' % hostname)
device_group_name = None
if not self.ha_validated:
device_group_name = self._validate_ha(bigip)
LOG.debug('HA validated from %s with DSG %s' %
(hostname, device_group_name))
self.ha_validated = True
if not self.tg_initialized:
self._init_traffic_groups(bigip)
LOG.debug('known traffic groups initialized',
' from %s as %s' %
(hostname, self.__traffic_groups))
self.tg_initialized = True
LOG.debug('initializing bigip %s' % hostname)
self._init_bigip(bigip, hostname, device_group_name)
LOG.debug('initializing agent configurations %s'
% hostname)
self._init_agent_config(bigip)
# Assure basic BIG-IP HA is operational
LOG.debug('validating HA state for %s' % hostname)
bigip.status = 'validating_HA'
bigip.status_message = \
'validating the current HA state'
if self._validate_ha_operational(bigip):
LOG.debug('setting status to active for %s'
% hostname)
bigip.status = 'active'
bigip.status_message = \
'BIG-IP ready for provisioning'
self._post_init()
self._set_agent_status(True)
else:
LOG.debug('setting status to error for %s'
% hostname)
bigip.status = 'error'
bigip.status_message = 'BIG-IP is not operational'
self._set_agent_status(False)
else:
LOG.debug('there are no BIG-IPs with error status')
except Exception as exc:
LOG.error('Invalid agent configuration: %s' % exc.message)
raise
def _open_bigip(self, hostname):
# Open bigip connection
try:
bigip = self.__bigips[hostname]
if bigip.status not in ['creating', 'error']:
LOG.debug('BIG-IP %s status invalid %s to open a connection'
% (hostname, bigip.status))
return bigip
bigip.status = 'connecting'
bigip.status_message = 'requesting iControl endpoint'
LOG.info('opening iControl connection to %s @ %s' %
(self.conf.icontrol_username, hostname))
bigip = ManagementRoot(hostname,
self.conf.icontrol_username,
self.conf.icontrol_password,
timeout=f5const.DEVICE_CONNECTION_TIMEOUT,
debug=self.conf.debug)
bigip.status = 'connected'
bigip.status_message = 'connected to BIG-IP'
self.__bigips[hostname] = bigip
return bigip
except Exception as exc:
LOG.error('could not communicate with ' +
'iControl device: %s' % hostname)
# since no bigip object was created, create a dummy object
# so we can store the status and status_message attributes
errbigip = type('', (), {})()
errbigip.hostname = hostname
errbigip.status = 'error'
errbigip.status_message = str(exc)[:80]
self.__bigips[hostname] = errbigip
return errbigip
    def _init_bigip(self, bigip, hostname, check_group_name=None):
        """Validate a connected BIG-IP and cache device facts on it.

        Checks version, extra provisioning memory, HA/sync mode and the
        required common networks, then records device name, MACs and
        assurance caches on the bigip object.  On any failure the device
        is marked 'error' and the exception re-raised.
        """
        # Prepare a bigip for usage
        try:
            major_version, minor_version = self._validate_bigip_version(
                bigip, hostname)
            device_group_name = None
            # Require enough extra management-plane memory provisioned.
            extramb = self.system_helper.get_provision_extramb(bigip)
            if int(extramb) < f5const.MIN_EXTRA_MB:
                raise f5ex.ProvisioningExtraMBValidateFailed(
                    'Device %s BIG-IP not provisioned for '
                    'management LARGE.' % hostname)
            # The configured HA type must agree with the device's
            # reported sync status.
            if self.conf.f5_ha_type == 'pair' and \
                    self.cluster_manager.get_sync_status(bigip) == \
                    'Standalone':
                raise f5ex.BigIPClusterInvalidHA(
                    'HA mode is pair and bigip %s in standalone mode'
                    % hostname)
            if self.conf.f5_ha_type == 'scalen' and \
                    self.cluster_manager.get_sync_status(bigip) == \
                    'Standalone':
                raise f5ex.BigIPClusterInvalidHA(
                    'HA mode is scalen and bigip %s in standalone mode'
                    % hostname)
            if self.conf.f5_ha_type != 'standalone':
                # Clustered modes require a sync-failover device group,
                # and it must match the group learned from other hosts.
                device_group_name = \
                    self.cluster_manager.get_device_group(bigip)
                if not device_group_name:
                    raise f5ex.BigIPClusterInvalidHA(
                        'HA mode is %s and no sync failover '
                        'device group found for device %s.'
                        % (self.conf.f5_ha_type, hostname))
                if check_group_name and device_group_name != check_group_name:
                    raise f5ex.BigIPClusterInvalidHA(
                        'Invalid HA. Device %s is in device group'
                        ' %s but should be in %s.'
                        % (hostname, device_group_name, check_group_name))
                bigip.device_group_name = device_group_name
            if self.network_builder:
                # All configured common networks must already exist.
                for network in self.conf.common_network_ids.values():
                    if not self.network_builder.vlan_exists(bigip,
                                                            network,
                                                            folder='Common'):
                        raise f5ex.MissingNetwork(
                            'Common network %s on %s does not exist'
                            % (network, bigip.hostname))
            bigip.device_name = self.cluster_manager.get_device_name(bigip)
            bigip.mac_addresses = self.system_helper.get_mac_addresses(bigip)
            LOG.debug("Initialized BIG-IP %s with MAC addresses %s" %
                      (bigip.device_name, ', '.join(bigip.mac_addresses)))
            bigip.device_interfaces = \
                self.system_helper.get_interface_macaddresses_dict(bigip)
            # Per-device assurance caches, reset on (re)initialization.
            bigip.assured_networks = {}
            bigip.assured_tenant_snat_subnets = {}
            bigip.assured_gateway_subnets = []
            if self.conf.f5_ha_type != 'standalone':
                self.cluster_manager.disable_auto_sync(
                    device_group_name, bigip)
            # validate VTEP SelfIPs
            if not self.conf.f5_global_routed_mode:
                self.network_builder.initialize_tunneling(bigip)
            # Turn off tunnel syncing between BIG-IP
            # as our VTEPs properly use only local SelfIPs
            if self.system_helper.get_tunnel_sync(bigip) == 'enable':
                self.system_helper.set_tunnel_sync(bigip, enabled=False)
            LOG.debug('connected to iControl %s @ %s ver %s.%s'
                      % (self.conf.icontrol_username, hostname,
                         major_version, minor_version))
        except Exception as exc:
            bigip.status = 'error'
            bigip.status_message = str(exc)[:80]
            raise
        return bigip
    def _post_init(self):
        """Finish driver setup once at least one BIG-IP is connected."""
        # After we have a connection to the BIG-IPs, initialize vCMP
        # on all connected BIG-IPs
        if self.network_builder:
            self.network_builder.initialize_vcmp()
        self.agent_configurations['network_segment_physical_network'] = \
            self.conf.f5_network_segment_physical_network
        LOG.info('iControlDriver initialized to %d bigips with username:%s'
                 % (len(self.get_active_bigips()),
                    self.conf.icontrol_username))
        LOG.info('iControlDriver dynamic agent configurations:%s'
                 % self.agent_configurations)
        if self.vlan_binding:
            LOG.debug(
                'getting BIG-IP device interface for VLAN Binding')
            self.vlan_binding.register_bigip_interfaces()
        if self.l3_binding:
            LOG.debug('getting BIG-IP MAC Address for L3 Binding')
            self.l3_binding.register_bigip_mac_addresses()
        # endpoints = self.agent_configurations['icontrol_endpoints']
        # for ic_host in endpoints.keys():
        for hostbigip in self.get_all_bigips():
            # hostbigip = self.__bigips[ic_host]
            # Non-management MACs known to Neutron mark the device as
            # nova managed -- presumably the BIG-IP is itself a Nova
            # guest; confirm against deployment docs.
            mac_addrs = [mac_addr for interface, mac_addr in
                         hostbigip.device_interfaces.items()
                         if interface != "mgmt"]
            ports = self.plugin_rpc.get_ports_for_mac_addresses(
                mac_addresses=mac_addrs)
            if ports:
                self.agent_configurations['nova_managed'] = True
            else:
                self.agent_configurations['nova_managed'] = False
        if self.network_builder:
            self.network_builder.post_init()
        # read enhanced services definitions
        esd_dir = os.path.join(self.get_config_dir(), 'esd')
        esd = EsdTagProcessor(esd_dir)
        try:
            esd.process_esd(self.get_all_bigips())
            self.lbaas_builder.init_esd(esd)
            self.service_adapter.init_esd(esd)
            LOG.debug('esd details here after process_esd(): ')
            LOG.debug(esd)
            self.esd_names = esd.esd_dict.keys() or []
            LOG.debug('##### self.esd_names obtainded here:')
            LOG.debug(self.esd_names)
        except f5ex.esdJSONFileInvalidException as err:
            LOG.error("unable to initialize ESD. Error: %s.", err.message)
            self._set_agent_status(False)
def _validate_ha(self, bigip):
# if there was only one address supplied and
# this is not a standalone device, get the
# devices trusted by this device.
device_group_name = None
if self.conf.f5_ha_type == 'standalone':
if len(self.hostnames) != 1:
bigip.status = 'error'
bigip.status_message = \
'HA mode is standalone and %d hosts found.'\
% len(self.hostnames)
raise f5ex.BigIPClusterInvalidHA(
'HA mode is standalone and %d hosts found.'
% len(self.hostnames))
device_group_name = 'standalone'
elif self.conf.f5_ha_type == 'pair':
device_group_name = self.cluster_manager.\
get_device_group(bigip)
if len(self.hostnames) != 2:
mgmt_addrs = []
devices = self.cluster_manager.devices(bigip)
for device in devices:
mgmt_addrs.append(
self.cluster_manager.get_mgmt_addr_by_device(
bigip, device))
self.hostnames = mgmt_addrs
if len(self.hostnames) != 2:
bigip.status = 'error'
bigip.status_message = 'HA mode is pair and %d hosts found.' \
% len(self.hostnames)
raise f5ex.BigIPClusterInvalidHA(
'HA mode is pair and %d hosts found.'
% len(self.hostnames))
elif self.conf.f5_ha_type == 'scalen':
device_group_name = self.cluster_manager.\
get_device_group(bigip)
if len(self.hostnames) < 2:
mgmt_addrs = []
devices = self.cluster_manager.devices(bigip)
for device in devices:
mgmt_addrs.append(
self.cluster_manager.get_mgmt_addr_by_device(
bigip, device)
)
self.hostnames = mgmt_addrs
if len(self.hostnames) < 2:
bigip.status = 'error'
bigip.status_message = 'HA mode is scale and 1 hosts found.'
raise f5ex.BigIPClusterInvalidHA(
'HA mode is pair and 1 hosts found.')
return device_group_name
def _validate_ha_operational(self, bigip):
if self.conf.f5_ha_type == 'standalone':
return True
else:
# how many active BIG-IPs are there?
active_bigips = self.get_active_bigips()
if active_bigips:
sync_status = self.cluster_manager.get_sync_status(bigip)
if sync_status in ['Disconnected', 'Sync Failure']:
if len(active_bigips) > 1:
# the device should not be in the disconnected state
return False
if len(active_bigips) > 1:
# it should be in the same sync-failover group
# as the rest of the active bigips
device_group_name = \
self.cluster_manager.get_device_group(bigip)
for active_bigip in active_bigips:
adgn = self.cluster_manager.get_device_group(
active_bigip)
if not adgn == device_group_name:
return False
return True
else:
return True
def _init_agent_config(self, bigip):
# Init agent config
ic_host = {}
ic_host['version'] = self.system_helper.get_version(bigip)
ic_host['device_name'] = bigip.device_name
ic_host['platform'] = self.system_helper.get_platform(bigip)
ic_host['serial_number'] = self.system_helper.get_serial_number(bigip)
ic_host['status'] = bigip.status
ic_host['status_message'] = bigip.status_message
ic_host['failover_state'] = self.get_failover_state(bigip)
if hasattr(bigip, 'local_ip') and bigip.local_ip:
ic_host['local_ip'] = bigip.local_ip
else:
ic_host['local_ip'] = 'VTEP disabled'
self.agent_configurations['tunnel_types'] = list()
self.agent_configurations['icontrol_endpoints'][bigip.hostname] = \
ic_host
if self.network_builder:
self.agent_configurations['bridge_mappings'] = \
self.network_builder.interface_mapping
def _set_agent_status(self, force_resync=False):
for hostname in self.__bigips:
bigip = self.__bigips[hostname]
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'status'] = bigip.status
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'status_message'] = bigip.status_message
if self.conf.report_esd_names_in_agent:
LOG.debug('adding names to report:')
self.agent_configurations['esd_name'] = \
self.get_valid_esd_names()
# Policy - if any BIG-IP are active we're operational
if self.get_active_bigips():
self.operational = True
else:
self.operational = False
if self.agent_report_state:
self.agent_report_state(force_resync=force_resync)
def get_failover_state(self, bigip):
try:
if hasattr(bigip, 'tm'):
fs = bigip.tm.sys.dbs.db.load(name='failover.state')
bigip.failover_state = fs.value
return bigip.failover_state
else:
return 'error'
except Exception as exc:
LOG.exception('Error getting %s failover state' % bigip.hostname)
bigip.status = 'error'
bigip.status_message = str(exc)[:80]
self._set_agent_status(False)
return 'error'
def get_agent_configurations(self):
for hostname in self.__bigips:
bigip = self.__bigips[hostname]
if bigip.status == 'active':
failover_state = self.get_failover_state(bigip)
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'failover_state'] = failover_state
else:
self.agent_configurations[
'icontrol_endpoints'][bigip.hostname][
'failover_state'] = 'unknown'
self.agent_configurations['icontrol_endpoints'][
bigip.hostname]['status'] = bigip.status
self.agent_configurations['icontrol_endpoints'][
bigip.hostname]['status_message'] = bigip.status_message
self.agent_configurations['operational'] = \
self.operational
LOG.debug('agent configurations are: %s' % self.agent_configurations)
return dict(self.agent_configurations)
def recover_errored_devices(self):
# trigger a retry on errored BIG-IPs
try:
self._init_errored_bigips()
except Exception as exc:
LOG.error('Could not recover devices: %s' % exc.message)
def backend_integrity(self):
if self.operational:
return True
return False
def generate_capacity_score(self, capacity_policy=None):
"""Generate the capacity score of connected devices."""
if capacity_policy:
highest_metric = 0.0
highest_metric_name = None
my_methods = dir(self)
bigips = self.get_all_bigips()
for metric in capacity_policy:
func_name = 'get_' + metric
if func_name in my_methods:
max_capacity = int(capacity_policy[metric])
metric_func = getattr(self, func_name)
metric_value = 0
for bigip in bigips:
if bigip.status == 'active':
global_stats = \
self.stat_helper.get_global_statistics(bigip)
value = int(
metric_func(bigip=bigip,
global_statistics=global_stats)
)
LOG.debug('calling capacity %s on %s returned: %s'
% (func_name, bigip.hostname, value))
else:
value = 0
if value > metric_value:
metric_value = value
metric_capacity = float(metric_value) / float(max_capacity)
if metric_capacity > highest_metric:
highest_metric = metric_capacity
highest_metric_name = metric
else:
LOG.warn('capacity policy has method '
'%s which is not implemented in this driver'
% metric)
LOG.debug('capacity score: %s based on %s'
% (highest_metric, highest_metric_name))
return highest_metric
return 0
def set_context(self, context):
# Context to keep for database access
if self.network_builder:
self.network_builder.set_context(context)
def set_plugin_rpc(self, plugin_rpc):
# Provide Plugin RPC access
self.plugin_rpc = plugin_rpc
def set_tunnel_rpc(self, tunnel_rpc):
# Provide FDB Connector with ML2 RPC access
if self.network_builder:
self.network_builder.set_tunnel_rpc(tunnel_rpc)
def set_l2pop_rpc(self, l2pop_rpc):
# Provide FDB Connector with ML2 RPC access
if self.network_builder:
self.network_builder.set_l2pop_rpc(l2pop_rpc)
def set_agent_report_state(self, report_state_callback):
"""Set Agent Report State."""
self.agent_report_state = report_state_callback
def service_exists(self, service):
return self._service_exists(service)
def flush_cache(self):
# Remove cached objects so they can be created if necessary
for bigip in self.get_all_bigips():
bigip.assured_networks = {}
bigip.assured_tenant_snat_subnets = {}
bigip.assured_gateway_subnets = []
    @serialized('get_all_deployed_loadbalancers')
    @is_operational
    def get_all_deployed_loadbalancers(self, purge_orphaned_folders=False):
        """Map deployed loadbalancer ids to tenant and BIG-IP hostnames.

        Scans every agent-prefixed folder on every BIG-IP for virtual
        addresses.  Folders that contain none (after a retry) may be
        purged when purge_orphaned_folders is True.
        """
        LOG.debug('getting all deployed loadbalancers on BIG-IPs')
        deployed_lb_dict = {}
        for bigip in self.get_all_bigips():
            folders = self.system_helper.get_folders(bigip)
            for folder in folders:
                tenant_id = folder[len(self.service_adapter.prefix):]
                if str(folder).startswith(self.service_adapter.prefix):
                    resource = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.virtual_address)
                    deployed_lbs = resource.get_resources(bigip, folder)
                    if deployed_lbs:
                        for lb in deployed_lbs:
                            lb_id = lb.name[len(self.service_adapter.prefix):]
                            if lb_id in deployed_lb_dict:
                                deployed_lb_dict[lb_id][
                                    'hostnames'].append(bigip.hostname)
                            else:
                                deployed_lb_dict[lb_id] = {
                                    'id': lb_id,
                                    'tenant_id': tenant_id,
                                    'hostnames': [bigip.hostname]
                                }
                    else:
                        # delay to assure we are not in the tenant creation
                        # process before a virtual address is created.
                        greenthread.sleep(10)
                        deployed_lbs = resource.get_resources(bigip, folder)
                        if deployed_lbs:
                            for lb in deployed_lbs:
                                lb_id = lb.name[
                                    len(self.service_adapter.prefix):]
                                # NOTE(review): this retry path omits the
                                # 'hostnames' key present in the main path
                                # -- confirm callers tolerate its absence.
                                deployed_lb_dict[lb_id] = \
                                    {'id': lb_id, 'tenant_id': tenant_id}
                        else:
                            # Orphaned folder!
                            if purge_orphaned_folders:
                                try:
                                    self.system_helper.purge_folder_contents(
                                        bigip, folder)
                                    self.system_helper.purge_folder(
                                        bigip, folder)
                                    LOG.error('orphaned folder %s on %s' %
                                              (folder, bigip.hostname))
                                except Exception as exc:
                                    LOG.error('error purging folder %s: %s' %
                                              (folder, str(exc)))
        return deployed_lb_dict
    @serialized('get_all_deployed_listeners')
    @is_operational
    def get_all_deployed_listeners(self, expand_subcollections=False):
        """Map deployed virtual servers (listeners) to their details.

        Returns {listener_id: {'id', 'tenant_id', 'hostnames',
        'l7_policy'}} gathered from every agent-prefixed folder on
        every BIG-IP.
        """
        LOG.debug('getting all deployed listeners on BIG-IPs')
        deployed_virtual_dict = {}
        for bigip in self.get_all_bigips():
            folders = self.system_helper.get_folders(bigip)
            for folder in folders:
                tenant_id = folder[len(self.service_adapter.prefix):]
                if str(folder).startswith(self.service_adapter.prefix):
                    resource = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.virtual)
                    deployed_listeners = resource.get_resources(
                        bigip, folder, expand_subcollections)
                    if deployed_listeners:
                        for virtual in deployed_listeners:
                            virtual_id = \
                                virtual.name[len(self.service_adapter.prefix):]
                            l7_policy = ''
                            # Only the first attached policy's full path
                            # is reported.
                            if hasattr(virtual, 'policiesReference') and \
                                    'items' in virtual.policiesReference:
                                l7_policy = \
                                    virtual.policiesReference['items'][0]
                                l7_policy = l7_policy['fullPath']
                            if virtual_id in deployed_virtual_dict:
                                deployed_virtual_dict[virtual_id][
                                    'hostnames'].append(bigip.hostname)
                            else:
                                deployed_virtual_dict[virtual_id] = {
                                    'id': virtual_id,
                                    'tenant_id': tenant_id,
                                    'hostnames': [bigip.hostname],
                                    'l7_policy': l7_policy
                                }
        return deployed_virtual_dict
@serialized('purge_orphaned_nodes')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_nodes(self, tenant_members):
node_helper = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.node)
node_dict = dict()
for bigip in self.get_all_bigips():
for tenant_id, members in tenant_members.iteritems():
partition = self.service_adapter.prefix + tenant_id
nodes = node_helper.get_resources(bigip, partition=partition)
for n in nodes:
node_dict[n.name] = n
for member in members:
rd = self.network_builder.find_subnet_route_domain(
tenant_id, member.get('subnet_id', None))
node_name = "{}%{}".format(member['address'], rd)
node_dict.pop(node_name, None)
for node_name, node in node_dict.iteritems():
try:
node_helper.delete(bigip, name=urllib.quote(node_name),
partition=partition)
except HTTPError as error:
if error.response.status_code == 400:
LOG.error(error.response)
    @serialized('get_all_deployed_pools')
    @is_operational
    def get_all_deployed_pools(self):
        """Map deployed pool ids to tenant, hostnames and monitor id."""
        LOG.debug('getting all deployed pools on BIG-IPs')
        deployed_pool_dict = {}
        for bigip in self.get_all_bigips():
            folders = self.system_helper.get_folders(bigip)
            for folder in folders:
                tenant_id = folder[len(self.service_adapter.prefix):]
                if str(folder).startswith(self.service_adapter.prefix):
                    resource = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.pool)
                    deployed_pools = resource.get_resources(bigip, folder)
                    if deployed_pools:
                        for pool in deployed_pools:
                            pool_id = \
                                pool.name[len(self.service_adapter.prefix):]
                            monitor_id = ''
                            if hasattr(pool, 'monitor'):
                                # monitor path looks like
                                # '/Partition/name'; strip the partition
                                # segment and then the service prefix.
                                monitor = pool.monitor.split('/')[2].strip()
                                monitor_id = \
                                    monitor[len(self.service_adapter.prefix):]
                                LOG.debug(
                                    'pool {} has monitor {}'.format(
                                        pool.name, monitor))
                            else:
                                LOG.debug(
                                    'pool {} has no healthmonitors'.format(
                                        pool.name))
                            if pool_id in deployed_pool_dict:
                                deployed_pool_dict[pool_id][
                                    'hostnames'].append(bigip.hostname)
                            else:
                                deployed_pool_dict[pool_id] = {
                                    'id': pool_id,
                                    'tenant_id': tenant_id,
                                    'hostnames': [bigip.hostname],
                                    'monitors': monitor_id
                                }
        return deployed_pool_dict
@serialized('purge_orphaned_pool')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_pool(self, tenant_id=None, pool_id=None,
hostnames=list()):
node_helper = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.node)
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
pool_name = self.service_adapter.prefix + pool_id
partition = self.service_adapter.prefix + tenant_id
pool = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.pool).load(
bigip, pool_name, partition)
members = pool.members_s.get_collection()
pool.delete()
for member in members:
node_name = member.address
try:
node_helper.delete(bigip,
name=urllib.quote(node_name),
partition=partition)
except HTTPError as e:
if e.response.status_code == 404:
pass
if e.response.status_code == 400:
LOG.warn("Failed to delete node -- in use")
else:
LOG.exception("Failed to delete node")
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('pool %s not on BIG-IP %s.'
% (pool_id, bigip.hostname))
except Exception as exc:
LOG.exception('Exception purging pool %s' % str(exc))
    @serialized('get_all_deployed_monitors')
    @is_operational
    def get_all_deployed_health_monitors(self):
        """Retrieve a list of all Health Monitors deployed"""
        LOG.debug('getting all deployed monitors on BIG-IP\'s')
        # Monitor type names map to resource_helper.ResourceType members.
        monitor_types = ['http_monitor', 'https_monitor', 'tcp_monitor',
                         'ping_monitor']
        deployed_monitor_dict = {}
        adapter_prefix = self.service_adapter.prefix
        for bigip in self.get_all_bigips():
            folders = self.system_helper.get_folders(bigip)
            for folder in folders:
                tenant_id = folder[len(adapter_prefix):]
                if str(folder).startswith(adapter_prefix):
                    # One resource helper per monitor type to query.
                    resources = map(
                        lambda x: resource_helper.BigIPResourceHelper(
                            getattr(resource_helper.ResourceType, x)),
                        monitor_types)
                    for resource in resources:
                        deployed_monitors = resource.get_resources(
                            bigip, folder)
                        if deployed_monitors:
                            for monitor in deployed_monitors:
                                monitor_id = monitor.name[len(adapter_prefix):]
                                if monitor_id in deployed_monitor_dict:
                                    deployed_monitor_dict[monitor_id][
                                        'hostnames'].append(bigip.hostname)
                                else:
                                    deployed_monitor_dict[monitor_id] = {
                                        'id': monitor_id,
                                        'tenant_id': tenant_id,
                                        'hostnames': [bigip.hostname]
                                    }
        return deployed_monitor_dict
@serialized('purge_orphaned_health_monitor')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_health_monitor(self, tenant_id=None, monitor_id=None,
hostnames=list()):
"""Purge all monitors that exist on the BIG-IP but not in Neutron"""
resource_types = [
resource_helper.BigIPResourceHelper(x) for x in [
resource_helper.ResourceType.http_monitor,
resource_helper.ResourceType.https_monitor,
resource_helper.ResourceType.ping_monitor,
resource_helper.ResourceType.tcp_monitor]]
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
monitor_name = self.service_adapter.prefix + monitor_id
partition = self.service_adapter.prefix + tenant_id
monitor = None
for monitor_type in resource_types:
try:
monitor = monitor_type.load(bigip, monitor_name,
partition)
break
except HTTPError as err:
if err.response.status_code == 404:
continue
monitor.delete()
except TypeError as err:
if 'NoneType' in err:
LOG.exception("Could not find monitor {}".format(
monitor_name))
except Exception as exc:
LOG.exception('Exception purging monitor %s' % str(exc))
    @serialized('get_all_deployed_l7_policys')
    @is_operational
    def get_all_deployed_l7_policys(self):
        """Retrieve a dict of all l7policies deployed

        The dict returned will have the following format:
        {policy_bigip_id_0: {'id': policy_id_0,
                             'tenant_id': tenant_id,
                             'hostnames': [hostnames_0]}
         ...
        }
        Where hostnames is the list of BIG-IP hostnames impacted, and the
        policy_id is the policy_bigip_id without 'wrapper_policy_'
        """
        LOG.debug('getting all deployed l7_policys on BIG-IP\'s')
        deployed_l7_policys_dict = {}
        for bigip in self.get_all_bigips():
            folders = self.system_helper.get_folders(bigip)
            for folder in folders:
                tenant_id = folder[len(self.service_adapter.prefix):]
                if str(folder).startswith(self.service_adapter.prefix):
                    resource = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.l7policy)
                    deployed_l7_policys = resource.get_resources(
                        bigip, folder)
                    if deployed_l7_policys:
                        for l7_policy in deployed_l7_policys:
                            l7_policy_id = l7_policy.name
                            if l7_policy_id in deployed_l7_policys_dict:
                                my_dict = \
                                    deployed_l7_policys_dict[l7_policy_id]
                                my_dict['hostnames'].append(bigip.hostname)
                            else:
                                # Neutron's id drops the wrapper prefix.
                                po_id = l7_policy_id.replace(
                                    'wrapper_policy_', '')
                                deployed_l7_policys_dict[l7_policy_id] = {
                                    'id': po_id,
                                    'tenant_id': tenant_id,
                                    'hostnames': [bigip.hostname]
                                }
        return deployed_l7_policys_dict
@serialized('purge_orphaned_l7_policy')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_l7_policy(self, tenant_id=None, l7_policy_id=None,
hostnames=list(), listener_id=None):
"""Purge all l7_policys that exist on the BIG-IP but not in Neutron"""
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
error = None
try:
l7_policy_name = l7_policy_id
partition = self.service_adapter.prefix + tenant_id
if listener_id and partition:
if self.service_adapter.prefix not in listener_id:
listener_id = \
self.service_adapter.prefix + listener_id
li_resource = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual).load(
bigip, listener_id, partition)
li_resource.update(policies=[])
l7_policy = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.l7policy).load(
bigip, l7_policy_name, partition)
l7_policy.delete()
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('l7_policy %s not on BIG-IP %s.'
% (l7_policy_id, bigip.hostname))
else:
error = err
except Exception as exc:
error = err
if error:
kwargs = dict(
tenant_id=tenant_id, l7_policy_id=l7_policy_id,
hostname=bigip.hostname, listener_id=listener_id)
LOG.exception('Exception: purge_orphaned_l7_policy({}) '
'"{}"'.format(kwargs, exc))
    @serialized('purge_orphaned_loadbalancer')
    @is_operational
    @log_helpers.log_method_call
    def purge_orphaned_loadbalancer(self, tenant_id=None,
                                    loadbalancer_id=None, hostnames=list()):
        """Remove a loadbalancer's virtual address and dependents.

        Deletes every virtual server (and its pool) whose destination
        references the orphaned virtual address before removing the
        address itself.
        """
        for bigip in self.get_all_bigips():
            if bigip.hostname in hostnames:
                try:
                    va_name = self.service_adapter.prefix + loadbalancer_id
                    partition = self.service_adapter.prefix + tenant_id
                    va = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.virtual_address).load(
                        bigip, va_name, partition)
                    # get virtual services (listeners)
                    # referencing this virtual address
                    vses = resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.virtual).get_resources(
                        bigip, partition)
                    vs_dest_compare = '/' + partition + '/' + va.name
                    for vs in vses:
                        if str(vs.destination).startswith(vs_dest_compare):
                            if hasattr(vs, 'pool'):
                                pool = resource_helper.BigIPResourceHelper(
                                    resource_helper.ResourceType.pool).load(
                                    bigip, os.path.basename(vs.pool),
                                    partition)
                                # Delete the virtual first so the pool is
                                # no longer referenced.
                                vs.delete()
                                pool.delete()
                            else:
                                vs.delete()
                    resource_helper.BigIPResourceHelper(
                        resource_helper.ResourceType.virtual_address).delete(
                        bigip, va_name, partition)
                except HTTPError as err:
                    if err.response.status_code == 404:
                        LOG.debug('loadbalancer %s not on BIG-IP %s.'
                                  % (loadbalancer_id, bigip.hostname))
                except Exception as exc:
                    LOG.exception('Exception purging loadbalancer %s'
                                  % str(exc))
@serialized('purge_orphaned_listener')
@is_operational
@log_helpers.log_method_call
def purge_orphaned_listener(
self, tenant_id=None, listener_id=None, hostnames=[]):
for bigip in self.get_all_bigips():
if bigip.hostname in hostnames:
try:
listener_name = self.service_adapter.prefix + listener_id
partition = self.service_adapter.prefix + tenant_id
listener = resource_helper.BigIPResourceHelper(
resource_helper.ResourceType.virtual).load(
bigip, listener_name, partition)
listener.delete()
except HTTPError as err:
if err.response.status_code == 404:
LOG.debug('listener %s not on BIG-IP %s.'
% (listener_id, bigip.hostname))
except Exception as exc:
LOG.exception('Exception purging listener %s' % str(exc))
    @serialized('create_loadbalancer')
    @is_operational
    def create_loadbalancer(self, loadbalancer, service):
        """Create virtual server."""
        # The full service description drives provisioning; the
        # loadbalancer argument is part of the driver API but unused.
        self._common_service_handler(service)
        return self._update_target(service)
    @serialized('update_loadbalancer')
    @is_operational
    def update_loadbalancer(self, old_loadbalancer, loadbalancer, service):
        """Update virtual server."""
        # anti-pattern three args unused: only the service description
        # is needed to (re)apply configuration.
        self._common_service_handler(service)
        return self._update_target(service)
    @serialized('delete_loadbalancer')
    @is_operational
    def delete_loadbalancer(self, loadbalancer, service):
        """Delete loadbalancer."""
        LOG.debug("Deleting loadbalancer")
        # Also request removal of the tenant partition and delete-event
        # handling in the common service pipeline.
        self._common_service_handler(
            service,
            delete_partition=True,
            delete_event=True)
        return self._update_target(service)
    @serialized('create_listener')
    @is_operational
    @log_helpers.log_method_call
    def create_listener(self, listener, service):
        """Create virtual server."""
        LOG.debug("Creating listener")
        self._common_service_handler(service)
        # Report the resulting listener status through the callback.
        return self._update_target(service,
                                   self._update_listener_status,
                                   service)
    @serialized('update_listener')
    @is_operational
    def update_listener(self, old_listener, listener, service):
        """Update virtual server."""
        LOG.debug("Updating listener")
        self._common_service_handler(service)
        # Report the resulting listener status through the callback.
        return self._update_target(service,
                                   self._update_listener_status,
                                   service)
    @serialized('delete_listener')
    @is_operational
    def delete_listener(self, listener, service):
        """Delete virtual server."""
        LOG.debug("Deleting listener")
        self._common_service_handler(service)
        # Report the resulting listener status through the callback.
        return self._update_target(service,
                                   self._update_listener_status,
                                   service)
    @serialized('create_pool')
    @is_operational
    def create_pool(self, pool, service):
        """Create lb pool."""
        LOG.debug("Creating pool")
        # pzhang(NOTE): pool may not bound with a listener
        if service.get("listeners"):
            target_listener = service["listeners"][0]
            target_listener["provisioning_status"] = "PENDING_UPDATE"
        self._common_service_handler(service)
        return self._update_target(service,
                                   self._update_pool_status,
                                   service["pools"])
    @serialized('update_pool')
    @is_operational
    def update_pool(self, old_pool, pool, service):
        """Update lb pool."""
        LOG.debug("Updating pool")
        # A pool may not be bound to a listener; guard before touching
        # the first listener's provisioning status.
        if service.get("listeners"):
            target_listener = service["listeners"][0]
            target_listener["provisioning_status"] = "PENDING_UPDATE"
        self._common_service_handler(service)
        return self._update_target(service,
                                   self._update_pool_status,
                                   service["pools"])
@serialized('delete_pool')
@is_operational
def delete_pool(self, pool, service):
"""Delete lb pool."""
LOG.debug("Deleting pool")
if service.get("listeners"):
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_pool_status,
service["pools"])
@serialized('create_l7policy')
@is_operational
def create_l7policy(self, l7policy, service):
"""Create lb l7policy."""
LOG.debug("Creating l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
@serialized('update_l7policy')
@is_operational
def update_l7policy(self, old_l7policy, l7policy, service):
"""Update lb l7policy."""
LOG.debug("Updating l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
@serialized('delete_l7policy')
@is_operational
def delete_l7policy(self, l7policy, service):
"""Delete lb l7policy."""
LOG.debug("Deleting l7policy")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7policy_status,
service["l7policies"])
# TODO(pzhang): test this
@serialized('create_l7rule')
@is_operational
def create_l7rule(self, l7rule, service):
"""Create lb l7rule."""
LOG.debug("Creating l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
# TODO(pzhang): test this
@serialized('update_l7rule')
@is_operational
def update_l7rule(self, old_l7rule, l7rule, service):
"""Update lb l7rule."""
LOG.debug("Updating l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
# TODO(pzhang): test this
@serialized('delete_l7rule')
@is_operational
def delete_l7rule(self, l7rule, service):
"""Delete lb l7rule."""
LOG.debug("Deleting l7rule")
target_listener = service["listeners"][0]
target_listener["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_l7rule_status,
service['l7policy_rules'])
@serialized('create_member')
@is_operational
def create_member(self, member, service):
"""Create pool member."""
LOG.debug("Creating member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('update_member')
@is_operational
def update_member(self, old_member, member, service):
"""Update pool member."""
LOG.debug("Updating member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('delete_member')
@is_operational
def delete_member(self, member, service):
"""Delete pool member."""
LOG.debug("Deleting member")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_member_status,
service["members"])
@serialized('create_health_monitor')
@is_operational
def create_health_monitor(self, health_monitor, service):
"""Create pool health monitor."""
LOG.debug("Creating health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
@serialized('update_health_monitor')
@is_operational
def update_health_monitor(self, old_health_monitor,
health_monitor, service):
"""Update pool health monitor."""
LOG.debug("Updating health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
@serialized('delete_health_monitor')
@is_operational
def delete_health_monitor(self, health_monitor, service):
"""Delete pool health monitor."""
LOG.debug("Deleting health monitor")
target_pool = service["pools"][0]
target_pool["provisioning_status"] = "PENDING_UPDATE"
self._common_service_handler(service)
return self._update_target(service,
self._update_health_monitor_status,
service["healthmonitors"])
def _update_target(self, service,
update_method=None, target=None):
if self.do_service_update:
if target is not None and update_method is not None:
update_method(target)
self._update_loadbalancer_status(service, timed_out=False)
loadbalancer = service.get('loadbalancer', {})
lb_provisioning_status = loadbalancer.get("provisioning_status",
f5const.F5_ERROR)
lb_pending = \
(lb_provisioning_status == f5const.F5_PENDING_CREATE or
lb_provisioning_status == f5const.F5_PENDING_UPDATE)
return lb_pending
@is_operational
def get_stats(self, service):
lb_stats = {}
stats = ['clientside.bitsIn',
'clientside.bitsOut',
'clientside.curConns',
'clientside.totConns']
loadbalancer = service['loadbalancer']
try:
# sum virtual server stats for all BIG-IPs
vs_stats = self.lbaas_builder.get_listener_stats(service, stats)
# convert to bytes
lb_stats[f5const.F5_STATS_IN_BYTES] = \
vs_stats['clientside.bitsIn']/8
lb_stats[f5const.F5_STATS_OUT_BYTES] = \
vs_stats['clientside.bitsOut']/8
lb_stats[f5const.F5_STATS_ACTIVE_CONNECTIONS] = \
vs_stats['clientside.curConns']
lb_stats[f5const.F5_STATS_TOTAL_CONNECTIONS] = \
vs_stats['clientside.totConns']
# update Neutron
self.plugin_rpc.update_loadbalancer_stats(
loadbalancer['id'], lb_stats)
except Exception as e:
LOG.error("Error getting loadbalancer stats: %s", e.message)
finally:
return lb_stats
def fdb_add(self, fdb):
# Add (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.add_bigip_fdb(bigip, fdb)
def fdb_remove(self, fdb):
# Remove (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.remove_bigip_fdb(bigip, fdb)
def fdb_update(self, fdb):
# Update (L2toL3) forwarding database entries
for bigip in self.get_all_bigips():
self.network_builder.update_bigip_fdb(bigip, fdb)
    def tunnel_update(self, **kwargs):
        # Tunnel Update from Neutron Core RPC. This driver keeps no
        # per-tunnel state here, so the notification is intentionally
        # ignored (see tunnel_sync for the outbound direction).
        pass
def tunnel_sync(self):
# Only sync when supported types are present
if not [i for i in self.agent_configurations['tunnel_types']
if i in ['gre', 'vxlan']]:
return False
tunnel_ips = []
for bigip in self.get_all_bigips():
if bigip.local_ip:
tunnel_ips.append(bigip.local_ip)
self.network_builder.tunnel_sync(tunnel_ips)
# Tunnel sync sent.
return False
    @serialized('sync')
    @is_operational
    def sync(self, service):
        """Sync the service definition to the device.

        Re-reads the service from Neutron (when an RPC handler and a
        loadbalancer id are available) and replays the whole service
        tree onto the BIG-IP with the builder in "sync" mode.
        """
        # loadbalancer and plugin_rpc may not be set
        lb_id = service.get('loadbalancer', dict()).get('id', '')
        if hasattr(self, 'plugin_rpc') and self.plugin_rpc and lb_id:
            # Get the latest service. It may have changed.
            service = self.plugin_rpc.get_service_by_loadbalancer_id(lb_id)
        if service.get('loadbalancer', None):
            # to_sync makes the builder reconcile rather than apply a delta.
            self.lbaas_builder.to_sync = True
            self._common_service_handler(service)
            self.lbaas_builder.to_sync = False
            # pzhang(NOTE): move update of the neutron db out here for
            # the whole lb tree
            if self.do_service_update:
                self.update_service_status(service)
            loadbalancer = service.get('loadbalancer', {})
            lb_provisioning_status = loadbalancer.get("provisioning_status",
                                                      f5const.F5_ERROR)
            lb_pending = \
                (lb_provisioning_status == f5const.F5_PENDING_CREATE or
                 lb_provisioning_status == f5const.F5_PENDING_UPDATE)
            return lb_pending
        else:
            # NOTE(review): this path returns None (not False); callers
            # treating the result as tri-state rely on that — confirm.
            LOG.debug("Attempted sync of deleted pool")
@serialized('backup_configuration')
@is_operational
def backup_configuration(self):
# Save Configuration on Devices
for bigip in self.get_all_bigips():
LOG.debug('_backup_configuration: saving device %s.'
% bigip.hostname)
self.cluster_manager.save_config(bigip)
def _get_monitor_endpoint(self, bigip, service):
monitor_type = self.service_adapter.get_monitor_type(service)
if not monitor_type:
monitor_type = ""
if monitor_type == "HTTPS":
hm = bigip.tm.ltm.monitor.https_s.https
elif monitor_type == "TCP":
hm = bigip.tm.ltm.monitor.tcps.tcp
elif monitor_type == "PING":
hm = bigip.tm.ltm.monitor.gateway_icmps.gateway_icmp
else:
hm = bigip.tm.ltm.monitor.https.http
return hm
def service_rename_required(self, service):
rename_required = False
# Returns whether the bigip has a pool for the service
if not service['loadbalancer']:
return False
bigips = self.get_config_bigips()
loadbalancer = service['loadbalancer']
# Does the correctly named virtual address exist?
for bigip in bigips:
virtual_address = VirtualAddress(self.service_adapter,
loadbalancer)
if not virtual_address.exists(bigip):
rename_required = True
break
return rename_required
    def service_object_teardown(self, service):
        """Delete the service's listeners, pools and monitors on the BIG-IPs.

        Used to tear down deployed objects (e.g. before a rename or
        re-deploy). Returns False when the service has no loadbalancer.
        """
        # Returns whether the bigip has a pool for the service
        if not service['loadbalancer']:
            return False
        bigips = self.get_config_bigips()
        loadbalancer = service['loadbalancer']
        folder_name = self.service_adapter.get_folder_name(
            loadbalancer['tenant_id']
        )
        # Change to bigips
        for bigip in bigips:
            # Delete all virtuals
            v = bigip.tm.ltm.virtuals.virtual
            for listener in service['listeners']:
                l_name = listener.get("name", "")
                if not l_name:
                    # Derive the BIG-IP object name from the service
                    # definition when the OS object carries no name.
                    svc = {"loadbalancer": loadbalancer,
                           "listener": listener}
                    vip = self.service_adapter.get_virtual(svc)
                    l_name = vip['name']
                if v.exists(name=l_name, partition=folder_name):
                    # Found a virtual that is named by the OS object,
                    # delete it.
                    l_obj = v.load(name=l_name, partition=folder_name)
                    LOG.warn("Deleting listener: /%s/%s" %
                             (folder_name, l_name))
                    l_obj.delete(name=l_name, partition=folder_name)
            # Delete all pools
            p = bigip.tm.ltm.pools.pool
            for os_pool in service['pools']:
                p_name = os_pool.get('name', "")
                if not p_name:
                    # Same name-derivation fallback as for listeners.
                    svc = {"loadbalancer": loadbalancer,
                           "pool": os_pool}
                    pool = self.service_adapter.get_pool(svc)
                    p_name = pool['name']
                if p.exists(name=p_name, partition=folder_name):
                    p_obj = p.load(name=p_name, partition=folder_name)
                    LOG.warn("Deleting pool: /%s/%s" % (folder_name, p_name))
                    p_obj.delete(name=p_name, partition=folder_name)
            # Delete all healthmonitors
            for healthmonitor in service['healthmonitors']:
                svc = {'loadbalancer': loadbalancer,
                       'healthmonitor': healthmonitor}
                # The endpoint collection depends on the monitor type.
                monitor_ep = self._get_monitor_endpoint(bigip, svc)
                m_name = healthmonitor.get('name', "")
                if not m_name:
                    hm = self.service_adapter.get_healthmonitor(svc)
                    m_name = hm['name']
                if monitor_ep.exists(name=m_name, partition=folder_name):
                    m_obj = monitor_ep.load(name=m_name, partition=folder_name)
                    LOG.warn("Deleting monitor: /%s/%s" % (
                        folder_name, m_name))
                    m_obj.delete()
    def _service_exists(self, service):
        """Return True only when every object of ``service`` is deployed.

        Walks each configured BIG-IP and verifies the tenant folder,
        virtual address, listeners, pools (including member counts and
        each member), and health monitors. Any missing object logs an
        error and short-circuits with False.
        """
        # Returns whether the bigip has the service defined
        if not service['loadbalancer']:
            return False
        loadbalancer = service['loadbalancer']
        folder_name = self.service_adapter.get_folder_name(
            loadbalancer['tenant_id']
        )
        if self.network_builder:
            # append route domain to member address
            self.network_builder._annotate_service_route_domains(service)
        # Foreach bigip in the cluster:
        for bigip in self.get_config_bigips():
            # Does the tenant folder exist?
            if not self.system_helper.folder_exists(bigip, folder_name):
                LOG.error("Folder %s does not exists on bigip: %s" %
                          (folder_name, bigip.hostname))
                return False
            # Get the virtual address
            virtual_address = VirtualAddress(self.service_adapter,
                                             loadbalancer)
            if not virtual_address.exists(bigip):
                LOG.error("Virtual address %s(%s) does not "
                          "exists on bigip: %s" % (virtual_address.name,
                                                   virtual_address.address,
                                                   bigip.hostname))
                return False
            # Ensure that each virtual service exists.
            for listener in service['listeners']:
                svc = {"loadbalancer": loadbalancer,
                       "listener": listener}
                virtual_server = self.service_adapter.get_virtual_name(svc)
                if not self.vs_manager.exists(bigip,
                                              name=virtual_server['name'],
                                              partition=folder_name):
                    LOG.error("Virtual /%s/%s not found on bigip: %s" %
                              (virtual_server['name'], folder_name,
                               bigip.hostname))
                    return False
            # Ensure that each pool exists.
            for pool in service['pools']:
                svc = {"loadbalancer": loadbalancer,
                       "pool": pool}
                bigip_pool = self.service_adapter.get_pool(svc)
                if not self.pool_manager.exists(
                        bigip,
                        name=bigip_pool['name'],
                        partition=folder_name):
                    LOG.error("Pool /%s/%s not found on bigip: %s" %
                              (folder_name, bigip_pool['name'],
                               bigip.hostname))
                    return False
                else:
                    # Pool is present; verify its member set as well.
                    deployed_pool = self.pool_manager.load(
                        bigip,
                        name=bigip_pool['name'],
                        partition=folder_name)
                    deployed_members = \
                        deployed_pool.members_s.get_collection()
                    # First check that number of members deployed
                    # is equal to the number in the service.
                    if len(deployed_members) != len(pool['members']):
                        LOG.error("Pool %s members member count mismatch "
                                  "match: deployed %d != service %d" %
                                  (bigip_pool['name'], len(deployed_members),
                                   len(pool['members'])))
                        return False
                    # Ensure each pool member exists
                    for member in service['members']:
                        if member['pool_id'] == pool['id']:
                            lb = self.lbaas_builder
                            pool = lb.get_pool_by_id(
                                service, member["pool_id"])
                            svc = {"loadbalancer": loadbalancer,
                                   "member": member,
                                   "pool": pool}
                            if not lb.pool_builder.member_exists(svc, bigip):
                                LOG.error("Pool member not found: %s" %
                                          svc['member'])
                                return False
            # Ensure that each health monitor exists.
            for healthmonitor in service['healthmonitors']:
                svc = {"loadbalancer": loadbalancer,
                       "healthmonitor": healthmonitor}
                monitor = self.service_adapter.get_healthmonitor(svc)
                monitor_ep = self._get_monitor_endpoint(bigip, svc)
                if not monitor_ep.exists(name=monitor['name'],
                                         partition=folder_name):
                    LOG.error("Monitor /%s/%s not found on bigip: %s" %
                              (monitor['name'], folder_name, bigip.hostname))
                    return False
        return True
def get_loadbalancers_in_tenant(self, tenant_id):
loadbalancers = self.plugin_rpc.get_all_loadbalancers()
return [lb['lb_id'] for lb in loadbalancers
if lb['tenant_id'] == tenant_id]
def _common_service_handler(self, service,
delete_partition=False,
delete_event=False):
# Assure that the service is configured on bigip(s)
start_time = time()
lb_pending = True
self.do_service_update = True
if self.conf.trace_service_requests:
self.trace_service_requests(service)
loadbalancer = service.get("loadbalancer", None)
if not loadbalancer:
LOG.error("_common_service_handler: Service loadbalancer is None")
return lb_pending
lb_provisioning_status = loadbalancer.get("provisioning_status",
f5const.F5_ERROR)
try:
try:
self.tenant_manager.assure_tenant_created(service)
except Exception as e:
LOG.error("Tenant folder creation exception: %s",
e.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
raise e
LOG.debug(" _assure_tenant_created took %.5f secs" %
(time() - start_time))
traffic_group = self.service_to_traffic_group(service)
loadbalancer['traffic_group'] = traffic_group
if self.network_builder:
start_time = time()
try:
self.network_builder.prep_service_networking(
service, traffic_group)
except f5ex.NetworkNotReady as error:
LOG.debug("Network creation deferred until network "
"definition is completed: %s",
error.message)
if not delete_event:
self.do_service_update = False
raise error
except Exception as error:
LOG.error("Prep-network exception: icontrol_driver: %s",
error.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
if not delete_event:
raise error
finally:
if time() - start_time > .001:
LOG.debug(" _prep_service_networking "
"took %.5f secs" % (time() - start_time))
all_subnet_hints = {}
for bigip in self.get_config_bigips():
# check_for_delete_subnets:
# keep track of which subnets we should check to delete
# for a deleted vip or member
# do_not_delete_subnets:
# If we add an IP to a subnet we must not delete the subnet
all_subnet_hints[bigip.device_name] = \
{'check_for_delete_subnets': {},
'do_not_delete_subnets': []}
LOG.debug("XXXXXXXXX: Pre assure service")
self.lbaas_builder.assure_service(service,
traffic_group,
all_subnet_hints)
LOG.debug("XXXXXXXXX: Post assure service")
if self.network_builder:
start_time = time()
try:
self.network_builder.post_service_networking(
service, all_subnet_hints)
except Exception as error:
LOG.error("Post-network exception: icontrol_driver: %s",
error.message)
if lb_provisioning_status != f5const.F5_PENDING_DELETE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
raise error
if time() - start_time > .001:
LOG.debug(" _post_service_networking "
"took %.5f secs" % (time() - start_time))
except f5ex.NetworkNotReady as error:
pass
except Exception as err:
LOG.exception(err)
finally:
# only delete partition if loadbalancer is being deleted
if lb_provisioning_status == f5const.F5_PENDING_DELETE:
self.tenant_manager.assure_tenant_cleanup(service,
all_subnet_hints)
def update_service_status(self, service, timed_out=False):
"""Update status of objects in controller."""
LOG.debug("_update_service_status")
if not self.plugin_rpc:
LOG.error("Cannot update status in Neutron without "
"RPC handler.")
return
if 'members' in service:
# Call update_members_status
self._update_member_status(service['members'], timed_out)
if 'healthmonitors' in service:
# Call update_monitor_status
self._update_health_monitor_status(
service['healthmonitors']
)
if 'pools' in service:
# Call update_pool_status
self._update_pool_status(
service['pools']
)
if 'listeners' in service:
# Call update_listener_status
self._update_listener_status(service)
if 'l7policy_rules' in service:
self._update_l7rule_status(service['l7policy_rules'])
if 'l7policies' in service:
self._update_l7policy_status(service['l7policies'])
self._update_loadbalancer_status(service, timed_out)
def _update_member_status(self, members, timed_out=False):
"""Update member status in OpenStack."""
for member in members:
if 'provisioning_status' in member:
provisioning_status = member['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
if timed_out and \
provisioning_status != f5const.F5_ACTIVE:
member['provisioning_status'] = f5const.F5_ERROR
operating_status = f5const.F5_OFFLINE
else:
member['provisioning_status'] = f5const.F5_ACTIVE
operating_status = f5const.F5_ONLINE
self.plugin_rpc.update_member_status(
member['id'],
member['provisioning_status'],
operating_status
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
if not member.get('parent_pool_deleted', False):
self.plugin_rpc.member_destroyed(
member['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_member_status(
member['id'],
f5const.F5_ERROR,
f5const.F5_OFFLINE)
def _update_health_monitor_status(self, health_monitors):
"""Update pool monitor status in OpenStack."""
for health_monitor in health_monitors:
if 'provisioning_status' in health_monitor:
provisioning_status = health_monitor['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_health_monitor_status(
health_monitor['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
health_monitor['provisioning_status'] = \
f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.health_monitor_destroyed(
health_monitor['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_health_monitor_status(
health_monitor['id'])
@log_helpers.log_method_call
def _update_pool_status(self, pools):
"""Update pool status in OpenStack."""
for pool in pools:
if 'provisioning_status' in pool:
provisioning_status = pool['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_pool_status(
pool['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
pool['provisioning_status'] = f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.pool_destroyed(
pool['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_pool_status(pool['id'])
@log_helpers.log_method_call
def _update_listener_status(self, service):
"""Update listener status in OpenStack."""
listeners = service['listeners']
for listener in listeners:
if 'provisioning_status' in listener:
provisioning_status = listener['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_listener_status(
listener['id'],
f5const.F5_ACTIVE,
listener['operating_status']
)
listener['provisioning_status'] = \
f5const.F5_ACTIVE
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.listener_destroyed(
listener['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_listener_status(
listener['id'],
provisioning_status,
f5const.F5_OFFLINE)
@log_helpers.log_method_call
def _update_l7rule_status(self, l7rules):
"""Update l7rule status in OpenStack."""
for l7rule in l7rules:
if 'provisioning_status' in l7rule:
provisioning_status = l7rule['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_l7rule_status(
l7rule['id'],
l7rule['policy_id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.l7rule_destroyed(
l7rule['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_l7rule_status(
l7rule['id'], l7rule['policy_id'])
@log_helpers.log_method_call
def _update_l7policy_status(self, l7policies):
LOG.debug("_update_l7policy_status")
"""Update l7policy status in OpenStack."""
for l7policy in l7policies:
if 'provisioning_status' in l7policy:
provisioning_status = l7policy['provisioning_status']
if provisioning_status in self.positive_plugin_const_state:
self.plugin_rpc.update_l7policy_status(
l7policy['id'],
f5const.F5_ACTIVE,
f5const.F5_ONLINE
)
elif provisioning_status == f5const.F5_PENDING_DELETE:
LOG.debug("calling l7policy_destroyed")
self.plugin_rpc.l7policy_destroyed(
l7policy['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_l7policy_status(l7policy['id'])
@log_helpers.log_method_call
def _update_loadbalancer_status(self, service, timed_out=False):
"""Update loadbalancer status in OpenStack."""
loadbalancer = service.get('loadbalancer', {})
provisioning_status = loadbalancer.get('provisioning_status',
f5const.F5_ERROR)
# if provisioning_status in self.positive_plugin_const_state:
if provisioning_status in self.positive_plugin_const_state:
if timed_out:
operating_status = (f5const.F5_OFFLINE)
if provisioning_status == f5const.F5_PENDING_CREATE:
loadbalancer['provisioning_status'] = \
f5const.F5_ERROR
else:
loadbalancer['provisioning_status'] = \
f5const.F5_ACTIVE
else:
operating_status = (f5const.F5_ONLINE)
loadbalancer['provisioning_status'] = \
f5const.F5_ACTIVE
self.plugin_rpc.update_loadbalancer_status(
loadbalancer['id'],
loadbalancer['provisioning_status'],
operating_status)
elif provisioning_status == f5const.F5_PENDING_DELETE:
self.plugin_rpc.loadbalancer_destroyed(
loadbalancer['id'])
elif provisioning_status == f5const.F5_ERROR:
self.plugin_rpc.update_loadbalancer_status(
loadbalancer['id'],
provisioning_status,
f5const.F5_OFFLINE)
elif provisioning_status == f5const.F5_ACTIVE:
LOG.debug('Loadbalancer provisioning status is active')
else:
LOG.error('Loadbalancer provisioning status is invalid')
@is_operational
def update_operating_status(self, service):
if 'members' in service:
if self.network_builder:
# append route domain to member address
try:
self.network_builder._annotate_service_route_domains(
service)
except f5ex.InvalidNetworkType as exc:
LOG.warning(exc.msg)
return<|fim▁hole|> self.lbaas_builder.update_operating_status(service)
# udpate Neutron
for member in service['members']:
if member['provisioning_status'] == f5const.F5_ACTIVE:
operating_status = member.get('operating_status', None)
self.plugin_rpc.update_member_status(
member['id'],
provisioning_status=None,
operating_status=operating_status)
def get_active_bigip(self):
bigips = self.get_all_bigips()
if len(bigips) == 1:
return bigips[0]
for bigip in bigips:
if hasattr(bigip, 'failover_state'):
if bigip.failover_state == 'active':
return bigip
# if can't determine active, default to first one
return bigips[0]
def service_to_traffic_group(self, service):
# Hash service tenant id to index of traffic group
# return which iControlDriver.__traffic_group that tenant is "in?"
return self.tenant_to_traffic_group(
service['loadbalancer']['tenant_id'])
def tenant_to_traffic_group(self, tenant_id):
# Hash tenant id to index of traffic group
hexhash = hashlib.md5(tenant_id).hexdigest()
tg_index = int(hexhash, 16) % len(self.__traffic_groups)
return self.__traffic_groups[tg_index]
# these functions should return only active BIG-IP
# not errored BIG-IPs.
def get_bigip(self):
hostnames = sorted(list(self.__bigips))
for host in hostnames:
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return self.__bigips[host]
def get_bigip_hosts(self):
return_hosts = []
for host in list(self.__bigips):
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return_hosts.append(host)
return sorted(return_hosts)
def get_all_bigips(self):
return_bigips = []
for host in list(self.__bigips):
if hasattr(self.__bigips[host], 'status') and \
self.__bigips[host].status == 'active':
return_bigips.append(self.__bigips[host])
return return_bigips
    def get_config_bigips(self):
        # Devices configuration is written to; currently identical to
        # the set of active BIG-IPs.
        return self.get_all_bigips()
    # these are the refactored methods
    def get_active_bigips(self):
        # Alias of get_all_bigips() kept for refactored call sites.
        return self.get_all_bigips()
def get_errored_bigips_hostnames(self):
return_hostnames = []
for host in list(self.__bigips):
bigip = self.__bigips[host]
if hasattr(bigip, 'status') and bigip.status == 'error':
return_hostnames.append(host)
return return_hostnames
def get_inbound_throughput(self, bigip, global_statistics=None):
return self.stat_helper.get_inbound_throughput(
bigip, global_stats=global_statistics)
def get_outbound_throughput(self, bigip, global_statistics=None):
return self.stat_helper.get_outbound_throughput(
bigip, global_stats=global_statistics)
def get_throughput(self, bigip=None, global_statistics=None):
return self.stat_helper.get_throughput(
bigip, global_stats=global_statistics)
def get_active_connections(self, bigip=None, global_statistics=None):
return self.stat_helper.get_active_connection_count(
bigip, global_stats=global_statistics)
def get_ssltps(self, bigip=None, global_statistics=None):
return self.stat_helper.get_active_SSL_TPS(
bigip, global_stats=global_statistics)
def get_node_count(self, bigip=None, global_statistics=None):
return len(bigip.tm.ltm.nodes.get_collection())
    def get_clientssl_profile_count(self, bigip=None, global_statistics=None):
        # Count of client-SSL profiles on the device (helper lives in
        # the ssl_profile utility module).
        return ssl_profile.SSLProfileHelper.get_client_ssl_profile_count(bigip)
def get_tenant_count(self, bigip=None, global_statistics=None):
return self.system_helper.get_tenant_folder_count(bigip)
def get_tunnel_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_tunnel_count(bigip)
def get_vlan_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_vlan_count(bigip)
def get_route_domain_count(self, bigip=None, global_statistics=None):
return self.network_helper.get_route_domain_count(bigip)
    def _init_traffic_groups(self, bigip):
        """Cache the floating traffic groups available for placement.

        Populates ``self.__traffic_groups`` from the device, dropping
        the non-floating local-only group. On failure the device is
        flagged errored and the exception re-raised.
        """
        try:
            LOG.debug('retrieving traffic groups from %s' % bigip.hostname)
            self.__traffic_groups = \
                self.cluster_manager.get_traffic_groups(bigip)
            if 'traffic-group-local-only' in self.__traffic_groups:
                LOG.debug('removing reference to non-floating traffic group')
                self.__traffic_groups.remove('traffic-group-local-only')
            # Sorted so tenant hashing (tenant_to_traffic_group) is stable.
            self.__traffic_groups.sort()
            LOG.debug('service placement will done on traffic group(s): %s'
                      % self.__traffic_groups)
        except Exception:
            bigip.status = 'error'
            bigip.status_message = \
                'could not determine traffic groups for service placement'
            raise
    def _validate_bigip_version(self, bigip, hostname):
        """Ensure the BIG-IP runs at least the minimum supported TMOS.

        :returns: tuple ``(major_version, minor_version)``
        :raises: f5ex.MajorVersionValidateFailed /
            f5ex.MinorVersionValidateFailed when below the minimum.
        """
        # Ensure the BIG-IP has sufficient version
        major_version = self.system_helper.get_major_version(bigip)
        if major_version < f5const.MIN_TMOS_MAJOR_VERSION:
            raise f5ex.MajorVersionValidateFailed(
                'Device %s must be at least TMOS %s.%s'
                % (hostname, f5const.MIN_TMOS_MAJOR_VERSION,
                   f5const.MIN_TMOS_MINOR_VERSION))
        minor_version = self.system_helper.get_minor_version(bigip)
        # NOTE(review): the minor check applies even when major_version
        # exceeds the minimum major — confirm this is intended.
        if minor_version < f5const.MIN_TMOS_MINOR_VERSION:
            raise f5ex.MinorVersionValidateFailed(
                'Device %s must be at least TMOS %s.%s'
                % (hostname, f5const.MIN_TMOS_MAJOR_VERSION,
                   f5const.MIN_TMOS_MINOR_VERSION))
        return major_version, minor_version
def trace_service_requests(self, service):
"""Dump services to a file for debugging."""
with open(self.file_name, 'r+') as fp:
fp.seek(-1, 2)
fp.write(',')
json.dump(service, fp, sort_keys=True, indent=2)
fp.write(']')
def get_config_dir(self):
"""Determine F5 agent configuration directory.
Oslo cfg has a config_dir option, but F5 agent is not currently
started with this option. To be complete, the code will check if
config_dir is defined, and use that value as long as it is a single
string (no idea what to do if it is not a str). If not defined,
get the full dir path of the INI file, which is currently used when
starting F5 agent. If neither option is available,
use /etc/neutron/services/f5.
:return: str defining configuration directory.
"""
if self.conf.config_dir and isinstance(self.conf.config_dir, str):
# use config_dir parameter if defined, and is a string
return self.conf.config_dir
elif self.conf.config_file:
# multiple config files (neutron and agent) are usually defined
if isinstance(self.conf.config_file, list):
# find agent config (f5-openstack-agent.ini)
config_files = self.conf.config_file
for file_name in config_files:
if 'f5-openstack-agent.ini' in file_name:
return os.path.dirname(file_name)
elif isinstance(self.conf.config_file, str):
# not a list, just a single string
return os.path.dirname(self.conf.config_file)
# if all else fails
return '/etc/neutron/services/f5'<|fim▁end|>
|
# get currrent member status
|
<|file_name|>highmaps.src.js<|end_file_name|><|fim▁begin|>/**
* @license Highmaps JS v5.0.1 (2016-10-26)
*
* (c) 2011-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
(function(root, factory) {
if (typeof module === 'object' && module.exports) {
module.exports = root.document ?
factory(root) :
factory;
} else {
root.Highcharts = factory(root);
}
}(typeof window !== 'undefined' ? window : this, function(win) {
var Highcharts = (function() {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
/* global window */
var win = window,
doc = win.document;
var SVG_NS = 'http://www.w3.org/2000/svg',
userAgent = (win.navigator && win.navigator.userAgent) || '',
svg = doc && doc.createElementNS && !!doc.createElementNS(SVG_NS, 'svg').createSVGRect,
isMS = /(edge|msie|trident)/i.test(userAgent) && !window.opera,
vml = !svg,
isFirefox = /Firefox/.test(userAgent),
hasBidiBug = isFirefox && parseInt(userAgent.split('Firefox/')[1], 10) < 4; // issue #38
var Highcharts = win.Highcharts ? win.Highcharts.error(16, true) : {
product: 'Highmaps',
version: '5.0.1',
deg2rad: Math.PI * 2 / 360,
doc: doc,
hasBidiBug: hasBidiBug,
hasTouch: doc && doc.documentElement.ontouchstart !== undefined,
isMS: isMS,
isWebKit: /AppleWebKit/.test(userAgent),
isFirefox: isFirefox,
isTouchDevice: /(Mobile|Android|Windows Phone)/.test(userAgent),
SVG_NS: SVG_NS,
idCounter: 0,
chartCount: 0,
seriesTypes: {},
symbolSizes: {},
svg: svg,
vml: vml,
win: win,
charts: [],
marginNames: ['plotTop', 'marginRight', 'marginBottom', 'plotLeft'],
noop: function() {
return undefined;
}
};
return Highcharts;
}());
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var timers = [];
var charts = H.charts,
doc = H.doc,
win = H.win;
/**
* Provide error messages for debugging, with links to online explanation
*/
H.error = function(code, stop) {
var msg = 'Highcharts error #' + code + ': www.highcharts.com/errors/' + code;
if (stop) {
throw new Error(msg);
}
// else ...
if (win.console) {
console.log(msg); // eslint-disable-line no-console
}
};
/**
* An animator object. One instance applies to one property (attribute or style prop)
* on one element.
*
* @param {object} elem The element to animate. May be a DOM element or a Highcharts SVGElement wrapper.
* @param {object} options Animation options, including duration, easing, step and complete.
* @param {object} prop The property to animate.
*/
H.Fx = function(elem, options, prop) {
this.options = options;
this.elem = elem;
this.prop = prop;
};
H.Fx.prototype = {
/**
* Animating a path definition on SVGElement
* @returns {undefined}
*/
dSetter: function() {
var start = this.paths[0],
end = this.paths[1],
ret = [],
now = this.now,
i = start.length,
startVal;
if (now === 1) { // land on the final path without adjustment points appended in the ends
ret = this.toD;
} else if (i === end.length && now < 1) {
while (i--) {
startVal = parseFloat(start[i]);
ret[i] =
isNaN(startVal) ? // a letter instruction like M or L
start[i] :
now * (parseFloat(end[i] - startVal)) + startVal;
}
} else { // if animation is finished or length not matching, land on right value
ret = end;
}
this.elem.attr('d', ret);
},
/**
* Update the element with the current animation step
* @returns {undefined}
*/
update: function() {
var elem = this.elem,
prop = this.prop, // if destroyed, it is null
now = this.now,
step = this.options.step;
// Animation setter defined from outside
if (this[prop + 'Setter']) {
this[prop + 'Setter']();
// Other animations on SVGElement
} else if (elem.attr) {
if (elem.element) {
elem.attr(prop, now);
}
// HTML styles, raw HTML content like container size
} else {
elem.style[prop] = now + this.unit;
}
if (step) {
step.call(elem, now, this);
}
},
/**
* Run an animation
*/
run: function(from, to, unit) {
var self = this,
timer = function(gotoEnd) {
return timer.stopped ? false : self.step(gotoEnd);
},
i;
this.startTime = +new Date();
this.start = from;
this.end = to;
this.unit = unit;
this.now = this.start;
this.pos = 0;
timer.elem = this.elem;
if (timer() && timers.push(timer) === 1) {
timer.timerId = setInterval(function() {
for (i = 0; i < timers.length; i++) {
if (!timers[i]()) {
timers.splice(i--, 1);
}
}
if (!timers.length) {
clearInterval(timer.timerId);
}
}, 13);
}
},
/**
* Run a single step in the animation
* @param {Boolean} gotoEnd Whether to go to then endpoint of the animation after abort
* @returns {Boolean} True if animation continues
*/
step: function(gotoEnd) {
var t = +new Date(),
ret,
done,
options = this.options,
elem = this.elem,
complete = options.complete,
duration = options.duration,
curAnim = options.curAnim,
i;
if (elem.attr && !elem.element) { // #2616, element including flag is destroyed
ret = false;
} else if (gotoEnd || t >= duration + this.startTime) {
this.now = this.end;
this.pos = 1;
this.update();
curAnim[this.prop] = true;
done = true;
for (i in curAnim) {
if (curAnim[i] !== true) {
done = false;
}
}
if (done && complete) {
complete.call(elem);
}
ret = false;
} else {
this.pos = options.easing((t - this.startTime) / duration);
this.now = this.start + ((this.end - this.start) * this.pos);
this.update();
ret = true;
}
return ret;
},
/**
* Prepare start and end values so that the path can be animated one to one
*/
initPath: function(elem, fromD, toD) {
fromD = fromD || '';
var shift,
startX = elem.startX,
endX = elem.endX,
bezier = fromD.indexOf('C') > -1,
numParams = bezier ? 7 : 3,
fullLength,
slice,
i,
start = fromD.split(' '),
end = toD.slice(), // copy
isArea = elem.isArea,
positionFactor = isArea ? 2 : 1,
reverse;
/**
* In splines make move points have six parameters like bezier curves
*/
function sixify(arr) {
i = arr.length;
while (i--) {
if (arr[i] === 'M' || arr[i] === 'L') {
arr.splice(i + 1, 0, arr[i + 1], arr[i + 2], arr[i + 1], arr[i + 2]);
}
}
}
/**
* Insert an array at the given position of another array
*/
function insertSlice(arr, subArr, index) {
[].splice.apply(
arr, [index, 0].concat(subArr)
);
}
/**
* If shifting points, prepend a dummy point to the end path.
*/
function prepend(arr, other) {
while (arr.length < fullLength) {
// Move to, line to or curve to?
arr[0] = other[fullLength - arr.length];
// Prepend a copy of the first point
insertSlice(arr, arr.slice(0, numParams), 0);
// For areas, the bottom path goes back again to the left, so we need
// to append a copy of the last point.
if (isArea) {
insertSlice(arr, arr.slice(arr.length - numParams), arr.length);
i--;
}
}
arr[0] = 'M';
}
/**
* Copy and append last point until the length matches the end length
*/
function append(arr, other) {
var i = (fullLength - arr.length) / numParams;
while (i > 0 && i--) {
// Pull out the slice that is going to be appended or inserted. In a line graph,
// the positionFactor is 1, and the last point is sliced out. In an area graph,
// the positionFactor is 2, causing the middle two points to be sliced out, since
// an area path starts at left, follows the upper path then turns and follows the
// bottom back.
slice = arr.slice().splice(
(arr.length / positionFactor) - numParams,
numParams * positionFactor
);
// Move to, line to or curve to?
slice[0] = other[fullLength - numParams - (i * numParams)];
// Disable first control point
if (bezier) {
slice[numParams - 6] = slice[numParams - 2];
slice[numParams - 5] = slice[numParams - 1];
}
// Now insert the slice, either in the middle (for areas) or at the end (for lines)
insertSlice(arr, slice, arr.length / positionFactor);
if (isArea) {
i--;
}
}
}
if (bezier) {
sixify(start);
sixify(end);
}
// For sideways animation, find out how much we need to shift to get the start path Xs
// to match the end path Xs.
if (startX && endX) {
for (i = 0; i < startX.length; i++) {
if (startX[i] === endX[0]) { // Moving left, new points coming in on right
shift = i;
break;
} else if (startX[0] === endX[endX.length - startX.length + i]) { // Moving right
shift = i;
reverse = true;
break;
}
}
if (shift === undefined) {
start = [];
}
}
if (start.length && H.isNumber(shift)) {
// The common target length for the start and end array, where both
// arrays are padded in opposite ends
fullLength = end.length + shift * positionFactor * numParams;
if (!reverse) {
prepend(end, start);
append(start, end);
} else {
prepend(start, end);
append(end, start);
}
}
return [start, end];
}
}; // End of Fx prototype
/**
* Extend an object with the members of another
* @param {Object} a The object to be extended
* @param {Object} b The object to add to the first one
*/
H.extend = function(a, b) {
var n;
if (!a) {
a = {};
}
for (n in b) {
a[n] = b[n];
}
return a;
};
/**
* Deep merge two or more objects and return a third object. If the first argument is
* true, the contents of the second object is copied into the first object.
* Previously this function redirected to jQuery.extend(true), but this had two limitations.
* First, it deep merged arrays, which lead to workarounds in Highcharts. Second,
* it copied properties from extended prototypes.
*/
H.merge = function() {
var i,
args = arguments,
len,
ret = {},
doCopy = function(copy, original) {
var value, key;
// An object is replacing a primitive
if (typeof copy !== 'object') {
copy = {};
}
for (key in original) {
if (original.hasOwnProperty(key)) {
value = original[key];
// Copy the contents of objects, but not arrays or DOM nodes
if (H.isObject(value, true) &&
key !== 'renderTo' && typeof value.nodeType !== 'number') {
copy[key] = doCopy(copy[key] || {}, value);
// Primitives and arrays are copied over directly
} else {
copy[key] = original[key];
}
}
}
return copy;
};
// If first argument is true, copy into the existing object. Used in setOptions.
if (args[0] === true) {
ret = args[1];
args = Array.prototype.slice.call(args, 2);
}
// For each argument, extend the return
len = args.length;
for (i = 0; i < len; i++) {
ret = doCopy(ret, args[i]);
}
return ret;
};
/**
* Shortcut for parseInt
* @param {Object} s
* @param {Number} mag Magnitude
*/
H.pInt = function(s, mag) {
return parseInt(s, mag || 10);
};
/**
* Check for string
* @param {Object} s
*/
H.isString = function(s) {
return typeof s === 'string';
};
        /**
         * Check for array
         * @param {Object} obj
         */
H.isArray = function(obj) {
var str = Object.prototype.toString.call(obj);
return str === '[object Array]' || str === '[object Array Iterator]';
};
        /**
         * Check for object
         * @param {Object} obj
         * @param {Boolean} strict Also checks that the object is not an array
         */
H.isObject = function(obj, strict) {
return obj && typeof obj === 'object' && (!strict || !H.isArray(obj));
};
/**
* Check for number
* @param {Object} n
*/
H.isNumber = function(n) {
return typeof n === 'number' && !isNaN(n);
};
/**
         * Remove last occurrence of an item from an array
* @param {Array} arr
* @param {Mixed} item
*/
H.erase = function(arr, item) {
var i = arr.length;
while (i--) {
if (arr[i] === item) {
arr.splice(i, 1);
break;
}
}
//return arr;
};
/**
* Returns true if the object is not null or undefined.
* @param {Object} obj
*/
H.defined = function(obj) {
return obj !== undefined && obj !== null;
};
/**
* Set or get an attribute or an object of attributes. Can't use jQuery attr because
* it attempts to set expando properties on the SVG element, which is not allowed.
*
* @param {Object} elem The DOM element to receive the attribute(s)
         * @param {String|Object} prop The property or an object of key-value pairs
* @param {String} value The value if a single property is set
*/
H.attr = function(elem, prop, value) {
var key,
ret;
// if the prop is a string
if (H.isString(prop)) {
// set the value
if (H.defined(value)) {
elem.setAttribute(prop, value);
// get the value
} else if (elem && elem.getAttribute) { // elem not defined when printing pie demo...
ret = elem.getAttribute(prop);
}
// else if prop is defined, it is a hash of key/value pairs
} else if (H.defined(prop) && H.isObject(prop)) {
for (key in prop) {
elem.setAttribute(key, prop[key]);
}
}
return ret;
};
/**
* Check if an element is an array, and if not, make it into an array.
*/
H.splat = function(obj) {
return H.isArray(obj) ? obj : [obj];
};
/**
* Set a timeout if the delay is given, otherwise perform the function synchronously
* @param {Function} fn The function to perform
* @param {Number} delay Delay in milliseconds
         * @param {Object} context The context
         * @returns {Number} An identifier for the timeout
*/
H.syncTimeout = function(fn, delay, context) {
if (delay) {
return setTimeout(fn, delay, context);
}
fn.call(0, context);
};
/**
* Return the first value that is defined.
*/
H.pick = function() {
var args = arguments,
i,
arg,
length = args.length;
for (i = 0; i < length; i++) {
arg = args[i];
if (arg !== undefined && arg !== null) {
return arg;
}
}
};
/**
* Set CSS on a given element
* @param {Object} el
* @param {Object} styles Style object with camel case property names
*/
H.css = function(el, styles) {
if (H.isMS && !H.svg) { // #2686
if (styles && styles.opacity !== undefined) {
styles.filter = 'alpha(opacity=' + (styles.opacity * 100) + ')';
}
}
H.extend(el.style, styles);
};
/**
* Utility function to create element with attributes and styles
* @param {Object} tag
* @param {Object} attribs
* @param {Object} styles
* @param {Object} parent
* @param {Object} nopad
*/
H.createElement = function(tag, attribs, styles, parent, nopad) {
var el = doc.createElement(tag),
css = H.css;
if (attribs) {
H.extend(el, attribs);
}
if (nopad) {
css(el, {
padding: 0,
border: 'none',
margin: 0
});
}
if (styles) {
css(el, styles);
}
if (parent) {
parent.appendChild(el);
}
return el;
};
/**
* Extend a prototyped class by new members
* @param {Object} parent
* @param {Object} members
*/
H.extendClass = function(Parent, members) {
var object = function() {};
object.prototype = new Parent();
H.extend(object.prototype, members);
return object;
};
/**
* Pad a string to a given length by adding 0 to the beginning
* @param {Number} number
* @param {Number} length
*/
H.pad = function(number, length, padder) {
return new Array((length || 2) + 1 - String(number).length).join(padder || 0) + number;
};
/**
* Return a length based on either the integer value, or a percentage of a base.
*/
H.relativeLength = function(value, base) {
return (/%$/).test(value) ? base * parseFloat(value) / 100 : parseFloat(value);
};
/**
* Wrap a method with extended functionality, preserving the original function
* @param {Object} obj The context object that the method belongs to
* @param {String} method The name of the method to extend
* @param {Function} func A wrapper function callback. This function is called with the same arguments
* as the original function, except that the original function is unshifted and passed as the first
* argument.
*/
H.wrap = function(obj, method, func) {
var proceed = obj[method];
obj[method] = function() {
var args = Array.prototype.slice.call(arguments);
args.unshift(proceed);
return func.apply(this, args);
};
};
H.getTZOffset = function(timestamp) {
var d = H.Date;
return ((d.hcGetTimezoneOffset && d.hcGetTimezoneOffset(timestamp)) || d.hcTimezoneOffset || 0) * 60000;
};
/**
* Based on http://www.php.net/manual/en/function.strftime.php
* @param {String} format
* @param {Number} timestamp
* @param {Boolean} capitalize
*/
H.dateFormat = function(format, timestamp, capitalize) {
if (!H.defined(timestamp) || isNaN(timestamp)) {
return H.defaultOptions.lang.invalidDate || '';
}
format = H.pick(format, '%Y-%m-%d %H:%M:%S');
var D = H.Date,
date = new D(timestamp - H.getTZOffset(timestamp)),
                key, // used in for construct below
// get the basic time values
hours = date[D.hcGetHours](),
day = date[D.hcGetDay](),
dayOfMonth = date[D.hcGetDate](),
month = date[D.hcGetMonth](),
fullYear = date[D.hcGetFullYear](),
lang = H.defaultOptions.lang,
langWeekdays = lang.weekdays,
shortWeekdays = lang.shortWeekdays,
pad = H.pad,
// List all format keys. Custom formats can be added from the outside.
replacements = H.extend({
// Day
'a': shortWeekdays ? shortWeekdays[day] : langWeekdays[day].substr(0, 3), // Short weekday, like 'Mon'
'A': langWeekdays[day], // Long weekday, like 'Monday'
'd': pad(dayOfMonth), // Two digit day of the month, 01 to 31
'e': pad(dayOfMonth, 2, ' '), // Day of the month, 1 through 31
'w': day,
// Week (none implemented)
//'W': weekNumber(),
// Month
'b': lang.shortMonths[month], // Short month, like 'Jan'
'B': lang.months[month], // Long month, like 'January'
'm': pad(month + 1), // Two digit month number, 01 through 12
// Year
'y': fullYear.toString().substr(2, 2), // Two digits year, like 09 for 2009
'Y': fullYear, // Four digits year, like 2009
// Time
'H': pad(hours), // Two digits hours in 24h format, 00 through 23
'k': hours, // Hours in 24h format, 0 through 23
'I': pad((hours % 12) || 12), // Two digits hours in 12h format, 00 through 11
'l': (hours % 12) || 12, // Hours in 12h format, 1 through 12
'M': pad(date[D.hcGetMinutes]()), // Two digits minutes, 00 through 59
'p': hours < 12 ? 'AM' : 'PM', // Upper case AM or PM
'P': hours < 12 ? 'am' : 'pm', // Lower case AM or PM
'S': pad(date.getSeconds()), // Two digits seconds, 00 through 59
'L': pad(Math.round(timestamp % 1000), 3) // Milliseconds (naming from Ruby)
}, H.dateFormats);
// do the replaces
for (key in replacements) {
while (format.indexOf('%' + key) !== -1) { // regex would do it in one line, but this is faster
format = format.replace(
'%' + key,
typeof replacements[key] === 'function' ?
replacements[key](timestamp) :
replacements[key]
);
}
}
// Optionally capitalize the string and return
return capitalize ? format.substr(0, 1).toUpperCase() + format.substr(1) : format;
};
/**
* Format a single variable. Similar to sprintf, without the % prefix.
*/
H.formatSingle = function(format, val) {
var floatRegex = /f$/,
decRegex = /\.([0-9])/,
lang = H.defaultOptions.lang,
decimals;
if (floatRegex.test(format)) { // float
decimals = format.match(decRegex);
decimals = decimals ? decimals[1] : -1;
if (val !== null) {
val = H.numberFormat(
val,
decimals,
lang.decimalPoint,
format.indexOf(',') > -1 ? lang.thousandsSep : ''
);
}
} else {
val = H.dateFormat(format, val);
}
return val;
};
/**
* Format a string according to a subset of the rules of Python's String.format method.
*/
H.format = function(str, ctx) {
var splitter = '{',
isInside = false,
segment,
valueAndFormat,
path,
i,
len,
ret = [],
val,
index;
while (str) {
index = str.indexOf(splitter);
if (index === -1) {
break;
}
segment = str.slice(0, index);
if (isInside) { // we're on the closing bracket looking back
valueAndFormat = segment.split(':');
path = valueAndFormat.shift().split('.'); // get first and leave format
len = path.length;
val = ctx;
// Assign deeper paths
for (i = 0; i < len; i++) {
val = val[path[i]];
}
// Format the replacement
if (valueAndFormat.length) {
val = H.formatSingle(valueAndFormat.join(':'), val);
}
// Push the result and advance the cursor
ret.push(val);
} else {
ret.push(segment);
}
str = str.slice(index + 1); // the rest
isInside = !isInside; // toggle
splitter = isInside ? '}' : '{'; // now look for next matching bracket
}
ret.push(str);
return ret.join('');
};
/**
* Get the magnitude of a number
*/
H.getMagnitude = function(num) {
return Math.pow(10, Math.floor(Math.log(num) / Math.LN10));
};
/**
* Take an interval and normalize it to multiples of 1, 2, 2.5 and 5
* @param {Number} interval
* @param {Array} multiples
* @param {Number} magnitude
* @param {Object} options
*/
H.normalizeTickInterval = function(interval, multiples, magnitude, allowDecimals, preventExceed) {
var normalized,
i,
retInterval = interval;
// round to a tenfold of 1, 2, 2.5 or 5
magnitude = H.pick(magnitude, 1);
normalized = interval / magnitude;
// multiples for a linear scale
if (!multiples) {
multiples = [1, 2, 2.5, 5, 10];
// the allowDecimals option
if (allowDecimals === false) {
if (magnitude === 1) {
multiples = [1, 2, 5, 10];
} else if (magnitude <= 0.1) {
multiples = [1 / magnitude];
}
}
}
// normalize the interval to the nearest multiple
for (i = 0; i < multiples.length; i++) {
retInterval = multiples[i];
if ((preventExceed && retInterval * magnitude >= interval) || // only allow tick amounts smaller than natural
(!preventExceed && (normalized <= (multiples[i] + (multiples[i + 1] || multiples[i])) / 2))) {
break;
}
}
// multiply back to the correct magnitude
retInterval *= magnitude;
return retInterval;
};
/**
* Utility method that sorts an object array and keeping the order of equal items.
* ECMA script standard does not specify the behaviour when items are equal.
*/
H.stableSort = function(arr, sortFunction) {
var length = arr.length,
sortValue,
i;
// Add index to each item
for (i = 0; i < length; i++) {
arr[i].safeI = i; // stable sort index
}
arr.sort(function(a, b) {
sortValue = sortFunction(a, b);
return sortValue === 0 ? a.safeI - b.safeI : sortValue;
});
// Remove index from items
for (i = 0; i < length; i++) {
delete arr[i].safeI; // stable sort index
}
};
/**
* Non-recursive method to find the lowest member of an array. Math.min raises a maximum
* call stack size exceeded error in Chrome when trying to apply more than 150.000 points. This
* method is slightly slower, but safe.
*/
H.arrayMin = function(data) {
var i = data.length,
min = data[0];
while (i--) {
if (data[i] < min) {
min = data[i];
}
}
return min;
};
/**
         * Non-recursive method to find the highest member of an array. Math.max raises a maximum
* call stack size exceeded error in Chrome when trying to apply more than 150.000 points. This
* method is slightly slower, but safe.
*/
H.arrayMax = function(data) {
var i = data.length,
max = data[0];
while (i--) {
if (data[i] > max) {
max = data[i];
}
}
return max;
};
/**
* Utility method that destroys any SVGElement or VMLElement that are properties on the given object.
* It loops all properties and invokes destroy if there is a destroy method. The property is
* then delete'ed.
* @param {Object} The object to destroy properties on
* @param {Object} Exception, do not destroy this property, only delete it.
*/
H.destroyObjectProperties = function(obj, except) {
var n;
for (n in obj) {
// If the object is non-null and destroy is defined
if (obj[n] && obj[n] !== except && obj[n].destroy) {
// Invoke the destroy
obj[n].destroy();
}
// Delete the property from the object.
delete obj[n];
}
};
/**
* Discard an element by moving it to the bin and delete
* @param {Object} The HTML node to discard
*/
H.discardElement = function(element) {
var garbageBin = H.garbageBin;
// create a garbage bin element, not part of the DOM
if (!garbageBin) {
garbageBin = H.createElement('div');
}
// move the node and empty bin
if (element) {
garbageBin.appendChild(element);
}
garbageBin.innerHTML = '';
};
/**
* Fix JS round off float errors
* @param {Number} num
*/
H.correctFloat = function(num, prec) {
return parseFloat(
num.toPrecision(prec || 14)
);
};
/**
* Set the global animation to either a given value, or fall back to the
* given chart's animation option
* @param {Object} animation
* @param {Object} chart
*/
H.setAnimation = function(animation, chart) {
chart.renderer.globalAnimation = H.pick(animation, chart.options.chart.animation, true);
};
/**
* Get the animation in object form, where a disabled animation is always
* returned with duration: 0
*/
H.animObject = function(animation) {
return H.isObject(animation) ? H.merge(animation) : {
duration: animation ? 500 : 0
};
};
/**
* The time unit lookup
*/
H.timeUnits = {
millisecond: 1,
second: 1000,
minute: 60000,
hour: 3600000,
day: 24 * 3600000,
week: 7 * 24 * 3600000,
month: 28 * 24 * 3600000,
year: 364 * 24 * 3600000
};
/**
* Format a number and return a string based on input settings
* @param {Number} number The input number to format
* @param {Number} decimals The amount of decimals
* @param {String} decimalPoint The decimal point, defaults to the one given in the lang options
* @param {String} thousandsSep The thousands separator, defaults to the one given in the lang options
*/
H.numberFormat = function(number, decimals, decimalPoint, thousandsSep) {
number = +number || 0;
decimals = +decimals;
var lang = H.defaultOptions.lang,
origDec = (number.toString().split('.')[1] || '').length,
decimalComponent,
strinteger,
thousands,
absNumber = Math.abs(number),
ret;
if (decimals === -1) {
decimals = Math.min(origDec, 20); // Preserve decimals. Not huge numbers (#3793).
} else if (!H.isNumber(decimals)) {
decimals = 2;
}
// A string containing the positive integer component of the number
strinteger = String(H.pInt(absNumber.toFixed(decimals)));
// Leftover after grouping into thousands. Can be 0, 1 or 3.
thousands = strinteger.length > 3 ? strinteger.length % 3 : 0;
// Language
decimalPoint = H.pick(decimalPoint, lang.decimalPoint);
thousandsSep = H.pick(thousandsSep, lang.thousandsSep);
// Start building the return
ret = number < 0 ? '-' : '';
// Add the leftover after grouping into thousands. For example, in the number 42 000 000,
// this line adds 42.
ret += thousands ? strinteger.substr(0, thousands) + thousandsSep : '';
// Add the remaining thousands groups, joined by the thousands separator
ret += strinteger.substr(thousands).replace(/(\d{3})(?=\d)/g, '$1' + thousandsSep);
// Add the decimal point and the decimal component
if (decimals) {
// Get the decimal component, and add power to avoid rounding errors with float numbers (#4573)
decimalComponent = Math.abs(absNumber - strinteger + Math.pow(10, -Math.max(decimals, origDec) - 1));
ret += decimalPoint + decimalComponent.toFixed(decimals).slice(2);
}
return ret;
};
/**
* Easing definition
* @param {Number} pos Current position, ranging from 0 to 1
*/
Math.easeInOutSine = function(pos) {
return -0.5 * (Math.cos(Math.PI * pos) - 1);
};
/**
* Internal method to return CSS value for given element and property
*/
H.getStyle = function(el, prop) {
var style;
// For width and height, return the actual inner pixel size (#4913)
if (prop === 'width') {
return Math.min(el.offsetWidth, el.scrollWidth) -
H.getStyle(el, 'padding-left') -
H.getStyle(el, 'padding-right');
} else if (prop === 'height') {
return Math.min(el.offsetHeight, el.scrollHeight) -
H.getStyle(el, 'padding-top') -
H.getStyle(el, 'padding-bottom');
}
// Otherwise, get the computed style
style = win.getComputedStyle(el, undefined);
return style && H.pInt(style.getPropertyValue(prop));
};
/**
* Return the index of an item in an array, or -1 if not found
*/
H.inArray = function(item, arr) {
return arr.indexOf ? arr.indexOf(item) : [].indexOf.call(arr, item);
};
/**
* Filter an array
*/
H.grep = function(elements, callback) {
return [].filter.call(elements, callback);
};
/**
* Map an array
*/
H.map = function(arr, fn) {
var results = [],
i = 0,
len = arr.length;
for (; i < len; i++) {
results[i] = fn.call(arr[i], arr[i], i, arr);
}
return results;
};
/**
* Get the element's offset position, corrected by overflow:auto.
*/
H.offset = function(el) {
var docElem = doc.documentElement,
box = el.getBoundingClientRect();
return {
top: box.top + (win.pageYOffset || docElem.scrollTop) - (docElem.clientTop || 0),
left: box.left + (win.pageXOffset || docElem.scrollLeft) - (docElem.clientLeft || 0)
};
};
/**
* Stop running animation.
* A possible extension to this would be to stop a single property, when
* we want to continue animating others. Then assign the prop to the timer
* in the Fx.run method, and check for the prop here. This would be an improvement
* in all cases where we stop the animation from .attr. Instead of stopping
* everything, we can just stop the actual attributes we're setting.
*/
H.stop = function(el) {
var i = timers.length;
// Remove timers related to this element (#4519)
while (i--) {
if (timers[i].elem === el) {
timers[i].stopped = true; // #4667
}
}
};
/**
* Utility for iterating over an array.
* @param {Array} arr
* @param {Function} fn
*/
H.each = function(arr, fn, ctx) { // modern browsers
return Array.prototype.forEach.call(arr, fn, ctx);
};
/**
* Add an event listener
*/
H.addEvent = function(el, type, fn) {
var events = el.hcEvents = el.hcEvents || {};
function wrappedFn(e) {
e.target = e.srcElement || win; // #2820
fn.call(el, e);
}
// Handle DOM events in modern browsers
if (el.addEventListener) {
el.addEventListener(type, fn, false);
// Handle old IE implementation
} else if (el.attachEvent) {
if (!el.hcEventsIE) {
el.hcEventsIE = {};
}
// Link wrapped fn with original fn, so we can get this in removeEvent
el.hcEventsIE[fn.toString()] = wrappedFn;
el.attachEvent('on' + type, wrappedFn);
}
if (!events[type]) {
events[type] = [];
}
events[type].push(fn);
};
/**
* Remove event added with addEvent
*/
H.removeEvent = function(el, type, fn) {
var events,
hcEvents = el.hcEvents,
index;
function removeOneEvent(type, fn) {
if (el.removeEventListener) {
el.removeEventListener(type, fn, false);
} else if (el.attachEvent) {
fn = el.hcEventsIE[fn.toString()];
el.detachEvent('on' + type, fn);
}
}
function removeAllEvents() {
var types,
len,
n;
if (!el.nodeName) {
return; // break on non-DOM events
}
if (type) {
types = {};
types[type] = true;
} else {
types = hcEvents;
}
for (n in types) {
if (hcEvents[n]) {
len = hcEvents[n].length;
while (len--) {
removeOneEvent(n, hcEvents[n][len]);
}
}
}
}
if (hcEvents) {
if (type) {
events = hcEvents[type] || [];
if (fn) {
index = H.inArray(fn, events);
if (index > -1) {
events.splice(index, 1);
hcEvents[type] = events;
}
removeOneEvent(type, fn);
} else {
removeAllEvents();
hcEvents[type] = [];
}
} else {
removeAllEvents();
el.hcEvents = {};
}
}
};
/**
* Fire an event on a custom object
*/
H.fireEvent = function(el, type, eventArguments, defaultFunction) {
var e,
hcEvents = el.hcEvents,
events,
len,
i,
fn;
eventArguments = eventArguments || {};
if (doc.createEvent && (el.dispatchEvent || el.fireEvent)) {
e = doc.createEvent('Events');
e.initEvent(type, true, true);
//e.target = el;
H.extend(e, eventArguments);
if (el.dispatchEvent) {
el.dispatchEvent(e);
} else {
el.fireEvent(type, e);
}
} else if (hcEvents) {
events = hcEvents[type] || [];
len = events.length;
if (!eventArguments.target) { // We're running a custom event
H.extend(eventArguments, {
// Attach a simple preventDefault function to skip default handler if called.
// The built-in defaultPrevented property is not overwritable (#5112)
preventDefault: function() {
eventArguments.defaultPrevented = true;
},
// Setting target to native events fails with clicking the zoom-out button in Chrome.
target: el,
// If the type is not set, we're running a custom event (#2297). If it is set,
// we're running a browser event, and setting it will cause en error in
// IE8 (#2465).
type: type
});
}
for (i = 0; i < len; i++) {
fn = events[i];
// If the event handler return false, prevent the default handler from executing
if (fn && fn.call(el, eventArguments) === false) {
eventArguments.preventDefault();
}
}
}
// Run the default if not prevented
if (defaultFunction && !eventArguments.defaultPrevented) {
defaultFunction(eventArguments);
}
};
/**
* The global animate method, which uses Fx to create individual animators.
*/
H.animate = function(el, params, opt) {
var start,
unit = '',
end,
fx,
args,
prop;
if (!H.isObject(opt)) { // Number or undefined/null
args = arguments;
opt = {
duration: args[2],
easing: args[3],
complete: args[4]
};
}
if (!H.isNumber(opt.duration)) {
opt.duration = 400;
}
opt.easing = typeof opt.easing === 'function' ? opt.easing : (Math[opt.easing] || Math.easeInOutSine);
opt.curAnim = H.merge(params);
for (prop in params) {
fx = new H.Fx(el, opt, prop);
end = null;
if (prop === 'd') {
fx.paths = fx.initPath(
el,
el.d,
params.d
);
fx.toD = params.d;
start = 0;
end = 1;
} else if (el.attr) {
start = el.attr(prop);
} else {
start = parseFloat(H.getStyle(el, prop)) || 0;
if (prop !== 'opacity') {
unit = 'px';
}
}
if (!end) {
end = params[prop];
}
if (end.match && end.match('px')) {
end = end.replace(/px/g, ''); // #4351
}
fx.run(start, end, unit);
}
};
/**
* The series type factory.
*
* @param {string} type The series type name.
* @param {string} parent The parent series type name.
* @param {object} options The additional default options that is merged with the parent's options.
* @param {object} props The properties (functions and primitives) to set on the new prototype.
* @param {object} pointProps Members for a series-specific Point prototype if needed.
*/
H.seriesType = function(type, parent, options, props, pointProps) { // docs: add to API + extending Highcharts
var defaultOptions = H.getOptions(),
seriesTypes = H.seriesTypes;
// Merge the options
defaultOptions.plotOptions[type] = H.merge(
defaultOptions.plotOptions[parent],
options
);
// Create the class
seriesTypes[type] = H.extendClass(seriesTypes[parent] || function() {}, props);
seriesTypes[type].prototype.type = type;
// Create the point class if needed
if (pointProps) {
seriesTypes[type].prototype.pointClass = H.extendClass(H.Point, pointProps);
}
return seriesTypes[type];
};
/**
* Register Highcharts as a plugin in jQuery
*/
if (win.jQuery) {
win.jQuery.fn.highcharts = function() {
var args = [].slice.call(arguments);
if (this[0]) { // this[0] is the renderTo div
// Create the chart
if (args[0]) {
new H[ // eslint-disable-line no-new
H.isString(args[0]) ? args.shift() : 'Chart' // Constructor defaults to Chart
](this[0], args[0], args[1]);
return this;
}
// When called without parameters or with the return argument, return an existing chart
return charts[H.attr(this[0], 'data-highcharts-chart')];
}
};
}
/**
* Compatibility section to add support for legacy IE. This can be removed if old IE
* support is not needed.
*/
if (doc && !doc.defaultView) {
H.getStyle = function(el, prop) {
var val,
alias = {
width: 'clientWidth',
height: 'clientHeight'
}[prop];
if (el.style[prop]) {
return H.pInt(el.style[prop]);
}
if (prop === 'opacity') {
prop = 'filter';
}
// Getting the rendered width and height
if (alias) {
el.style.zoom = 1;
return Math.max(el[alias] - 2 * H.getStyle(el, 'padding'), 0);
}
val = el.currentStyle[prop.replace(/\-(\w)/g, function(a, b) {
return b.toUpperCase();
})];
if (prop === 'filter') {
val = val.replace(
/alpha\(opacity=([0-9]+)\)/,
function(a, b) {
return b / 100;
}
);
}
return val === '' ? 1 : H.pInt(val);
};
}
if (!Array.prototype.forEach) {
H.each = function(arr, fn, ctx) { // legacy
var i = 0,
len = arr.length;
for (; i < len; i++) {
if (fn.call(ctx, arr[i], i, arr) === false) {
return i;
}
}
};
}
if (!Array.prototype.indexOf) {
H.inArray = function(item, arr) {
var len,
i = 0;
if (arr) {
len = arr.length;
for (; i < len; i++) {
if (arr[i] === item) {
return i;
}
}
}
return -1;
};
}
if (!Array.prototype.filter) {
H.grep = function(elements, fn) {
var ret = [],
i = 0,
length = elements.length;
for (; i < length; i++) {
if (fn(elements[i], i)) {
ret.push(elements[i]);
}
}
return ret;
};
}
//--- End compatibility section ---
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var each = H.each,
isNumber = H.isNumber,
map = H.map,
merge = H.merge,
pInt = H.pInt;
/**
* Handle color operations. The object methods are chainable.
* @param {String} input The input color in either rbga or hex format
*/
H.Color = function(input) {
// Backwards compatibility, allow instanciation without new
if (!(this instanceof H.Color)) {
return new H.Color(input);
}
// Initialize
this.init(input);
};
H.Color.prototype = {
// Collection of parsers. This can be extended from the outside by pushing parsers
// to Highcharts.Color.prototype.parsers.
parsers: [{
// RGBA color
regex: /rgba\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]?(?:\.[0-9]+)?)\s*\)/,
parse: function(result) {
return [pInt(result[1]), pInt(result[2]), pInt(result[3]), parseFloat(result[4], 10)];
}
}, {
// HEX color
regex: /#([a-fA-F0-9]{2})([a-fA-F0-9]{2})([a-fA-F0-9]{2})/,
parse: function(result) {
return [pInt(result[1], 16), pInt(result[2], 16), pInt(result[3], 16), 1];
}
}, {
// RGB color
regex: /rgb\(\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*,\s*([0-9]{1,3})\s*\)/,
parse: function(result) {
return [pInt(result[1]), pInt(result[2]), pInt(result[3]), 1];
}
}],
// Collection of named colors. Can be extended from the outside by adding colors
// to Highcharts.Color.prototype.names.
names: {
white: '#ffffff',
black: '#000000'
},
/**
* Parse the input color to rgba array
* @param {String} input
*/
init: function(input) {
var result,
rgba,
i,
parser;
this.input = input = this.names[input] || input;
// Gradients
if (input && input.stops) {
this.stops = map(input.stops, function(stop) {
return new H.Color(stop[1]);
});
// Solid colors
} else {
i = this.parsers.length;
while (i-- && !rgba) {
parser = this.parsers[i];
result = parser.regex.exec(input);
if (result) {
rgba = parser.parse(result);
}
}
}
this.rgba = rgba || [];
},
/**
* Return the color a specified format
* @param {String} format
*/
get: function(format) {
var input = this.input,
rgba = this.rgba,
ret;
if (this.stops) {
ret = merge(input);
ret.stops = [].concat(ret.stops);
each(this.stops, function(stop, i) {
ret.stops[i] = [ret.stops[i][0], stop.get(format)];
});
// it's NaN if gradient colors on a column chart
} else if (rgba && isNumber(rgba[0])) {
if (format === 'rgb' || (!format && rgba[3] === 1)) {
ret = 'rgb(' + rgba[0] + ',' + rgba[1] + ',' + rgba[2] + ')';
} else if (format === 'a') {
ret = rgba[3];
} else {
ret = 'rgba(' + rgba.join(',') + ')';
}
} else {
ret = input;
}
return ret;
},
/**
* Brighten the color
* @param {Number} alpha
*/
brighten: function(alpha) {
var i,
rgba = this.rgba;
if (this.stops) {
each(this.stops, function(stop) {
stop.brighten(alpha);
});
} else if (isNumber(alpha) && alpha !== 0) {
for (i = 0; i < 3; i++) {
rgba[i] += pInt(alpha * 255);
if (rgba[i] < 0) {
rgba[i] = 0;
}
if (rgba[i] > 255) {
rgba[i] = 255;
}
}
}
return this;
},
/**
* Set the color's opacity to a given alpha value
* @param {Number} alpha
*/
setOpacity: function(alpha) {
this.rgba[3] = alpha;
return this;
}
};
H.color = function(input) {
return new H.Color(input);
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var color = H.color,
each = H.each,
getTZOffset = H.getTZOffset,
isTouchDevice = H.isTouchDevice,
merge = H.merge,
pick = H.pick,
svg = H.svg,
win = H.win;
/* ****************************************************************************
* Handle the options *
*****************************************************************************/
H.defaultOptions = {
symbols: ['circle', 'diamond', 'square', 'triangle', 'triangle-down'],
lang: {
loading: 'Loading...',
months: ['January', 'February', 'March', 'April', 'May', 'June', 'July',
'August', 'September', 'October', 'November', 'December'
],
shortMonths: ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
weekdays: ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'],
// invalidDate: '',
decimalPoint: '.',
numericSymbols: ['k', 'M', 'G', 'T', 'P', 'E'], // SI prefixes used in axis labels
resetZoom: 'Reset zoom',
resetZoomTitle: 'Reset zoom level 1:1',
thousandsSep: ' '
},
global: {
useUTC: true,
//timezoneOffset: 0
},
chart: {
//animation: true,
//alignTicks: false,
//reflow: true,
//className: null,
//events: { load, selection },
//margin: [null],
//marginTop: null,
//marginRight: null,
//marginBottom: null,
//marginLeft: null,
borderRadius: 0,
colorCount: 10,
defaultSeriesType: 'line',
ignoreHiddenSeries: true,
//inverted: false,
spacing: [10, 10, 15, 10],
//spacingTop: 10,
//spacingRight: 10,
//spacingBottom: 15,
//spacingLeft: 10,
//zoomType: ''
resetZoomButton: {
theme: {
zIndex: 20
},
position: {
align: 'right',
x: -10,
//verticalAlign: 'top',
y: 10
}
// relativeTo: 'plot'
},
width: null,
height: null
},
defs: {
dropShadow: { // used by tooltip
tagName: 'filter',
id: 'drop-shadow',
opacity: 0.5,
children: [{
tagName: 'feGaussianBlur',
in: 'SourceAlpha',
stdDeviation: 1
}, {
tagName: 'feOffset',
dx: 1,
dy: 1
}, {
tagName: 'feComponentTransfer',
children: [{
tagName: 'feFuncA',
type: 'linear',
slope: 0.3
}]
}, {
tagName: 'feMerge',
children: [{
tagName: 'feMergeNode'
}, {
tagName: 'feMergeNode',
in: 'SourceGraphic'
}]
}]
},
style: {
tagName: 'style',
textContent: '.highcharts-tooltip{' +
'filter:url(#drop-shadow)' +
'}'
}
},
title: {
text: 'Chart title',
align: 'center',
// floating: false,
margin: 15,
// x: 0,
// verticalAlign: 'top',
// y: null,
widthAdjust: -44
},
subtitle: {
text: '',
align: 'center',
// floating: false
// x: 0,
// verticalAlign: 'top',
// y: null,
widthAdjust: -44
},
plotOptions: {},
labels: {
//items: [],
style: {
//font: defaultFont,
position: 'absolute',
color: '#333333'
}
},
legend: {
enabled: true,
align: 'center',
//floating: false,
layout: 'horizontal',
labelFormatter: function() {
return this.name;
},
//borderWidth: 0,
borderColor: '#999999',
borderRadius: 0,
navigation: {
// animation: true,
// arrowSize: 12
// style: {} // text styles
},
// margin: 20,
// reversed: false,
// backgroundColor: null,
/*style: {
padding: '5px'
},*/
itemCheckboxStyle: {
position: 'absolute',
width: '13px', // for IE precision
height: '13px'
},
// itemWidth: undefined,
squareSymbol: true,
// symbolRadius: 0,
// symbolWidth: 16,
symbolPadding: 5,
verticalAlign: 'bottom',
// width: undefined,
x: 0,
y: 0,
title: {
//text: null
}
},
loading: {
// hideDuration: 100,
// showDuration: 0
},
tooltip: {
enabled: true,
animation: svg,
//crosshairs: null,
borderRadius: 3,
dateTimeLabelFormats: {
millisecond: '%A, %b %e, %H:%M:%S.%L',
second: '%A, %b %e, %H:%M:%S',
minute: '%A, %b %e, %H:%M',
hour: '%A, %b %e, %H:%M',
day: '%A, %b %e, %Y',
week: 'Week from %A, %b %e, %Y',
month: '%B %Y',
year: '%Y'
},
footerFormat: '',
//formatter: defaultFormatter,
/* todo: em font-size when finished comparing against HC4
headerFormat: '<span style="font-size: 0.85em">{point.key}</span><br/>',
*/
padding: 8,
//shape: 'callout',
//shared: false,
snap: isTouchDevice ? 25 : 10,
headerFormat: '<span class="highcharts-header">{point.key}</span><br/>',
pointFormat: '<span class="highcharts-color-{point.colorIndex}">\u25CF</span> {series.name}: <b>{point.y}</b><br/>',
//xDateFormat: '%A, %b %e, %Y',
//valueDecimals: null,
//valuePrefix: '',
//valueSuffix: ''
},
credits: {
enabled: true,
href: 'http://www.highcharts.com',
position: {
align: 'right',
x: -10,
verticalAlign: 'bottom',
y: -5
},
text: 'Highcharts.com'
}
};
/**
* Set the time methods globally based on the useUTC option. Time method can be either
* local time or UTC (default).
*/
function setTimeMethods() {
var globalOptions = H.defaultOptions.global,
Date,
useUTC = globalOptions.useUTC,
GET = useUTC ? 'getUTC' : 'get',
SET = useUTC ? 'setUTC' : 'set';
H.Date = Date = globalOptions.Date || win.Date; // Allow using a different Date class
Date.hcTimezoneOffset = useUTC && globalOptions.timezoneOffset;
Date.hcGetTimezoneOffset = useUTC && globalOptions.getTimezoneOffset;
Date.hcMakeTime = function(year, month, date, hours, minutes, seconds) {
var d;
if (useUTC) {
d = Date.UTC.apply(0, arguments);
d += getTZOffset(d);
} else {
d = new Date(
year,
month,
pick(date, 1),
pick(hours, 0),
pick(minutes, 0),
pick(seconds, 0)
).getTime();
}
return d;
};
each(['Minutes', 'Hours', 'Day', 'Date', 'Month', 'FullYear'], function(s) {
Date['hcGet' + s] = GET + s;
});
each(['Milliseconds', 'Seconds', 'Minutes', 'Hours', 'Date', 'Month', 'FullYear'], function(s) {
Date['hcSet' + s] = SET + s;
});
}
/**
* Merge the default options with custom options and return the new options structure
* @param {Object} options The new custom options
*/
H.setOptions = function(options) {
// Copy in the default options
H.defaultOptions = merge(true, H.defaultOptions, options);
// Apply UTC
setTimeMethods();
return H.defaultOptions;
};
/**
* Get the updated default options. Until 3.0.7, merely exposing defaultOptions for outside modules
* wasn't enough because the setOptions method created a new object.
*/
H.getOptions = function() {
return H.defaultOptions;
};
// Series defaults
H.defaultPlotOptions = H.defaultOptions.plotOptions;
// set the default time methods
setTimeMethods();
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var SVGElement,
SVGRenderer,
addEvent = H.addEvent,
animate = H.animate,
attr = H.attr,
charts = H.charts,
color = H.color,
css = H.css,
createElement = H.createElement,
defined = H.defined,
deg2rad = H.deg2rad,
destroyObjectProperties = H.destroyObjectProperties,
doc = H.doc,
each = H.each,
extend = H.extend,
erase = H.erase,
grep = H.grep,
hasTouch = H.hasTouch,
isArray = H.isArray,
isFirefox = H.isFirefox,
isMS = H.isMS,
isObject = H.isObject,
isString = H.isString,
isWebKit = H.isWebKit,
merge = H.merge,
noop = H.noop,
pick = H.pick,
pInt = H.pInt,
removeEvent = H.removeEvent,
splat = H.splat,
stop = H.stop,
svg = H.svg,
SVG_NS = H.SVG_NS,
symbolSizes = H.symbolSizes,
win = H.win;
/**
* A wrapper object for SVG elements
*/
    SVGElement = H.SVGElement = function() {
        return this;
    };
    SVGElement.prototype = {
        // Default base for animation
        opacity: 1,
        SVG_NS: SVG_NS,
        // For labels, these CSS properties are applied to the <text> node directly
        textProps: ['direction', 'fontSize', 'fontWeight', 'fontFamily', 'fontStyle', 'color',
            'lineHeight', 'width', 'textDecoration', 'textOverflow', 'textShadow'
        ],
/**
* Initialize the SVG renderer
* @param {Object} renderer
* @param {String} nodeName
*/
init: function(renderer, nodeName) {
var wrapper = this;
wrapper.element = nodeName === 'span' ?
createElement(nodeName) :
doc.createElementNS(wrapper.SVG_NS, nodeName);
wrapper.renderer = renderer;
},
/**
* Animate a given attribute
* @param {Object} params
* @param {Number} options Options include duration, easing, step and complete
* @param {Function} complete Function to perform at the end of animation
*/
animate: function(params, options, complete) {
var animOptions = pick(options, this.renderer.globalAnimation, true);
stop(this); // stop regardless of animation actually running, or reverting to .attr (#607)
if (animOptions) {
if (complete) { // allows using a callback with the global animation without overwriting it
animOptions.complete = complete;
}
animate(this, params, animOptions);
} else {
this.attr(params, null, complete);
}
return this;
},
/**
* Build an SVG gradient out of a common JavaScript configuration object
*/
        colorGradient: function(color, prop, elem) {
            var renderer = this.renderer,
                colorObject,
                gradName,
                gradAttr,
                radAttr,
                gradients,
                gradientObject,
                stops,
                stopColor,
                stopOpacity,
                radialReference,
                n,
                id,
                key = [],
                value;
            // Apply linear or radial gradients
            if (color.linearGradient) {
                gradName = 'linearGradient';
            } else if (color.radialGradient) {
                gradName = 'radialGradient';
            }
            if (gradName) {
                gradAttr = color[gradName];
                gradients = renderer.gradients;
                stops = color.stops;
                radialReference = elem.radialReference;
                // Keep < 2.2 compatibility: accept coordinates as a flat array
                if (isArray(gradAttr)) {
                    color[gradName] = gradAttr = {
                        x1: gradAttr[0],
                        y1: gradAttr[1],
                        x2: gradAttr[2],
                        y2: gradAttr[3],
                        gradientUnits: 'userSpaceOnUse'
                    };
                }
                // Correct the radial gradient for the radial reference system
                if (gradName === 'radialGradient' && radialReference && !defined(gradAttr.gradientUnits)) {
                    radAttr = gradAttr; // Save the radial attributes for updating
                    gradAttr = merge(gradAttr,
                        renderer.getRadialAttr(radialReference, radAttr), {
                            gradientUnits: 'userSpaceOnUse'
                        }
                    );
                }
                // Build the unique key to detect whether we need to create a new element (#1282)
                for (n in gradAttr) {
                    if (n !== 'id') {
                        key.push(n, gradAttr[n]);
                    }
                }
                for (n in stops) {
                    key.push(stops[n]);
                }
                key = key.join(',');
                // Check if a gradient object with the same config object is created within this renderer
                if (gradients[key]) {
                    // Reuse the existing <linearGradient>/<radialGradient>
                    id = gradients[key].attr('id');
                } else {
                    // Set the id and create the element
                    gradAttr.id = id = 'highcharts-' + H.idCounter++;
                    gradients[key] = gradientObject = renderer.createElement(gradName)
                        .attr(gradAttr)
                        .add(renderer.defs);
                    gradientObject.radAttr = radAttr;
                    // The gradient needs to keep a list of stops to be able to destroy them
                    gradientObject.stops = [];
                    each(stops, function(stop) {
                        var stopObject;
                        // Split rgba stops into color + opacity, since SVG
                        // stops take them as separate attributes
                        if (stop[1].indexOf('rgba') === 0) {
                            colorObject = H.color(stop[1]);
                            stopColor = colorObject.get('rgb');
                            stopOpacity = colorObject.get('a');
                        } else {
                            stopColor = stop[1];
                            stopOpacity = 1;
                        }
                        stopObject = renderer.createElement('stop').attr({
                            offset: stop[0],
                            'stop-color': stopColor,
                            'stop-opacity': stopOpacity
                        }).add(gradientObject);
                        // Add the stop element to the gradient
                        gradientObject.stops.push(stopObject);
                    });
                }
                // Set the reference to the gradient object
                value = 'url(' + renderer.url + '#' + id + ')';
                elem.setAttribute(prop, value);
                elem.gradient = key;
                // Allow the color to be concatenated into tooltips formatters etc. (#2995)
                color.toString = function() {
                    return value;
                };
            }
        },
/**
* Apply a polyfill to the text-stroke CSS property, by copying the text element
* and apply strokes to the copy.
*
* Contrast checks at http://jsfiddle.net/highcharts/43soe9m1/2/
*/
        applyTextShadow: function(textShadow) {
            var elem = this.element,
                tspans,
                hasContrast = textShadow.indexOf('contrast') !== -1,
                styles = {},
                forExport = this.renderer.forExport,
                // IE10 and IE11 report textShadow in elem.style even though it doesn't work. Check
                // this again with new IE release. In exports, the rendering is passed to PhantomJS.
                supports = this.renderer.forExport || (elem.style.textShadow !== undefined && !isMS);
            // When the text shadow is set to contrast, use dark stroke for light text and vice versa
            if (hasContrast) {
                styles.textShadow = textShadow = textShadow.replace(/contrast/g, this.renderer.getContrast(elem.style.fill));
            }
            // Safari with retina displays as well as PhantomJS bug (#3974). Firefox does not tolerate this,
            // it removes the text shadows.
            if (isWebKit || forExport) {
                styles.textRendering = 'geometricPrecision';
            }
            /* Selective side-by-side testing in supported browser (http://jsfiddle.net/highcharts/73L1ptrh/)
            if (elem.textContent.indexOf('2.') === 0) {
                elem.style['text-shadow'] = 'none';
                supports = false;
            }
            // */
            // No reason to polyfill, we've got native support
            if (supports) {
                this.css(styles); // Apply altered textShadow or textRendering workaround
            } else {
                this.fakeTS = true; // Fake text shadow
                // In order to get the right y position of the clones,
                // copy over the y setter
                this.ySetter = this.xSetter;
                tspans = [].slice.call(elem.getElementsByTagName('tspan'));
                // One clone pass per comma-separated shadow definition
                each(textShadow.split(/\s?,\s?/g), function(textShadow) {
                    var firstChild = elem.firstChild,
                        color,
                        strokeWidth;
                    // Shadow definition format: "<dx> <dy> <blur> <color>";
                    // the last token is the color, second to last the width
                    textShadow = textShadow.split(' ');
                    color = textShadow[textShadow.length - 1];
                    // Approximately tune the settings to the text-shadow behaviour
                    strokeWidth = textShadow[textShadow.length - 2];
                    if (strokeWidth) {
                        each(tspans, function(tspan, y) {
                            var clone;
                            // Let the first line start at the correct X position
                            if (y === 0) {
                                tspan.setAttribute('x', elem.getAttribute('x'));
                                y = elem.getAttribute('y');
                                tspan.setAttribute('y', y || 0);
                                if (y === null) {
                                    elem.setAttribute('y', 0);
                                }
                            }
                            // Create the clone and apply shadow properties
                            clone = tspan.cloneNode(1);
                            attr(clone, {
                                'class': 'highcharts-text-shadow',
                                'fill': color,
                                'stroke': color,
                                'stroke-opacity': 1 / Math.max(pInt(strokeWidth), 3),
                                'stroke-width': strokeWidth,
                                'stroke-linejoin': 'round'
                            });
                            // Shadow clones go behind the real text
                            elem.insertBefore(clone, firstChild);
                        });
                    }
                });
            }
        },
/**
* Set or get a given attribute
* @param {Object|String} hash
* @param {Mixed|Undefined} val
*/
        attr: function(hash, val, complete) {
            var key,
                value,
                element = this.element,
                hasSetSymbolSize,
                ret = this,
                skipAttr,
                setter;
            // single key-value pair
            if (typeof hash === 'string' && val !== undefined) {
                key = hash;
                hash = {};
                hash[key] = val;
            }
            // used as a getter: first argument is a string, second is undefined
            if (typeof hash === 'string') {
                ret = (this[hash + 'Getter'] || this._defaultGetter).call(this, hash, element);
                // setter
            } else {
                for (key in hash) {
                    value = hash[key];
                    skipAttr = false;
                    // Size-affecting attributes of symbols are routed through
                    // symbolAttr once, which recomputes the path
                    if (this.symbolName && /^(x|y|width|height|r|start|end|innerR|anchorX|anchorY)/.test(key)) {
                        if (!hasSetSymbolSize) {
                            this.symbolAttr(hash);
                            hasSetSymbolSize = true;
                        }
                        skipAttr = true;
                    }
                    // Rotated text needs its transform rebuilt when moved
                    if (this.rotation && (key === 'x' || key === 'y')) {
                        this.doTransform = true;
                    }
                    if (!skipAttr) {
                        setter = this[key + 'Setter'] || this._defaultSetter;
                        setter.call(this, value, key, element);
                    }
                }
                // Update transform. Do this outside the loop to prevent redundant updating for batch setting
                // of attributes.
                if (this.doTransform) {
                    this.updateTransform();
                    this.doTransform = false;
                }
            }
            // In accordance with animate, run a complete callback
            if (complete) {
                complete();
            }
            return ret;
        },
/**
* Add a class name to an element
*/
addClass: function(className, replace) {
var currentClassName = this.attr('class') || '';
if (currentClassName.indexOf(className) === -1) {
if (!replace) {
className = (currentClassName + (currentClassName ? ' ' : '') + className).replace(' ', ' ');
}
this.attr('class', className);
}
return this;
},
hasClass: function(className) {
return attr(this.element, 'class').indexOf(className) !== -1;
},
removeClass: function(className) {
attr(this.element, 'class', (attr(this.element, 'class') || '').replace(className, ''));
return this;
},
/**
* If one of the symbol size affecting parameters are changed,
* check all the others only once for each call to an element's
* .attr() method
* @param {Object} hash
*/
symbolAttr: function(hash) {
var wrapper = this;
each(['x', 'y', 'r', 'start', 'end', 'width', 'height', 'innerR', 'anchorX', 'anchorY'], function(key) {
wrapper[key] = pick(hash[key], wrapper[key]);
});
wrapper.attr({
d: wrapper.renderer.symbols[wrapper.symbolName](
wrapper.x,
wrapper.y,
wrapper.width,
wrapper.height,
wrapper
)
});
},
/**
* Apply a clipping path to this object
* @param {String} id
*/
clip: function(clipRect) {
return this.attr('clip-path', clipRect ? 'url(' + this.renderer.url + '#' + clipRect.id + ')' : 'none');
},
/**
* Calculate the coordinates needed for drawing a rectangle crisply and return the
* calculated attributes
* @param {Number} strokeWidth
* @param {Number} x
* @param {Number} y
* @param {Number} width
* @param {Number} height
*/
        crisp: function(rect, strokeWidth) {
            var wrapper = this,
                key,
                attribs = {},
                normalizer;
            strokeWidth = strokeWidth || rect.strokeWidth || 0;
            // Odd stroke widths need a half-pixel offset to land on the pixel
            // grid. Math.round because strokeWidth can sometimes have roundoff errors
            normalizer = Math.round(strokeWidth) % 2 / 2;
            // normalize for crisp edges (mutates the passed rect in place)
            rect.x = Math.floor(rect.x || wrapper.x || 0) + normalizer;
            rect.y = Math.floor(rect.y || wrapper.y || 0) + normalizer;
            rect.width = Math.floor((rect.width || wrapper.width || 0) - 2 * normalizer);
            rect.height = Math.floor((rect.height || wrapper.height || 0) - 2 * normalizer);
            if (defined(rect.strokeWidth)) {
                rect.strokeWidth = strokeWidth;
            }
            for (key in rect) {
                if (wrapper[key] !== rect[key]) { // only set attribute if changed
                    wrapper[key] = attribs[key] = rect[key];
                }
            }
            // Return only the attributes that actually changed
            return attribs;
        },
/**
* Set styles for the element
* @param {Object} styles
*/
        css: function(styles) {
            var elemWrapper = this,
                oldStyles = elemWrapper.styles,
                newStyles = {},
                elem = elemWrapper.element,
                textWidth,
                n,
                serializedCss = '',
                hyphenate,
                hasNew = !oldStyles;
            // convert legacy: `color` maps to SVG `fill` for text
            if (styles && styles.color) {
                styles.fill = styles.color;
            }
            // Filter out existing styles to increase performance (#2640)
            if (oldStyles) {
                for (n in styles) {
                    if (styles[n] !== oldStyles[n]) {
                        newStyles[n] = styles[n];
                        hasNew = true;
                    }
                }
            }
            if (hasNew) {
                // A pixel width on a text element triggers word wrapping
                textWidth = elemWrapper.textWidth =
                    (styles && styles.width && elem.nodeName.toLowerCase() === 'text' && pInt(styles.width)) ||
                    elemWrapper.textWidth; // #3501
                // Merge the new styles with the old ones
                if (oldStyles) {
                    styles = extend(
                        oldStyles,
                        newStyles
                    );
                }
                // store object
                elemWrapper.styles = styles;
                if (textWidth && (!svg && elemWrapper.renderer.forExport)) {
                    delete styles.width;
                }
                // serialize and set style attribute
                if (isMS && !svg) {
                    // Legacy IE: apply via the style object directly
                    css(elemWrapper.element, styles);
                } else {
                    // Convert camelCase names to CSS dash-names
                    hyphenate = function(a, b) {
                        return '-' + b.toLowerCase();
                    };
                    for (n in styles) {
                        serializedCss += n.replace(/([A-Z])/g, hyphenate) + ':' + styles[n] + ';';
                    }
                    attr(elem, 'style', serializedCss); // #1881
                }
                // Rebuild text after added, so wrapping honors the new width
                if (elemWrapper.added && textWidth) {
                    elemWrapper.renderer.buildText(elemWrapper);
                }
            }
            return elemWrapper;
        },
/**
* Get a computed style
*/
getStyle: function(prop) {
return win.getComputedStyle(this.element || this, '').getPropertyValue(prop);
},
/**
* Get a computed style in pixel values
*/
        strokeWidth: function() {
            var val = this.getStyle('stroke-width'),
                ret,
                dummy;
            // Read pixel values directly
            if (val.indexOf('px') === val.length - 2) {
                ret = pInt(val);
                // Other values like em, pt etc need to be measured
            } else {
                // Render a throwaway rect of that width and measure its bBox
                dummy = doc.createElementNS(SVG_NS, 'rect');
                attr(dummy, {
                    'width': val,
                    'stroke-width': 0
                });
                this.element.parentNode.appendChild(dummy);
                ret = dummy.getBBox().width;
                dummy.parentNode.removeChild(dummy);
            }
            return ret;
        },
/**
* Add an event listener
* @param {String} eventType
* @param {Function} handler
*/
        on: function(eventType, handler) {
            var svgElement = this,
                element = svgElement.element;
            // touch: translate click to touchstart and suppress the
            // follow-up synthetic click
            if (hasTouch && eventType === 'click') {
                element.ontouchstart = function(e) {
                    svgElement.touchEventFired = Date.now();
                    e.preventDefault();
                    handler.call(element, e);
                };
                element.onclick = function(e) {
                    if (win.navigator.userAgent.indexOf('Android') === -1 || Date.now() - (svgElement.touchEventFired || 0) > 1100) { // #2269
                        handler.call(element, e);
                    }
                };
            } else {
                // simplest possible event model for internal use
                element['on' + eventType] = handler;
            }
            return this;
        },
/**
* Set the coordinates needed to draw a consistent radial gradient across
* pie slices regardless of positioning inside the chart. The format is
* [centerX, centerY, diameter] in pixels.
*/
setRadialReference: function(coordinates) {
var existingGradient = this.renderer.gradients[this.element.gradient];
this.element.radialReference = coordinates;
// On redrawing objects with an existing gradient, the gradient needs
// to be repositioned (#3801)
if (existingGradient && existingGradient.radAttr) {
existingGradient.animate(
this.renderer.getRadialAttr(
coordinates,
existingGradient.radAttr
)
);
}
return this;
},
/**
* Move an object and its children by x and y values
* @param {Number} x
* @param {Number} y
*/
translate: function(x, y) {
return this.attr({
translateX: x,
translateY: y
});
},
/**
* Invert a group, rotate and flip
*/
invert: function(inverted) {
var wrapper = this;
wrapper.inverted = inverted;
wrapper.updateTransform();
return wrapper;
},
/**
* Private method to update the transform attribute based on internal
* properties
*/
        updateTransform: function() {
            var wrapper = this,
                translateX = wrapper.translateX || 0,
                translateY = wrapper.translateY || 0,
                scaleX = wrapper.scaleX,
                scaleY = wrapper.scaleY,
                inverted = wrapper.inverted,
                rotation = wrapper.rotation,
                element = wrapper.element,
                transform;
            // flipping affects translate as adjustment for flipping around the group's axis
            if (inverted) {
                translateX += wrapper.attr('width');
                translateY += wrapper.attr('height');
            }
            // Apply translate. Nearly all transformed elements have translation, so instead
            // of checking for translate = 0, do it always (#1767, #1846).
            transform = ['translate(' + translateX + ',' + translateY + ')'];
            // apply rotation
            if (inverted) {
                transform.push('rotate(90) scale(-1,1)');
            } else if (rotation) { // text rotation, pivoting on the text's x/y
                transform.push('rotate(' + rotation + ' ' + (element.getAttribute('x') || 0) + ' ' + (element.getAttribute('y') || 0) + ')');
                // Delete bBox memo when the rotation changes
                //delete wrapper.bBox;
            }
            // apply scale
            if (defined(scaleX) || defined(scaleY)) {
                transform.push('scale(' + pick(scaleX, 1) + ' ' + pick(scaleY, 1) + ')');
            }
            if (transform.length) {
                element.setAttribute('transform', transform.join(' '));
            }
        },
/**
* Bring the element to the front
*/
toFront: function() {
var element = this.element;
element.parentNode.appendChild(element);
return this;
},
/**
* Break down alignment options like align, verticalAlign, x and y
* to x and y relative to the chart.
*
* @param {Object} alignOptions
* @param {Boolean} alignByTranslate
         * @param {String|Object} box The box to align to, needs a width and height. When the
* box is a string, it refers to an object in the Renderer. For example, when
* box is 'spacingBox', it refers to Renderer.spacingBox which holds width, height
* x and y properties.
*
*/
        align: function(alignOptions, alignByTranslate, box) {
            var align,
                vAlign,
                x,
                y,
                attribs = {},
                alignTo,
                renderer = this.renderer,
                alignedObjects = renderer.alignedObjects,
                alignFactor,
                vAlignFactor;
            // First call on instanciate: remember the options so resize can
            // re-run the alignment with no arguments
            if (alignOptions) {
                this.alignOptions = alignOptions;
                this.alignByTranslate = alignByTranslate;
                if (!box || isString(box)) { // boxes other than renderer handle this internally
                    this.alignTo = alignTo = box || 'renderer';
                    erase(alignedObjects, this); // prevent duplicates, like legendGroup after resize
                    alignedObjects.push(this);
                    box = null; // reassign it below
                }
                // When called on resize, no arguments are supplied
            } else {
                alignOptions = this.alignOptions;
                alignByTranslate = this.alignByTranslate;
                alignTo = this.alignTo;
            }
            // Resolve a string box name to the renderer's box object
            box = pick(box, renderer[alignTo], renderer);
            // Assign variables
            align = alignOptions.align;
            vAlign = alignOptions.verticalAlign;
            x = (box.x || 0) + (alignOptions.x || 0); // default: left align
            y = (box.y || 0) + (alignOptions.y || 0); // default: top align
            // Align: divisor 1 = full offset (right), 2 = half (center)
            if (align === 'right') {
                alignFactor = 1;
            } else if (align === 'center') {
                alignFactor = 2;
            }
            if (alignFactor) {
                x += (box.width - (alignOptions.width || 0)) / alignFactor;
            }
            attribs[alignByTranslate ? 'translateX' : 'x'] = Math.round(x);
            // Vertical align, same divisor scheme
            if (vAlign === 'bottom') {
                vAlignFactor = 1;
            } else if (vAlign === 'middle') {
                vAlignFactor = 2;
            }
            if (vAlignFactor) {
                y += (box.height - (alignOptions.height || 0)) / vAlignFactor;
            }
            attribs[alignByTranslate ? 'translateY' : 'y'] = Math.round(y);
            // Animate only if already placed
            this[this.placed ? 'animate' : 'attr'](attribs);
            this.placed = true;
            this.alignAttr = attribs;
            return this;
        },
/**
* Get the bounding box (width, height, x and y) for the element
*/
        getBBox: function(reload, rot) {
            var wrapper = this,
                bBox, // = wrapper.bBox,
                renderer = wrapper.renderer,
                width,
                height,
                rotation,
                rad,
                element = wrapper.element,
                styles = wrapper.styles,
                fontSize,
                textStr = wrapper.textStr,
                textShadow,
                elemStyle = element.style,
                toggleTextShadowShim,
                cache = renderer.cache,
                cacheKeys = renderer.cacheKeys,
                cacheKey;
            rotation = pick(rot, wrapper.rotation);
            rad = rotation * deg2rad;
            fontSize = element && SVGElement.prototype.getStyle.call(element, 'font-size');
            if (textStr !== undefined) {
                cacheKey =
                    // Since numbers are monospaced, and numerical labels appear a lot in a chart,
                    // we assume that a label of n characters has the same bounding box as others
                    // of the same length.
                    textStr.toString().replace(/[0-9]/g, '0') +
                    // Properties that affect bounding box
                    ['', rotation || 0, fontSize, element.style.width].join(',');
            }
            if (cacheKey && !reload) {
                bBox = cache[cacheKey];
            }
            // No cache found
            if (!bBox) {
                // SVG elements
                if (element.namespaceURI === wrapper.SVG_NS || renderer.forExport) {
                    try { // Fails in Firefox if the container has display: none.
                        // When the text shadow shim is used, we need to hide the fake shadows
                        // to get the correct bounding box (#3872)
                        toggleTextShadowShim = this.fakeTS && function(display) {
                            each(element.querySelectorAll('.highcharts-text-shadow'), function(tspan) {
                                tspan.style.display = display;
                            });
                        };
                        // Workaround for #3842, Firefox reporting wrong bounding box for shadows
                        if (isFirefox && elemStyle.textShadow) {
                            textShadow = elemStyle.textShadow;
                            elemStyle.textShadow = '';
                        } else if (toggleTextShadowShim) {
                            toggleTextShadowShim('none');
                        }
                        bBox = element.getBBox ?
                            // SVG: use extend because IE9 is not allowed to change width and height in case
                            // of rotation (below)
                            extend({}, element.getBBox()) :
                            // Legacy IE in export mode
                            {
                                width: element.offsetWidth,
                                height: element.offsetHeight
                            };
                        // #3842: restore the shadows hidden above
                        if (textShadow) {
                            elemStyle.textShadow = textShadow;
                        } else if (toggleTextShadowShim) {
                            toggleTextShadowShim('');
                        }
                    } catch (e) {}
                    // If the bBox is not set, the try-catch block above failed. The other condition
                    // is for Opera that returns a width of -Infinity on hidden elements.
                    if (!bBox || bBox.width < 0) {
                        bBox = {
                            width: 0,
                            height: 0
                        };
                    }
                    // VML Renderer or useHTML within SVG
                } else {
                    bBox = wrapper.htmlGetBBox();
                }
                // True SVG elements as well as HTML elements in modern browsers using the .useHTML option
                // need to compensated for rotation
                if (renderer.isSVG) {
                    width = bBox.width;
                    height = bBox.height;
                    // Workaround for wrong bounding box in IE9 and IE10 (#1101, #1505, #1669, #2568)
                    if (isMS && styles && styles.fontSize === '11px' && height.toPrecision(3) === '16.9') {
                        bBox.height = height = 14;
                    }
                    // Adjust for rotated text: axis-aligned box of the
                    // rotated rectangle
                    if (rotation) {
                        bBox.width = Math.abs(height * Math.sin(rad)) + Math.abs(width * Math.cos(rad));
                        bBox.height = Math.abs(height * Math.cos(rad)) + Math.abs(width * Math.sin(rad));
                    }
                }
                // Cache it. When loading a chart in a hidden iframe in Firefox and IE/Edge, the
                // bounding box height is 0, so don't cache it (#5620).
                if (cacheKey && bBox.height > 0) {
                    // Rotate (#4681): bound the cache to 250 entries, FIFO
                    while (cacheKeys.length > 250) {
                        delete cache[cacheKeys.shift()];
                    }
                    if (!cache[cacheKey]) {
                        cacheKeys.push(cacheKey);
                    }
                    cache[cacheKey] = bBox;
                }
            }
            return bBox;
        },
/**
* Show the element
*/
show: function(inherit) {
return this.attr({
visibility: inherit ? 'inherit' : 'visible'
});
},
/**
* Hide the element
*/
hide: function() {
return this.attr({
visibility: 'hidden'
});
},
fadeOut: function(duration) {
var elemWrapper = this;
elemWrapper.animate({
opacity: 0
}, {
duration: duration || 150,
complete: function() {
elemWrapper.attr({
y: -9999
}); // #3088, assuming we're only using this for tooltips
}
});
},
        /**
         * Add the element to the renderer's DOM tree.
         * @param {Object|Undefined} parent Can be an element, an element wrapper or undefined
         * to append the element to the renderer.box.
         * @returns {Object} the wrapper itself, for chaining.
         */
        add: function(parent) {
            var renderer = this.renderer,
                element = this.element,
                inserted;
            if (parent) {
                this.parentGroup = parent;
            }
            // mark as inverted
            this.parentInverted = parent && parent.inverted;
            // build formatted text (textStr may legitimately be '' or 0)
            if (this.textStr !== undefined) {
                renderer.buildText(this);
            }
            // Mark as added
            this.added = true;
            // If we're adding to renderer root, or other elements in the group
            // have a z index, we need to handle it
            if (!parent || parent.handleZ || this.zIndex) {
                inserted = this.zIndexSetter();
            }
            // If zIndex is not handled, append at the end
            if (!inserted) {
                (parent ? parent.element : renderer.box).appendChild(element);
            }
            // fire an event for internal hooks
            if (this.onAdd) {
                this.onAdd();
            }
            return this;
        },
/**
* Removes a child either by removeChild or move to garbageBin.
* Issue 490; in VML removeChild results in Orphaned nodes according to sIEve, discardElement does not.
*/
safeRemoveChild: function(element) {
var parentNode = element.parentNode;
if (parentNode) {
parentNode.removeChild(element);
}
},
        /**
         * Destroy the element and element wrapper: detach event handlers,
         * stop animations, remove the DOM node and clear every wrapper
         * property to break reference cycles. Returns null so callers can
         * clear their reference in one statement (el = el.destroy()).
         */
        destroy: function() {
            var wrapper = this,
                element = wrapper.element || {},
                // useHTML spans may live inside emulated group <div>s that
                // must be cleaned up as well (see loop below)
                parentToClean = wrapper.renderer.isSVG && element.nodeName === 'SPAN' && wrapper.parentGroup,
                grandParent,
                key,
                i;
            // remove events
            element.onclick = element.onmouseout = element.onmouseover = element.onmousemove = element.point = null;
            stop(wrapper); // stop running animations
            if (wrapper.clipPath) {
                wrapper.clipPath = wrapper.clipPath.destroy();
            }
            // Destroy stops in case this is a gradient object
            if (wrapper.stops) {
                for (i = 0; i < wrapper.stops.length; i++) {
                    wrapper.stops[i] = wrapper.stops[i].destroy();
                }
                wrapper.stops = null;
            }
            // remove element
            wrapper.safeRemoveChild(element);
            // In case of useHTML, clean up empty containers emulating SVG groups (#1960, #2393, #2697).
            while (parentToClean && parentToClean.div && parentToClean.div.childNodes.length === 0) {
                grandParent = parentToClean.parentGroup;
                wrapper.safeRemoveChild(parentToClean.div);
                delete parentToClean.div;
                parentToClean = grandParent;
            }
            // remove from alignObjects
            if (wrapper.alignTo) {
                erase(wrapper.renderer.alignedObjects, wrapper);
            }
            // Clear all own properties so the wrapper can be garbage collected
            for (key in wrapper) {
                delete wrapper[key];
            }
            return null;
        },
xGetter: function(key) {
if (this.element.nodeName === 'circle') {
if (key === 'x') {
key = 'cx';
} else if (key === 'y') {
key = 'cy';
}
}
return this._defaultGetter(key);
},
/**
* Get the current value of an attribute or pseudo attribute, used mainly
* for animation.
*/
_defaultGetter: function(key) {
var ret = pick(this[key], this.element ? this.element.getAttribute(key) : null, 0);
if (/^[\-0-9\.]+$/.test(ret)) { // is numerical
ret = parseFloat(ret);
}
return ret;
},
        /**
         * Setter for the `d` (path) attribute. Path arrays are joined into a
         * string; invalid paths (containing NaN, double spaces or empty) are
         * replaced by the harmless 'M 0 0'.
         */
        dSetter: function(value, key, element) {
            if (value && value.join) { // join path
                value = value.join(' ');
            }
            if (/(NaN| {2}|^$)/.test(value)) {
                value = 'M 0 0';
            }
            element.setAttribute(key, value);
            // Mirror the value on the wrapper for later reads and animation
            this[key] = value;
        },
alignSetter: function(value) {
var convert = {
left: 'start',
center: 'middle',
right: 'end'
};
this.element.setAttribute('text-anchor', convert[value]);
},
opacitySetter: function(value, key, element) {
this[key] = value;
element.setAttribute(key, value);
},
titleSetter: function(value) {
var titleNode = this.element.getElementsByTagName('title')[0];
if (!titleNode) {
titleNode = doc.createElementNS(this.SVG_NS, 'title');
this.element.appendChild(titleNode);
}
// Remove text content if it exists
if (titleNode.firstChild) {
titleNode.removeChild(titleNode.firstChild);
}
titleNode.appendChild(
doc.createTextNode(
(String(pick(value), '')).replace(/<[^>]*>/g, '') // #3276, #3895
)
);
},
textSetter: function(value) {
if (value !== this.textStr) {
// Delete bBox memo when the text changes
delete this.bBox;
this.textStr = value;
if (this.added) {
this.renderer.buildText(this);
}
}
},
fillSetter: function(value, key, element) {
if (typeof value === 'string') {
element.setAttribute(key, value);
} else if (value) {
this.colorGradient(value, key, element);
}
},
visibilitySetter: function(value, key, element) {
// IE9-11 doesn't handle visibilty:inherit well, so we remove the attribute instead (#2881, #3909)
if (value === 'inherit') {
element.removeAttribute(key);
} else {
element.setAttribute(key, value);
}
},
        /**
         * Setter for zIndex. Records the value, then — once the element has
         * been added — (re)inserts the DOM node among its siblings according
         * to zIndex order.
         * @returns {Boolean|undefined} true when the node was inserted before
         * a sibling; undefined when the caller should append it at the end.
         */
        zIndexSetter: function(value, key) {
            var renderer = this.renderer,
                parentGroup = this.parentGroup,
                parentWrapper = parentGroup || renderer,
                parentNode = parentWrapper.element || renderer.box,
                childNodes,
                otherElement,
                otherZIndex,
                element = this.element,
                inserted,
                run = this.added,
                i;
            if (defined(value)) {
                element.zIndex = value; // So we can read it for other elements in the group
                value = +value;
                if (this[key] === value) { // Only update when needed (#3865)
                    run = false;
                }
                this[key] = value;
            }
            // Insert according to this and other elements' zIndex. Before .add() is called,
            // nothing is done. Then on add, or by later calls to zIndexSetter, the node
            // is placed on the right place in the DOM.
            if (run) {
                value = this.zIndex;
                if (value && parentGroup) {
                    parentGroup.handleZ = true;
                }
                childNodes = parentNode.childNodes;
                for (i = 0; i < childNodes.length && !inserted; i++) {
                    otherElement = childNodes[i];
                    otherZIndex = otherElement.zIndex;
                    if (otherElement !== element && (
                            // Insert before the first element with a higher zIndex
                            pInt(otherZIndex) > value ||
                            // If no zIndex given, insert before the first element with a zIndex
                            (!defined(value) && defined(otherZIndex)) ||
                            // Negative zIndex versus no zIndex:
                            // On all levels except the highest. If the parent is <svg>,
                            // then we don't want to put items before <desc> or <defs>
                            (value < 0 && !defined(otherZIndex) && parentNode !== renderer.box)
                        )) {
                        parentNode.insertBefore(element, otherElement);
                        inserted = true;
                    }
                }
                if (!inserted) {
                    parentNode.appendChild(element);
                }
            }
            return inserted;
        },
        /**
         * Fallback setter for attributes without a dedicated setter: write
         * the value straight through to the DOM attribute of the same name.
         */
        _defaultSetter: function(value, key, element) {
            element.setAttribute(key, value);
        }
};
// Some shared setters and getters
SVGElement.prototype.yGetter = SVGElement.prototype.xGetter;
SVGElement.prototype.translateXSetter = SVGElement.prototype.translateYSetter =
SVGElement.prototype.rotationSetter = SVGElement.prototype.verticalAlignSetter =
SVGElement.prototype.scaleXSetter = SVGElement.prototype.scaleYSetter = function(value, key) {
this[key] = value;
this.doTransform = true;
};
/**
* The default SVG renderer
*/
SVGRenderer = H.SVGRenderer = function() {
this.init.apply(this, arguments);
};
SVGRenderer.prototype = {
Element: SVGElement,
SVG_NS: SVG_NS,
        /**
         * Initialize the SVGRenderer: create the root <svg> element, append
         * it to the container, and set up renderer-level state (url, defs,
         * caches) plus the Firefox subpixel workaround.
         * @param {Object} container - DOM element to render into
         * @param {Number} width
         * @param {Number} height
         * @param {Object} style - unused here; kept for signature parity
         * @param {Boolean} forExport - renderer is used for export
         * @param {Boolean} allowHTML - HTML text rendering is allowed
         */
        init: function(container, width, height, style, forExport, allowHTML) {
            var renderer = this,
                boxWrapper,
                element,
                desc;
            boxWrapper = renderer.createElement('svg')
                .attr({
                    'version': '1.1',
                    'class': 'highcharts-root'
                });
            element = boxWrapper.element;
            container.appendChild(element);
            // For browsers other than IE, add the namespace attribute (#1978)
            if (container.innerHTML.indexOf('xmlns') === -1) {
                attr(element, 'xmlns', this.SVG_NS);
            }
            // object properties
            renderer.isSVG = true;
            renderer.box = element;
            renderer.boxWrapper = boxWrapper;
            renderer.alignedObjects = [];
            // Page url used for internal references. #24, #672, #1070
            renderer.url = (isFirefox || isWebKit) && doc.getElementsByTagName('base').length ?
                win.location.href
                .replace(/#.*?$/, '') // remove the hash
                .replace(/([\('\)])/g, '\\$1') // escape parantheses and quotes
                .replace(/ /g, '%20') : // replace spaces (needed for Safari only)
                '';
            // Add description
            desc = this.createElement('desc').add();
            desc.element.appendChild(doc.createTextNode('Created with Highmaps 5.0.1'));
            renderer.defs = this.createElement('defs').add();
            renderer.allowHTML = allowHTML;
            renderer.forExport = forExport;
            renderer.gradients = {}; // Object where gradient SvgElements are stored
            renderer.cache = {}; // Cache for numerical bounding boxes
            renderer.cacheKeys = [];
            renderer.imgCount = 0;
            renderer.setSize(width, height, false);
            // Issue 110 workaround:
            // In Firefox, if a div is positioned by percentage, its pixel position may land
            // between pixels. The container itself doesn't display this, but an SVG element
            // inside this container will be drawn at subpixel precision. In order to draw
            // sharp lines, this must be compensated for. This doesn't seem to work inside
            // iframes though (like in jsFiddle).
            var subPixelFix, rect;
            if (isFirefox && container.getBoundingClientRect) {
                renderer.subPixelFix = subPixelFix = function() {
                    css(container, {
                        left: 0,
                        top: 0
                    });
                    rect = container.getBoundingClientRect();
                    css(container, {
                        left: (Math.ceil(rect.left) - rect.left) + 'px',
                        top: (Math.ceil(rect.top) - rect.top) + 'px'
                    });
                };
                // run the fix now
                subPixelFix();
                // run it on resize
                addEvent(win, 'resize', subPixelFix);
            }
        },
        /**
         * General method for adding a definition. Can be used for gradients, fills, filters etc.
         * Accepts a single config object or an array of configs; each config
         * may carry tagName, textContent, children and arbitrary attributes.
         *
         * @return SVGElement The inserted node
         */
        definition: function(def) {
            var ren = this;
            // Recursively build the node tree; returns the last node created
            // on each level (on the top level it's the only one).
            function recurse(config, parent) {
                var ret;
                each(splat(config), function(item) {
                    var node = ren.createElement(item.tagName),
                        key,
                        attr = {};
                    // Set attributes (everything except the structural keys)
                    for (key in item) {
                        if (key !== 'tagName' && key !== 'children' && key !== 'textContent') {
                            attr[key] = item[key];
                        }
                    }
                    node.attr(attr);
                    // Add to the tree; top level nodes go into the defs section
                    node.add(parent || ren.defs);
                    // Add text content
                    if (item.textContent) {
                        node.element.appendChild(doc.createTextNode(item.textContent));
                    }
                    // Recurse
                    recurse(item.children || [], node);
                    ret = node;
                });
                // Return last node added (on top level it's the only one)
                return ret;
            }
            return recurse(def);
        },
/**
* Detect whether the renderer is hidden. This happens when one of the parent elements
* has display: none. #608.
*/
isHidden: function() {
return !this.boxWrapper.getBBox().width;
},
        /**
         * Destroys the renderer and its allocated members: the root box
         * wrapper, gradients and defs. Returns null so the caller can clear
         * its reference in one statement.
         */
        destroy: function() {
            var renderer = this,
                rendererDefs = renderer.defs;
            renderer.box = null;
            renderer.boxWrapper = renderer.boxWrapper.destroy();
            // Call destroy on all gradient elements
            destroyObjectProperties(renderer.gradients || {});
            renderer.gradients = null;
            // Defs are null in VMLRenderer
            // Otherwise, destroy them here.
            if (rendererDefs) {
                renderer.defs = rendererDefs.destroy();
            }
            // Remove sub pixel fix handler
            // We need to check that there is a handler, otherwise all functions that are registered for event 'resize' are removed
            // See issue #982
            if (renderer.subPixelFix) {
                removeEvent(win, 'resize', renderer.subPixelFix);
            }
            renderer.alignedObjects = null;
            return null;
        },
/**
* Create a wrapper for an SVG element
* @param {Object} nodeName
*/
createElement: function(nodeName) {
var wrapper = new this.Element();
wrapper.init(this, nodeName);
return wrapper;
},
        /**
         * Dummy function for plugins. Intentionally a no-op here; plugins
         * or subrenderers may override it to hook into the draw phase.
         */
        draw: noop,
/**
* Get converted radial gradient attributes
*/
getRadialAttr: function(radialReference, gradAttr) {
return {
cx: (radialReference[0] - radialReference[2] / 2) + gradAttr.cx * radialReference[2],
cy: (radialReference[1] - radialReference[2] / 2) + gradAttr.cy * radialReference[2],
r: gradAttr.r * radialReference[2]
};
},
        /**
         * Parse a simple HTML string into SVG tspans, handling <b>/<strong>,
         * <i>/<em>, <span>, <a> and <br> markup, plus soft word-wrap and
         * ellipsis when a textWidth is set on the wrapper.
         *
         * @param {Object} wrapper The SVGElement wrapper around the parent text node
         */
        buildText: function(wrapper) {
            var textNode = wrapper.element,
                renderer = this,
                forExport = renderer.forExport,
                textStr = pick(wrapper.textStr, '').toString(),
                hasMarkup = textStr.indexOf('<') !== -1,
                lines,
                childNodes = textNode.childNodes,
                clsRegex,
                styleRegex,
                hrefRegex,
                wasTooLong,
                parentX = attr(textNode, 'x'),
                textStyles = wrapper.styles,
                width = wrapper.textWidth,
                textLineHeight = textStyles && textStyles.lineHeight,
                textShadow = textStyles && textStyles.textShadow,
                ellipsis = textStyles && textStyles.textOverflow === 'ellipsis',
                i = childNodes.length,
                tempParent = width && !wrapper.added && this.box,
                // Explicit lineHeight style wins; otherwise derive the line
                // height from the tspan's font metrics
                getLineHeight = function(tspan) {
                    var fontSizeStyle;
                    return textLineHeight ?
                        pInt(textLineHeight) :
                        renderer.fontMetrics(
                            fontSizeStyle,
                            tspan
                        ).h;
                },
                unescapeAngleBrackets = function(inputStr) {
                    return inputStr.replace(/&lt;/g, '<').replace(/&gt;/g, '>');
                };
            /// remove old text
            while (i--) {
                textNode.removeChild(childNodes[i]);
            }
            // Skip tspans, add text directly to text node. The forceTSpan is a hook
            // used in text outline hack.
            if (!hasMarkup && !textShadow && !ellipsis && !width && textStr.indexOf(' ') === -1) {
                textNode.appendChild(doc.createTextNode(unescapeAngleBrackets(textStr)));
                // Complex strings, add more logic
            } else {
                clsRegex = /<.*class="([^"]+)".*>/;
                styleRegex = /<.*style="([^"]+)".*>/;
                hrefRegex = /<.*href="(http[^"]+)".*>/;
                if (tempParent) {
                    tempParent.appendChild(textNode); // attach it to the DOM to read offset width
                }
                if (hasMarkup) {
                    // Normalize the supported markup to <span> wrappers, then
                    // split into lines on <br>
                    lines = textStr
                        .replace(/<(b|strong)>/g, '<span style="font-weight:bold">')
                        .replace(/<(i|em)>/g, '<span style="font-style:italic">')
                        .replace(/<a/g, '<span')
                        .replace(/<\/(b|strong|i|em|a)>/g, '</span>')
                        .split(/<br.*?>/g);
                } else {
                    lines = [textStr];
                }
                // Trim empty lines (#5261)
                lines = grep(lines, function(line) {
                    return line !== '';
                });
                // build the lines
                each(lines, function buildTextLines(line, lineNo) {
                    var spans,
                        spanNo = 0;
                    line = line
                        .replace(/^\s+|\s+$/g, '') // Trim to prevent useless/costly process on the spaces (#5258)
                        .replace(/<span/g, '|||<span')
                        .replace(/<\/span>/g, '</span>|||');
                    spans = line.split('|||');
                    each(spans, function buildTextSpans(span) {
                        if (span !== '' || spans.length === 1) {
                            var attributes = {},
                                tspan = doc.createElementNS(renderer.SVG_NS, 'tspan'),
                                spanCls,
                                spanStyle; // #390
                            if (clsRegex.test(span)) {
                                spanCls = span.match(clsRegex)[1];
                                attr(tspan, 'class', spanCls);
                            }
                            if (styleRegex.test(span)) {
                                // In SVG, text color is the `fill` property
                                spanStyle = span.match(styleRegex)[1].replace(/(;| |^)color([ :])/, '$1fill$2');
                                attr(tspan, 'style', spanStyle);
                            }
                            if (hrefRegex.test(span) && !forExport) { // Not for export - #1529
                                attr(tspan, 'onclick', 'location.href=\"' + span.match(hrefRegex)[1] + '\"');
                                css(tspan, {
                                    cursor: 'pointer'
                                });
                            }
                            span = unescapeAngleBrackets(span.replace(/<(.|\n)*?>/g, '') || ' ');
                            // Nested tags aren't supported, and cause crash in Safari (#1596)
                            if (span !== ' ') {
                                // add the text node
                                tspan.appendChild(doc.createTextNode(span));
                                if (!spanNo) { // first span in a line, align it to the left
                                    if (lineNo && parentX !== null) {
                                        attributes.x = parentX;
                                    }
                                } else {
                                    attributes.dx = 0; // #16
                                }
                                // add attributes
                                attr(tspan, attributes);
                                // Append it
                                textNode.appendChild(tspan);
                                // first span on subsequent line, add the line height
                                if (!spanNo && lineNo) {
                                    // allow getting the right offset height in exporting in IE
                                    if (!svg && forExport) {
                                        css(tspan, {
                                            display: 'block'
                                        });
                                    }
                                    // Set the line height based on the font size of either
                                    // the text element or the tspan element
                                    attr(
                                        tspan,
                                        'dy',
                                        getLineHeight(tspan)
                                    );
                                }
                                /*if (width) {
                                    renderer.breakText(wrapper, width);
                                }*/
                                // Check width and apply soft breaks or ellipsis
                                if (width) {
                                    var words = span.replace(/([^\^])-/g, '$1- ').split(' '), // #1273
                                        noWrap = textStyles.whiteSpace === 'nowrap',
                                        hasWhiteSpace = spans.length > 1 || lineNo || (words.length > 1 && !noWrap),
                                        tooLong,
                                        actualWidth,
                                        rest = [],
                                        dy = getLineHeight(tspan),
                                        rotation = wrapper.rotation,
                                        wordStr = span, // for ellipsis
                                        cursor = wordStr.length, // binary search cursor
                                        bBox;
                                    // Repeatedly measure the tspan and either
                                    // shorten it (ellipsis) or move words to
                                    // the next line (soft wrap)
                                    while ((hasWhiteSpace || ellipsis) && (words.length || rest.length)) {
                                        wrapper.rotation = 0; // discard rotation when computing box
                                        bBox = wrapper.getBBox(true);
                                        actualWidth = bBox.width;
                                        // Old IE cannot measure the actualWidth for SVG elements (#2314)
                                        if (!svg && renderer.forExport) {
                                            actualWidth = renderer.measureSpanWidth(tspan.firstChild.data, wrapper.styles);
                                        }
                                        tooLong = actualWidth > width;
                                        // For ellipsis, do a binary search for the correct string length
                                        if (wasTooLong === undefined) {
                                            wasTooLong = tooLong; // First time
                                        }
                                        if (ellipsis && wasTooLong) {
                                            cursor /= 2;
                                            if (wordStr === '' || (!tooLong && cursor < 0.5)) {
                                                words = []; // All ok, break out
                                            } else {
                                                wordStr = span.substring(0, wordStr.length + (tooLong ? -1 : 1) * Math.ceil(cursor));
                                                words = [wordStr + (width > 3 ? '\u2026' : '')];
                                                tspan.removeChild(tspan.firstChild);
                                            }
                                            // Looping down, this is the first word sequence that is not too long,
                                            // so we can move on to build the next line.
                                        } else if (!tooLong || words.length === 1) {
                                            words = rest;
                                            rest = [];
                                            if (words.length && !noWrap) {
                                                tspan = doc.createElementNS(SVG_NS, 'tspan');
                                                attr(tspan, {
                                                    dy: dy,
                                                    x: parentX
                                                });
                                                if (spanStyle) { // #390
                                                    attr(tspan, 'style', spanStyle);
                                                }
                                                textNode.appendChild(tspan);
                                            }
                                            if (actualWidth > width) { // a single word is pressing it out
                                                width = actualWidth;
                                            }
                                        } else { // append to existing line tspan
                                            tspan.removeChild(tspan.firstChild);
                                            rest.unshift(words.pop());
                                        }
                                        if (words.length) {
                                            tspan.appendChild(doc.createTextNode(words.join(' ').replace(/- /g, '-')));
                                        }
                                    }
                                    wrapper.rotation = rotation;
                                }
                                spanNo++;
                            }
                        }
                    });
                });
                // Expose the full text as a tooltip when it was truncated
                if (wasTooLong) {
                    wrapper.attr('title', wrapper.textStr);
                }
                if (tempParent) {
                    tempParent.removeChild(textNode); // detach again after measuring offset width
                }
                // Apply the text shadow
                if (textShadow && wrapper.applyTextShadow) {
                    wrapper.applyTextShadow(textShadow);
                }
            }
        },
/*
breakText: function (wrapper, width) {
var bBox = wrapper.getBBox(),
node = wrapper.element,
textLength = node.textContent.length,
pos = Math.round(width * textLength / bBox.width), // try this position first, based on average character width
increment = 0,
finalPos;
if (bBox.width > width) {
while (finalPos === undefined) {
textLength = node.getSubStringLength(0, pos);
if (textLength <= width) {
if (increment === -1) {
finalPos = pos;
} else {
increment = 1;
}
} else {
if (increment === 1) {
finalPos = pos - 1;
} else {
increment = -1;
}
}
pos += increment;
}
}
console.log('width', width, 'stringWidth', node.getSubStringLength(0, finalPos))
},
*/
/**
* Returns white for dark colors and black for bright colors
*/
getContrast: function(rgba) {
rgba = color(rgba).rgba;
return rgba[0] + rgba[1] + rgba[2] > 2 * 255 ? '#000000' : '#FFFFFF';
},
        /**
         * Create a button with preset states. State 0 is normal, 1 hover,
         * 2 pressed, 3 disabled; hover and click are ignored while disabled.
         * @param {String} text
         * @param {Number} x
         * @param {Number} y
         * @param {Function} callback - click handler, called with the label as `this`
         * @param {Object} normalState
         * @param {Object} hoverState
         * @param {Object} pressedState
         * @param {Object} disabledState
         * @param {String} shape - symbol name for the button background
         */
        button: function(text, x, y, callback, normalState, hoverState, pressedState, disabledState, shape) {
            var label = this.label(text, x, y, shape, null, null, null, null, 'button'),
                curState = 0;
            // Default, non-stylable attributes
            label.attr(merge({
                'padding': 8,
                'r': 2
            }, normalState));
            // Add the events. IE9 and IE10 need mouseover and mouseout to function (#667).
            addEvent(label.element, isMS ? 'mouseover' : 'mouseenter', function() {
                if (curState !== 3) {
                    label.setState(1);
                }
            });
            addEvent(label.element, isMS ? 'mouseout' : 'mouseleave', function() {
                if (curState !== 3) {
                    label.setState(curState);
                }
            });
            label.setState = function(state) {
                // Hover state is temporary, don't record it
                if (state !== 1) {
                    label.state = curState = state;
                }
                // Update visuals
                label.removeClass(/highcharts-button-(normal|hover|pressed|disabled)/)
                    .addClass('highcharts-button-' + ['normal', 'hover', 'pressed', 'disabled'][state || 0]);
            };
            return label
                .on('click', function(e) {
                    if (curState !== 3) {
                        callback.call(label, e);
                    }
                });
        },
/**
* Make a straight line crisper by not spilling out to neighbour pixels
* @param {Array} points
* @param {Number} width
*/
crispLine: function(points, width) {
// points format: ['M', 0, 0, 'L', 100, 0]
// normalize to a crisp line
if (points[1] === points[4]) {
// Substract due to #1129. Now bottom and left axis gridlines behave the same.
points[1] = points[4] = Math.round(points[1]) - (width % 2 / 2);
}
if (points[2] === points[5]) {
points[2] = points[5] = Math.round(points[2]) + (width % 2 / 2);
}
return points;
},
/**
* Draw a path
* @param {Array} path An SVG path in array form
*/
path: function(path) {
var attribs = {
};
if (isArray(path)) {
attribs.d = path;
} else if (isObject(path)) { // attributes
extend(attribs, path);
}
return this.createElement('path').attr(attribs);
},
/**
* Draw and return an SVG circle
* @param {Number} x The x position
* @param {Number} y The y position
* @param {Number} r The radius
*/
circle: function(x, y, r) {
var attribs = isObject(x) ? x : {
x: x,
y: y,
r: r
},
wrapper = this.createElement('circle');
// Setting x or y translates to cx and cy
wrapper.xSetter = wrapper.ySetter = function(value, key, element) {
element.setAttribute('c' + key, value);
};
return wrapper.attr(attribs);
},
/**
* Draw and return an arc
* @param {Number} x X position
* @param {Number} y Y position
* @param {Number} r Radius
* @param {Number} innerR Inner radius like used in donut charts
* @param {Number} start Starting angle
* @param {Number} end Ending angle
*/
arc: function(x, y, r, innerR, start, end) {
var arc;
if (isObject(x)) {
y = x.y;
r = x.r;
innerR = x.innerR;
start = x.start;
end = x.end;
x = x.x;
}
// Arcs are defined as symbols for the ability to set
// attributes in attr and animate
arc = this.symbol('arc', x || 0, y || 0, r || 0, r || 0, {
innerR: innerR || 0,
start: start || 0,
end: end || 0
});
arc.r = r; // #959
return arc;
},
/**
* Draw and return a rectangle
* @param {Number} x Left position
* @param {Number} y Top position
* @param {Number} width
* @param {Number} height
* @param {Number} r Border corner radius
* @param {Number} strokeWidth A stroke width can be supplied to allow crisp drawing
*/
rect: function(x, y, width, height, r, strokeWidth) {
r = isObject(x) ? x.r : r;
var wrapper = this.createElement('rect'),
attribs = isObject(x) ? x : x === undefined ? {} : {
x: x,
y: y,
width: Math.max(width, 0),
height: Math.max(height, 0)
};
if (r) {
attribs.r = r;
}
wrapper.rSetter = function(value, key, element) {
attr(element, {
rx: value,
ry: value
});
};
return wrapper.attr(attribs);
},
        /**
         * Resize the box and re-align all aligned elements.
         * @param {Number} width
         * @param {Number} height
         * @param {Boolean|Object} animate - whether (or how) to animate the resize
         */
        setSize: function(width, height, animate) {
            var renderer = this,
                alignedObjects = renderer.alignedObjects,
                i = alignedObjects.length;
            renderer.width = width;
            renderer.height = height;
            renderer.boxWrapper.animate({
                width: width,
                height: height
            }, {
                step: function() {
                    // Keep the viewBox in sync with the animated size
                    this.attr({
                        viewBox: '0 0 ' + this.attr('width') + ' ' + this.attr('height')
                    });
                },
                duration: pick(animate, true) ? undefined : 0
            });
            // Re-align all aligned elements against the new size
            while (i--) {
                alignedObjects[i].align();
            }
        },
/**
* Create a group
* @param {String} name The group will be given a class name of 'highcharts-{name}'.
* This can be used for styling and scripting.
*/
g: function(name) {
var elem = this.createElement('g');
return name ? elem.attr({
'class': 'highcharts-' + name
}) : elem;
},
        /**
         * Display an image.
         * @param {String} src - image URL
         * @param {Number} x
         * @param {Number} y
         * @param {Number} width
         * @param {Number} height
         */
        image: function(src, x, y, width, height) {
            var attribs = {
                    preserveAspectRatio: 'none'
                },
                elemWrapper;
            // optional properties: only set position/size when given
            if (arguments.length > 1) {
                extend(attribs, {
                    x: x,
                    y: y,
                    width: width,
                    height: height
                });
            }
            elemWrapper = this.createElement('image').attr(attribs);
            // set the href in the xlink namespace
            if (elemWrapper.element.setAttributeNS) {
                elemWrapper.element.setAttributeNS('http://www.w3.org/1999/xlink',
                    'href', src);
            } else {
                // could be exporting in IE
                // using href throws "not supported" in ie7 and under, requries regex shim to fix later
                elemWrapper.element.setAttribute('hc-svg-href', src);
            }
            return elemWrapper;
        },
        /**
         * Draw a symbol out of pre-defined shape paths from the namespace 'symbol' object,
         * or an image when the symbol is a url(...) reference. Image sizes are
         * resolved asynchronously via a dummy <img> when not cached.
         *
         * @param {String} symbol - symbol name, or 'url(...)' image reference
         * @param {Number} x
         * @param {Number} y
         * @param {Number} width
         * @param {Number} height
         * @param {Object} options - extra options, e.g. arc start/end
         */
        symbol: function(symbol, x, y, width, height, options) {
            var ren = this,
                obj,
                // get the symbol definition function
                symbolFn = this.symbols[symbol],
                // check if there's a path defined for this symbol
                path = defined(x) && symbolFn && symbolFn(
                    Math.round(x),
                    Math.round(y),
                    width,
                    height,
                    options
                ),
                imageRegex = /^url\((.*?)\)$/,
                imageSrc,
                centerImage;
            if (symbolFn) {
                obj = this.path(path);
                // expando properties for use in animate and attr
                extend(obj, {
                    symbolName: symbol,
                    x: x,
                    y: y,
                    width: width,
                    height: height
                });
                if (options) {
                    extend(obj, options);
                }
                // image symbols
            } else if (imageRegex.test(symbol)) {
                imageSrc = symbol.match(imageRegex)[1];
                // Create the image synchronously, add attribs async
                obj = this.image(imageSrc);
                // The image width is not always the same as the symbol width. The
                // image may be centered within the symbol, as is the case when
                // image shapes are used as label backgrounds, for example in flags.
                obj.imgwidth = pick(
                    symbolSizes[imageSrc] && symbolSizes[imageSrc].width,
                    options && options.width
                );
                obj.imgheight = pick(
                    symbolSizes[imageSrc] && symbolSizes[imageSrc].height,
                    options && options.height
                );
                /**
                 * Set the size and position
                 */
                centerImage = function() {
                    obj.attr({
                        width: obj.width,
                        height: obj.height
                    });
                };
                /**
                 * Width and height setters that take both the image's physical size
                 * and the label size into consideration, and translates the image
                 * to center within the label.
                 */
                each(['width', 'height'], function(key) {
                    obj[key + 'Setter'] = function(value, key) {
                        var attribs = {},
                            imgSize = this['img' + key],
                            trans = key === 'width' ? 'translateX' : 'translateY';
                        this[key] = value;
                        if (defined(imgSize)) {
                            if (this.element) {
                                this.element.setAttribute(key, imgSize);
                            }
                            if (!this.alignByTranslate) {
                                // Center the image within the label box
                                attribs[trans] = ((this[key] || 0) - imgSize) / 2;
                                this.attr(attribs);
                            }
                        }
                    };
                });
                if (defined(x)) {
                    obj.attr({
                        x: x,
                        y: y
                    });
                }
                obj.isImg = true;
                if (defined(obj.imgwidth) && defined(obj.imgheight)) {
                    centerImage();
                } else {
                    // Initialize image to be 0 size so export will still function if there's no cached sizes.
                    obj.attr({
                        width: 0,
                        height: 0
                    });
                    // Create a dummy JavaScript image to get the width and height. Due to a bug in IE < 8,
                    // the created element must be assigned to a variable in order to load (#292).
                    createElement('img', {
                        onload: function() {
                            var chart = charts[ren.chartIndex];
                            // Special case for SVGs on IE11, the width is not accessible until the image is
                            // part of the DOM (#2854).
                            if (this.width === 0) {
                                css(this, {
                                    position: 'absolute',
                                    top: '-999em'
                                });
                                doc.body.appendChild(this);
                            }
                            // Center the image
                            symbolSizes[imageSrc] = { // Cache for next
                                width: this.width,
                                height: this.height
                            };
                            obj.imgwidth = this.width;
                            obj.imgheight = this.height;
                            if (obj.element) {
                                centerImage();
                            }
                            // Clean up after #2854 workaround.
                            if (this.parentNode) {
                                this.parentNode.removeChild(this);
                            }
                            // Fire the load event when all external images are loaded
                            ren.imgCount--;
                            if (!ren.imgCount && chart && chart.onload) {
                                chart.onload();
                            }
                        },
                        src: imageSrc
                    });
                    this.imgCount++;
                }
            }
            return obj;
        },
/**
* An extendable collection of functions for defining symbol paths.
*/
symbols: {
            'circle': function(x, y, w, h) {
                // Control point offset for approximating a circle/ellipse
                // with two cubic Bezier curves
                var cpw = 0.166 * w;
                return [
                    'M', x + w / 2, y,
                    'C', x + w + cpw, y, x + w + cpw, y + h, x + w / 2, y + h,
                    'C', x - cpw, y + h, x - cpw, y, x + w / 2, y,
                    'Z'
                ];
            },
'square': function(x, y, w, h) {
return [
'M', x, y,
'L', x + w, y,
x + w, y + h,
x, y + h,
'Z'
];
},
'triangle': function(x, y, w, h) {
return [
'M', x + w / 2, y,
'L', x + w, y + h,
x, y + h,
'Z'
];
},
'triangle-down': function(x, y, w, h) {
return [
'M', x, y,
'L', x + w, y,
x + w / 2, y + h,
'Z'
];
},
'diamond': function(x, y, w, h) {
return [
'M', x + w / 2, y,
'L', x + w, y + h / 2,
x + w / 2, y + h,
x, y + h / 2,
'Z'
];
},
            // Annular sector: outer arc from start to end angle, then (unless
            // open) a line in to the inner radius and the inner arc back.
            'arc': function(x, y, w, h, options) {
                var start = options.start,
                    radius = options.r || w || h,
                    end = options.end - 0.001, // to prevent cos and sin of start and end from becoming equal on 360 arcs (related: #1561)
                    innerRadius = options.innerR,
                    open = options.open,
                    cosStart = Math.cos(start),
                    sinStart = Math.sin(start),
                    cosEnd = Math.cos(end),
                    sinEnd = Math.sin(end),
                    // large-arc flag: set when the sweep is PI or more
                    longArc = options.end - start < Math.PI ? 0 : 1;
                return [
                    'M',
                    x + radius * cosStart,
                    y + radius * sinStart,
                    'A', // arcTo
                    radius, // x radius
                    radius, // y radius
                    0, // slanting
                    longArc, // long or short arc
                    1, // clockwise
                    x + radius * cosEnd,
                    y + radius * sinEnd,
                    open ? 'M' : 'L',
                    x + innerRadius * cosEnd,
                    y + innerRadius * sinEnd,
                    'A', // arcTo
                    innerRadius, // x radius
                    innerRadius, // y radius
                    0, // slanting
                    longArc, // long or short arc
                    0, // clockwise
                    x + innerRadius * cosStart,
                    y + innerRadius * sinStart,
                    open ? '' : 'Z' // close
                ];
            },
/**
* Callout shape used for default tooltips, also used for rounded rectangles in VML
*/
callout: function(x, y, w, h, options) {
var arrowLength = 6,
halfDistance = 6,
r = Math.min((options && options.r) || 0, w, h),
safeDistance = r + halfDistance,
anchorX = options && options.anchorX,
anchorY = options && options.anchorY,
path;
path = [
'M', x + r, y,
'L', x + w - r, y, // top side
'C', x + w, y, x + w, y, x + w, y + r, // top-right corner
'L', x + w, y + h - r, // right side
'C', x + w, y + h, x + w, y + h, x + w - r, y + h, // bottom-right corner
'L', x + r, y + h, // bottom side
'C', x, y + h, x, y + h, x, y + h - r, // bottom-left corner
'L', x, y + r, // left side
'C', x, y, x, y, x + r, y // top-right corner
];
if (anchorX && anchorX > w && anchorY > y + safeDistance && anchorY < y + h - safeDistance) { // replace right side
path.splice(13, 3,
'L', x + w, anchorY - halfDistance,
x + w + arrowLength, anchorY,
x + w, anchorY + halfDistance,
x + w, y + h - r
);
} else if (anchorX && anchorX < 0 && anchorY > y + safeDistance && anchorY < y + h - safeDistance) { // replace left side
path.splice(33, 3,
'L', x, anchorY + halfDistance,
x - arrowLength, anchorY,
x, anchorY - halfDistance,
x, y + r
);
} else if (anchorY && anchorY > h && anchorX > x + safeDistance && anchorX < x + w - safeDistance) { // replace bottom
path.splice(23, 3,
'L', anchorX + halfDistance, y + h,
anchorX, y + h + arrowLength,
anchorX - halfDistance, y + h,
x + r, y + h
);
} else if (anchorY && anchorY < 0 && anchorX > x + safeDistance && anchorX < x + w - safeDistance) { // replace top
path.splice(3, 3,
'L', anchorX - halfDistance, y,
anchorX, y - arrowLength,
anchorX + halfDistance, y,
w - r, y
);
}
return path;
}
},
/**
* Define a clipping rectangle
* @param {String} id
* @param {Number} x
* @param {Number} y
* @param {Number} width
* @param {Number} height
*/
clipRect: function(x, y, width, height) {
var wrapper,
id = 'highcharts-' + H.idCounter++,
clipPath = this.createElement('clipPath').attr({
id: id
}).add(this.defs);
wrapper = this.rect(x, y, width, height, 0).add(clipPath);
wrapper.id = id;
wrapper.clipPath = clipPath;
wrapper.count = 0;
return wrapper;
},
        /**
         * Add text to the SVG object.
         * @param {String} str
         * @param {Number} x Left position
         * @param {Number} y Top position
         * @param {Boolean} useHTML Use HTML to render the text
         */
        text: function(str, x, y, useHTML) {
            // declare variables
            var renderer = this,
                fakeSVG = !svg && renderer.forExport,
                wrapper,
                attribs = {};
            // Delegate to the HTML renderer when requested and allowed
            if (useHTML && (renderer.allowHTML || !renderer.forExport)) {
                return renderer.html(str, x, y);
            }
            attribs.x = Math.round(x || 0); // X is always needed for line-wrap logic
            if (y) {
                attribs.y = Math.round(y);
            }
            // 0 is a valid text value and must not be dropped
            if (str || str === 0) {
                attribs.text = str;
            }
            wrapper = renderer.createElement('text')
                .attr(attribs);
            // Prevent wrapping from creating false offsetWidths in export in legacy IE (#1079, #1063)
            if (fakeSVG) {
                wrapper.css({
                    position: 'absolute'
                });
            }
            if (!useHTML) {
                // Keep line-break tspans' x in sync when the text's x changes
                wrapper.xSetter = function(value, key, element) {
                    var tspans = element.getElementsByTagName('tspan'),
                        tspan,
                        parentVal = element.getAttribute(key),
                        i;
                    for (i = 0; i < tspans.length; i++) {
                        tspan = tspans[i];
                        // If the x values are equal, the tspan represents a linebreak
                        if (tspan.getAttribute(key) === parentVal) {
                            tspan.setAttribute(key, value);
                        }
                    }
                    element.setAttribute(key, value);
                };
            }
            return wrapper;
        },
/**
* Utility to return the baseline offset and total line height from the font size
*/
fontMetrics: function(fontSize, elem) { // eslint-disable-line no-unused-vars
var lineHeight,
baseline;
fontSize = elem && SVGElement.prototype.getStyle.call(elem, 'font-size');
fontSize = /px/.test(fontSize) ? pInt(fontSize) : /em/.test(fontSize) ? parseFloat(fontSize) * 12 : 12;
// Empirical values found by comparing font size and bounding box height.
// Applies to the default font family. http://jsfiddle.net/highcharts/7xvn7/
lineHeight = fontSize < 24 ? fontSize + 3 : Math.round(fontSize * 1.2);
baseline = Math.round(lineHeight * 0.8);
return {
h: lineHeight,
b: baseline,
f: fontSize
};
},
/**
* Correct X and Y positioning of a label for rotation (#1764)
*/
rotCorr: function(baseline, rotation, alterY) {
var y = baseline;
if (rotation && alterY) {
y = Math.max(y * Math.cos(rotation * deg2rad), 4);
}
return {
x: (-baseline / 3) * Math.sin(rotation * deg2rad),
y: y
};
},
        /**
         * Add a label, a text item that can hold a colored or gradient background
         * as well as a border and shadow.
         * @param {string} str
         * @param {Number} x
         * @param {Number} y
         * @param {String} shape
         * @param {Number} anchorX In case the shape has a pointer, like a flag, this is the
         * coordinates it should be pinned to
         * @param {Number} anchorY
         * @param {Boolean} useHTML Render the text with the HTML renderer
         * @param {Boolean} baseline Whether to position the label relative to the text baseline,
         * like renderer.text, or to the upper border of the rectangle.
         * @param {String} className Class name for the group
         */
        label: function(str, x, y, shape, anchorX, anchorY, useHTML, baseline, className) {
            var renderer = this,
                wrapper = renderer.g(className !== 'button' && 'label'),
                text = wrapper.text = renderer.text('', 0, 0, useHTML)
                .attr({
                    zIndex: 1
                }),
                box,
                bBox,
                alignFactor = 0,
                padding = 3,
                paddingLeft = 0,
                width,
                height,
                wrapperX,
                wrapperY,
                textAlign,
                deferredAttr = {},
                strokeWidth,
                baselineOffset,
                hasBGImage = /^url\((.*?)\)$/.test(shape),
                needsBox = hasBGImage,
                getCrispAdjust,
                updateBoxSize,
                updateTextPadding,
                boxAttr;
            if (className) {
                wrapper.addClass('highcharts-' + className);
            }
            needsBox = true; // for styling
            // Half-pixel crisping offset derived from the current stroke width
            getCrispAdjust = function() {
                return box.strokeWidth() % 2 / 2;
            };
            /**
             * This function runs after the label is added to the DOM (when the bounding box is
             * available), and after the text of the label is updated to detect the new bounding
             * box and reflect it in the border box.
             */
            updateBoxSize = function() {
                var style = text.element.style,
                    crispAdjust,
                    attribs = {};
                // Only re-measure when no fixed size is given or alignment needs it
                bBox = (width === undefined || height === undefined || textAlign) && defined(text.textStr) &&
                    text.getBBox(); //#3295 && 3514 box failure when string equals 0
                wrapper.width = (width || bBox.width || 0) + 2 * padding + paddingLeft;
                wrapper.height = (height || bBox.height || 0) + 2 * padding;
                // Update the label-scoped y offset
                baselineOffset = padding + renderer.fontMetrics(style && style.fontSize, text).b;
                if (needsBox) {
                    // Create the border box if it is not already present
                    if (!box) {
                        wrapper.box = box = renderer.symbols[shape] || hasBGImage ? // Symbol definition exists (#5324)
                            renderer.symbol(shape) :
                            renderer.rect();
                        box.addClass(
                            (className === 'button' ? '' : 'highcharts-label-box') + // Don't use label className for buttons
                            (className ? ' highcharts-' + className + '-box' : '')
                        );
                        box.add(wrapper);
                        crispAdjust = getCrispAdjust();
                        attribs.x = crispAdjust;
                        attribs.y = (baseline ? -baselineOffset : 0) + crispAdjust;
                    }
                    // Apply the box attributes
                    attribs.width = Math.round(wrapper.width);
                    attribs.height = Math.round(wrapper.height);
                    box.attr(extend(attribs, deferredAttr));
                    // Attributes set before the box existed have now been applied
                    deferredAttr = {};
                }
            };
            /**
             * This function runs after setting text or padding, but only if padding is changed
             */
            updateTextPadding = function() {
                var textX = paddingLeft + padding,
                    textY;
                // determine y based on the baseline
                textY = baseline ? 0 : baselineOffset;
                // compensate for alignment
                if (defined(width) && bBox && (textAlign === 'center' || textAlign === 'right')) {
                    textX += {
                        center: 0.5,
                        right: 1
                    }[textAlign] * (width - bBox.width);
                }
                // update if anything changed
                if (textX !== text.x || textY !== text.y) {
                    text.attr('x', textX);
                    if (textY !== undefined) {
                        text.attr('y', textY);
                    }
                }
                // record current values
                text.x = textX;
                text.y = textY;
            };
            /**
             * Set a box attribute, or defer it if the box is not yet created
             * @param {Object} key
             * @param {Object} value
             */
            boxAttr = function(key, value) {
                if (box) {
                    box.attr(key, value);
                } else {
                    deferredAttr[key] = value;
                }
            };
            /**
             * After the text element is added, get the desired size of the border box
             * and add it before the text in the DOM.
             */
            wrapper.onAdd = function() {
                text.add(wrapper);
                wrapper.attr({
                    text: (str || str === 0) ? str : '', // alignment is available now // #3295: 0 not rendered if given as a value
                    x: x,
                    y: y
                });
                if (box && defined(anchorX)) {
                    wrapper.attr({
                        anchorX: anchorX,
                        anchorY: anchorY
                    });
                }
            };
            /*
             * Add specific attribute setters.
             */
            // only change local variables
            wrapper.widthSetter = function(value) {
                width = value;
            };
            wrapper.heightSetter = function(value) {
                height = value;
            };
            wrapper['text-alignSetter'] = function(value) {
                textAlign = value;
            };
            wrapper.paddingSetter = function(value) {
                if (defined(value) && value !== padding) {
                    padding = wrapper.padding = value;
                    updateTextPadding();
                }
            };
            wrapper.paddingLeftSetter = function(value) {
                if (defined(value) && value !== paddingLeft) {
                    paddingLeft = value;
                    updateTextPadding();
                }
            };
            // change local variable and prevent setting attribute on the group
            wrapper.alignSetter = function(value) {
                value = {
                    left: 0,
                    center: 0.5,
                    right: 1
                }[value];
                if (value !== alignFactor) {
                    alignFactor = value;
                    if (bBox) { // Bounding box exists, means we're dynamically changing
                        wrapper.attr({
                            x: wrapperX
                        }); // #5134
                    }
                }
            };
            // apply these to the box and the text alike
            wrapper.textSetter = function(value) {
                if (value !== undefined) {
                    text.textSetter(value);
                }
                updateBoxSize();
                updateTextPadding();
            };
            // apply these to the box but not to the text
            wrapper['stroke-widthSetter'] = function(value, key) {
                if (value) {
                    needsBox = true;
                }
                strokeWidth = this['stroke-width'] = value;
                boxAttr(key, value);
            };
            wrapper.rSetter = function(value, key) {
                boxAttr(key, value);
            };
            wrapper.anchorXSetter = function(value, key) {
                anchorX = value;
                // Anchor is given in chart coordinates; make it relative to the
                // translated group and crisp it
                boxAttr(key, Math.round(value) - getCrispAdjust() - wrapperX);
            };
            wrapper.anchorYSetter = function(value, key) {
                anchorY = value;
                boxAttr(key, value - wrapperY);
            };
            // rename attributes
            wrapper.xSetter = function(value) {
                wrapper.x = value; // for animation getter
                if (alignFactor) {
                    value -= alignFactor * ((width || bBox.width) + 2 * padding);
                }
                wrapperX = Math.round(value);
                // Position the group by translation rather than x/y attributes
                wrapper.attr('translateX', wrapperX);
            };
            wrapper.ySetter = function(value) {
                wrapperY = wrapper.y = Math.round(value);
                wrapper.attr('translateY', wrapperY);
            };
            // Redirect certain methods to either the box or the text
            var baseCss = wrapper.css;
            return extend(wrapper, {
                /**
                 * Pick up some properties and apply them to the text instead of the wrapper
                 */
                css: function(styles) {
                    if (styles) {
                        var textStyles = {};
                        styles = merge(styles); // create a copy to avoid altering the original object (#537)
                        each(wrapper.textProps, function(prop) {
                            if (styles[prop] !== undefined) {
                                textStyles[prop] = styles[prop];
                                delete styles[prop];
                            }
                        });
                        text.css(textStyles);
                    }
                    return baseCss.call(wrapper, styles);
                },
                /**
                 * Return the bounding box of the box, not the group
                 */
                getBBox: function() {
                    return {
                        width: bBox.width + 2 * padding,
                        height: bBox.height + 2 * padding,
                        x: bBox.x - padding,
                        y: bBox.y - padding
                    };
                },
                /**
                 * Destroy and release memory.
                 */
                destroy: function() {
                    // Added by button implementation
                    removeEvent(wrapper.element, 'mouseenter');
                    removeEvent(wrapper.element, 'mouseleave');
                    if (text) {
                        text = text.destroy();
                    }
                    if (box) {
                        box = box.destroy();
                    }
                    // Call base implementation to destroy the rest
                    SVGElement.prototype.destroy.call(wrapper);
                    // Release local pointers (#1298)
                    wrapper = renderer = updateBoxSize = updateTextPadding = boxAttr = null;
                }
            });
        }
}; // end SVGRenderer
// general renderer
H.Renderer = SVGRenderer;
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var attr = H.attr,
createElement = H.createElement,
css = H.css,
defined = H.defined,
each = H.each,
extend = H.extend,
isFirefox = H.isFirefox,
isMS = H.isMS,
isWebKit = H.isWebKit,
pInt = H.pInt,
SVGElement = H.SVGElement,
SVGRenderer = H.SVGRenderer,
win = H.win,
wrap = H.wrap;
// extend SvgElement for useHTML option
extend(SVGElement.prototype, {
/**
* Apply CSS to HTML elements. This is used in text within SVG rendering and
* by the VML renderer
*/
htmlCss: function(styles) {
var wrapper = this,
element = wrapper.element,
textWidth = styles && element.tagName === 'SPAN' && styles.width;
if (textWidth) {
delete styles.width;
wrapper.textWidth = textWidth;
wrapper.updateTransform();
}
if (styles && styles.textOverflow === 'ellipsis') {
styles.whiteSpace = 'nowrap';
styles.overflow = 'hidden';
}
wrapper.styles = extend(wrapper.styles, styles);
css(wrapper.element, styles);
return wrapper;
},
/**
* VML and useHTML method for calculating the bounding box based on offsets
* @param {Boolean} refresh Whether to force a fresh value from the DOM or to
* use the cached value
*
* @return {Object} A hash containing values for x, y, width and height
*/
htmlGetBBox: function() {
var wrapper = this,
element = wrapper.element;
// faking getBBox in exported SVG in legacy IE
// faking getBBox in exported SVG in legacy IE (is this a duplicate of the fix for #1079?)
if (element.nodeName === 'text') {
element.style.position = 'absolute';
}
return {
x: element.offsetLeft,
y: element.offsetTop,
width: element.offsetWidth,
height: element.offsetHeight
};
},
        /**
         * VML override private method to update elements based on internal
         * properties based on SVG transform
         */
        htmlUpdateTransform: function() {
            // aligning non added elements is expensive
            if (!this.added) {
                this.alignOnAdd = true;
                return;
            }
            var wrapper = this,
                renderer = wrapper.renderer,
                elem = wrapper.element,
                translateX = wrapper.translateX || 0,
                translateY = wrapper.translateY || 0,
                x = wrapper.x || 0,
                y = wrapper.y || 0,
                align = wrapper.textAlign || 'left',
                alignCorrection = {
                    left: 0,
                    center: 0.5,
                    right: 1
                }[align],
                styles = wrapper.styles;
            // apply translate via CSS margins
            css(elem, {
                marginLeft: translateX,
                marginTop: translateY
            });
            // apply inversion
            if (wrapper.inverted) { // wrapper is a group
                each(elem.childNodes, function(child) {
                    renderer.invertChild(child, elem);
                });
            }
            if (elem.tagName === 'SPAN') {
                var rotation = wrapper.rotation,
                    baseline,
                    textWidth = pInt(wrapper.textWidth),
                    whiteSpace = styles && styles.whiteSpace,
                    // Cache key: recompute only when any of these changed
                    currentTextTransform = [rotation, align, elem.innerHTML, wrapper.textWidth, wrapper.textAlign].join(',');
                if (currentTextTransform !== wrapper.cTT) { // do the calculations and DOM access only if properties changed
                    baseline = renderer.fontMetrics(elem.style.fontSize).b;
                    // Renderer specific handling of span rotation
                    if (defined(rotation)) {
                        wrapper.setSpanRotation(rotation, alignCorrection, baseline);
                    }
                    // Reset multiline/ellipsis in order to read width (#4928, #5417)
                    css(elem, {
                        width: '',
                        whiteSpace: whiteSpace || 'nowrap'
                    });
                    // Update textWidth: only constrain when the text overflows
                    // and contains a space or hyphen to break on
                    if (elem.offsetWidth > textWidth && /[ \-]/.test(elem.textContent || elem.innerText)) { // #983, #1254
                        css(elem, {
                            width: textWidth + 'px',
                            display: 'block',
                            whiteSpace: whiteSpace || 'normal' // #3331
                        });
                    }
                    wrapper.getSpanCorrection(elem.offsetWidth, baseline, alignCorrection, rotation, align);
                }
                // apply position with correction
                css(elem, {
                    left: (x + (wrapper.xCorr || 0)) + 'px',
                    top: (y + (wrapper.yCorr || 0)) + 'px'
                });
                // force reflow in webkit to apply the left and top on useHTML element (#1249)
                if (isWebKit) {
                    baseline = elem.offsetHeight; // assigned to baseline for lint purpose
                }
                // record current text transform
                wrapper.cTT = currentTextTransform;
            }
        },
/**
* Set the rotation of an individual HTML span
*/
setSpanRotation: function(rotation, alignCorrection, baseline) {
var rotationStyle = {},
cssTransformKey = isMS ? '-ms-transform' : isWebKit ? '-webkit-transform' : isFirefox ? 'MozTransform' : win.opera ? '-o-transform' : '';
rotationStyle[cssTransformKey] = rotationStyle.transform = 'rotate(' + rotation + 'deg)';
rotationStyle[cssTransformKey + (isFirefox ? 'Origin' : '-origin')] = rotationStyle.transformOrigin = (alignCorrection * 100) + '% ' + baseline + 'px';
css(this.element, rotationStyle);
},
        /**
         * Get the correction in X and Y positioning as the element is rotated.
         * Stores the result on xCorr/yCorr for htmlUpdateTransform to apply.
         */
        getSpanCorrection: function(width, baseline, alignCorrection) {
            // Shift left by the aligned fraction of the width, up by the baseline
            this.xCorr = -width * alignCorrection;
            this.yCorr = -baseline;
        }
});
// Extend SvgRenderer for useHTML option.
extend(SVGRenderer.prototype, {
/**
* Create HTML text node. This is used by the VML renderer as well as the SVG
* renderer through the useHTML option.
*
* @param {String} str
* @param {Number} x
* @param {Number} y
*/
html: function(str, x, y) {
var wrapper = this.createElement('span'),
element = wrapper.element,
renderer = wrapper.renderer,
isSVG = renderer.isSVG,
addSetters = function(element, style) {
// These properties are set as attributes on the SVG group, and as
// identical CSS properties on the div. (#3542)
each(['opacity', 'visibility'], function(prop) {
wrap(element, prop + 'Setter', function(proceed, value, key, elem) {
proceed.call(this, value, key, elem);
style[key] = value;
});
});
};
// Text setter
wrapper.textSetter = function(value) {
if (value !== element.innerHTML) {
delete this.bBox;
}
element.innerHTML = this.textStr = value;
wrapper.htmlUpdateTransform();
};
// Add setters for the element itself (#4938)
if (isSVG) { // #4938, only for HTML within SVG
addSetters(wrapper, wrapper.element.style);
}
// Various setters which rely on update transform
wrapper.xSetter = wrapper.ySetter = wrapper.alignSetter = wrapper.rotationSetter = function(value, key) {
if (key === 'align') {
key = 'textAlign'; // Do not overwrite the SVGElement.align method. Same as VML.
}
wrapper[key] = value;
wrapper.htmlUpdateTransform();
};
// Set the default attributes
wrapper
.attr({
text: str,
x: Math.round(x),
y: Math.round(y)
})
.css({
position: 'absolute'
});
// Keep the whiteSpace style outside the wrapper.styles collection
element.style.whiteSpace = 'nowrap';
// Use the HTML specific .css method
wrapper.css = wrapper.htmlCss;
// This is specific for HTML within SVG
if (isSVG) {
wrapper.add = function(svgGroupWrapper) {
var htmlGroup,
container = renderer.box.parentNode,
parentGroup,
parents = [];
this.parentGroup = svgGroupWrapper;
// Create a mock group to hold the HTML elements
if (svgGroupWrapper) {
htmlGroup = svgGroupWrapper.div;
if (!htmlGroup) {
// Read the parent chain into an array and read from top down
parentGroup = svgGroupWrapper;
while (parentGroup) {
parents.push(parentGroup);
// Move up to the next parent group
parentGroup = parentGroup.parentGroup;
}
// Ensure dynamically updating position when any parent is translated
each(parents.reverse(), function(parentGroup) {
var htmlGroupStyle,
cls = attr(parentGroup.element, 'class');
if (cls) {
cls = {
className: cls
};
} // else null
// Create a HTML div and append it to the parent div to emulate
// the SVG group structure
htmlGroup = parentGroup.div = parentGroup.div || createElement('div', cls, {
position: 'absolute',
left: (parentGroup.translateX || 0) + 'px',
top: (parentGroup.translateY || 0) + 'px',
display: parentGroup.display,
opacity: parentGroup.opacity, // #5075
pointerEvents: parentGroup.styles && parentGroup.styles.pointerEvents // #5595
}, htmlGroup || container); // the top group is appended to container
// Shortcut
htmlGroupStyle = htmlGroup.style;
// Set listeners to update the HTML div's position whenever the SVG group
// position is changed
extend(parentGroup, {
translateXSetter: function(value, key) {
htmlGroupStyle.left = value + 'px';
parentGroup[key] = value;
parentGroup.doTransform = true;
},
translateYSetter: function(value, key) {
htmlGroupStyle.top = value + 'px';
parentGroup[key] = value;
parentGroup.doTransform = true;
}
});
addSetters(parentGroup, htmlGroupStyle);
});
}
} else {
htmlGroup = container;
}
htmlGroup.appendChild(element);
// Shared with VML:
wrapper.added = true;
if (wrapper.alignOnAdd) {
wrapper.htmlUpdateTransform();
}
return wrapper;
};
}
return wrapper;
}
});
}(Highcharts));
(function(H) {
    /**
     * (c) 2010-2016 Torstein Honsi
     *
     * License: www.highcharts.com/license
     */
    'use strict';
    // NOTE(review): this closure is intentionally empty in this build —
    // presumably a module stripped at bundle time; confirm against the
    // unminified Highcharts distribution.
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var correctFloat = H.correctFloat,
defined = H.defined,
destroyObjectProperties = H.destroyObjectProperties,
isNumber = H.isNumber,
merge = H.merge,
pick = H.pick,
stop = H.stop,
deg2rad = H.deg2rad;
/**
* The Tick class
*/
H.Tick = function(axis, pos, type, noLabel) {
this.axis = axis;
this.pos = pos;
this.type = type || '';
this.isNew = true;
if (!type && !noLabel) {
this.addLabel();
}
};
H.Tick.prototype = {
        /**
         * Write the tick label
         */
        addLabel: function() {
            var tick = this,
                axis = tick.axis,
                options = axis.options,
                chart = axis.chart,
                categories = axis.categories,
                names = axis.names,
                pos = tick.pos,
                labelOptions = options.labels,
                str,
                tickPositions = axis.tickPositions,
                isFirst = pos === tickPositions[0],
                isLast = pos === tickPositions[tickPositions.length - 1],
                // On category axes, prefer the category name, then the point
                // name, then the raw position
                value = categories ?
                pick(categories[pos], names[pos], pos) :
                pos,
                label = tick.label,
                tickPositionInfo = tickPositions.info,
                dateTimeLabelFormat;
            // Set the datetime label format. If a higher rank is set for this position, use that. If not,
            // use the general format.
            if (axis.isDatetimeAxis && tickPositionInfo) {
                dateTimeLabelFormat =
                    options.dateTimeLabelFormats[
                        tickPositionInfo.higherRanks[pos] || tickPositionInfo.unitName
                    ];
            }
            // set properties for access in render method
            tick.isFirst = isFirst;
            tick.isLast = isLast;
            // get the string
            str = axis.labelFormatter.call({
                axis: axis,
                chart: chart,
                isFirst: isFirst,
                isLast: isLast,
                dateTimeLabelFormat: dateTimeLabelFormat,
                value: axis.isLog ? correctFloat(axis.lin2log(value)) : value
            });
            // prepare CSS
            //css = width && { width: Math.max(1, Math.round(width - 2 * (labelOptions.padding || 10))) + 'px' };
            // first call
            if (!defined(label)) {
                tick.label = label =
                    defined(str) && labelOptions.enabled ?
                    chart.renderer.text(
                        str,
                        0,
                        0,
                        labelOptions.useHTML
                    )
                    .add(axis.labelGroup):
                    null;
                tick.labelLength = label && label.getBBox().width; // Un-rotated length
                tick.rotation = 0; // Base value to detect change for new calls to getBBox
                // update
            } else if (label) {
                label.attr({
                    text: str
                });
            }
        },
/**
* Get the offset height or width of the label
*/
getLabelSize: function() {
return this.label ?
this.label.getBBox()[this.axis.horiz ? 'height' : 'width'] :
0;
},
        /**
         * Handle the label overflow by adjusting the labels to the left and right edge, or
         * hide them if they collide into the neighbour label.
         */
        handleOverflow: function(xy) {
            var axis = this.axis,
                pxPos = xy.x,
                chartWidth = axis.chart.chartWidth,
                spacing = axis.chart.spacing,
                leftBound = pick(axis.labelLeft, Math.min(axis.pos, spacing[3])),
                rightBound = pick(axis.labelRight, Math.max(axis.pos + axis.len, chartWidth - spacing[1])),
                label = this.label,
                rotation = this.rotation,
                factor = {
                    left: 0,
                    center: 0.5,
                    right: 1
                }[axis.labelAlign],
                labelWidth = label.getBBox().width,
                slotWidth = axis.getSlotWidth(),
                modifiedSlotWidth = slotWidth,
                xCorrection = factor,
                goRight = 1,
                leftPos,
                rightPos,
                textWidth,
                css = {};
            // Check if the label overshoots the chart spacing box. If it does, move it.
            // If it now overshoots the slotWidth, add ellipsis.
            if (!rotation) {
                leftPos = pxPos - factor * labelWidth;
                rightPos = pxPos + (1 - factor) * labelWidth;
                if (leftPos < leftBound) {
                    // Shrink the slot so the label fits from the left bound
                    modifiedSlotWidth = xy.x + modifiedSlotWidth * (1 - factor) - leftBound;
                } else if (rightPos > rightBound) {
                    modifiedSlotWidth = rightBound - xy.x + modifiedSlotWidth * factor;
                    goRight = -1;
                }
                modifiedSlotWidth = Math.min(slotWidth, modifiedSlotWidth); // #4177
                if (modifiedSlotWidth < slotWidth && axis.labelAlign === 'center') {
                    // Nudge the label into the shrunken slot
                    xy.x += goRight * (slotWidth - modifiedSlotWidth - xCorrection *
                        (slotWidth - Math.min(labelWidth, modifiedSlotWidth)));
                }
                // If the label width exceeds the available space, set a text width to be
                // picked up below. Also, if a width has been set before, we need to set a new
                // one because the reported labelWidth will be limited by the box (#3938).
                if (labelWidth > modifiedSlotWidth || (axis.autoRotation && (label.styles || {}).width)) {
                    textWidth = modifiedSlotWidth;
                }
                // Add ellipsis to prevent rotated labels to be clipped against the edge of the chart
            } else if (rotation < 0 && pxPos - factor * labelWidth < leftBound) {
                textWidth = Math.round(pxPos / Math.cos(rotation * deg2rad) - leftBound);
            } else if (rotation > 0 && pxPos + factor * labelWidth > rightBound) {
                textWidth = Math.round((chartWidth - pxPos) / Math.cos(rotation * deg2rad));
            }
            if (textWidth) {
                css.width = textWidth;
                // Respect an explicit textOverflow set via label style options
                if (!(axis.options.labels.style || {}).textOverflow) {
                    css.textOverflow = 'ellipsis';
                }
                label.css(css);
            }
        },
/**
* Get the x and y position for ticks and labels
*/
getPosition: function(horiz, pos, tickmarkOffset, old) {
var axis = this.axis,
chart = axis.chart,
cHeight = (old && chart.oldChartHeight) || chart.chartHeight;
return {
x: horiz ?
axis.translate(pos + tickmarkOffset, null, null, old) + axis.transB : axis.left + axis.offset +
(axis.opposite ?
((old && chart.oldChartWidth) || chart.chartWidth) - axis.right - axis.left :
0
),
y: horiz ?
cHeight - axis.bottom + axis.offset - (axis.opposite ? axis.height : 0) : cHeight - axis.translate(pos + tickmarkOffset, null, null, old) - axis.transB
};
},
        /**
         * Get the x, y position of the tick label
         */
        getLabelPosition: function(x, y, label, horiz, labelOptions, tickmarkOffset, index, step) {
            var axis = this.axis,
                transA = axis.transA,
                reversed = axis.reversed,
                staggerLines = axis.staggerLines,
                rotCorr = axis.tickRotCorr || {
                    x: 0,
                    y: 0
                },
                yOffset = labelOptions.y,
                line;
            // When no explicit y offset is configured, derive one per axis side
            if (!defined(yOffset)) {
                if (axis.side === 0) {
                    // top axis: lift by label height, or a fixed 8px when rotated
                    yOffset = label.rotation ? -8 : -label.getBBox().height;
                } else if (axis.side === 2) {
                    // bottom axis
                    yOffset = rotCorr.y + 8;
                } else {
                    // left/right axes: vertically center on the tick (#3140)
                    yOffset = Math.cos(label.rotation * deg2rad) * (rotCorr.y - label.getBBox(false, 0).height / 2);
                }
            }
            x = x + labelOptions.x + rotCorr.x - (tickmarkOffset && horiz ?
                tickmarkOffset * transA * (reversed ? -1 : 1) : 0);
            y = y + yOffset - (tickmarkOffset && !horiz ?
                tickmarkOffset * transA * (reversed ? 1 : -1) : 0);
            // Correct for staggered labels
            if (staggerLines) {
                line = (index / (step || 1) % staggerLines);
                if (axis.opposite) {
                    line = staggerLines - line - 1;
                }
                y += line * (axis.labelOffset / staggerLines);
            }
            return {
                x: x,
                y: Math.round(y)
            };
        },
/**
* Extendible method to return the path of the marker
*/
getMarkPath: function(x, y, tickLength, tickWidth, horiz, renderer) {
return renderer.crispLine([
'M',
x,
y,
'L',
x + (horiz ? 0 : -tickLength),
y + (horiz ? tickLength : 0)
], tickWidth);
},
        /**
         * Put everything in place
         *
         * @param index {Number}
         * @param old {Boolean} Use old coordinates to prepare an animation into new position
         * @param opacity {Number} Target opacity, defaults to 1
         */
        render: function(index, old, opacity) {
            var tick = this,
                axis = tick.axis,
                options = axis.options,
                chart = axis.chart,
                renderer = chart.renderer,
                horiz = axis.horiz,
                type = tick.type,
                label = tick.label,
                pos = tick.pos,
                labelOptions = options.labels,
                gridLine = tick.gridLine,
                tickPrefix = type ? type + 'Tick' : 'tick',
                tickSize = axis.tickSize(tickPrefix),
                gridLinePath,
                mark = tick.mark,
                isNewMark = !mark,
                step = labelOptions.step,
                attribs = {},
                show = true,
                tickmarkOffset = axis.tickmarkOffset,
                xy = tick.getPosition(horiz, pos, tickmarkOffset, old),
                x = xy.x,
                y = xy.y,
                // Flip crisping at the far axis edge so strokes stay inside
                reverseCrisp = ((horiz && x === axis.pos + axis.len) ||
                    (!horiz && y === axis.pos)) ? -1 : 1; // #1480, #1687
            opacity = pick(opacity, 1);
            this.isActive = true;
            // Create the grid line
            if (!gridLine) {
                if (!type) {
                    attribs.zIndex = 1;
                }
                if (old) {
                    // Start invisible so it can fade in on the follow-up render
                    attribs.opacity = 0;
                }
                tick.gridLine = gridLine = renderer.path()
                    .attr(attribs)
                    .addClass('highcharts-' + (type ? type + '-' : '') + 'grid-line')
                    .add(axis.gridGroup);
            }
            // If the parameter 'old' is set, the current call will be followed
            // by another call, therefore do not do any animations this time
            if (!old && gridLine) {
                gridLinePath = axis.getPlotLinePath(pos + tickmarkOffset, gridLine.strokeWidth() * reverseCrisp, old, true);
                if (gridLinePath) {
                    gridLine[tick.isNew ? 'attr' : 'animate']({
                        d: gridLinePath,
                        opacity: opacity
                    });
                }
            }
            // create the tick mark
            if (tickSize) {
                // negate the length so opposite axes draw inward
                if (axis.opposite) {
                    tickSize[0] = -tickSize[0];
                }
                // First time, create it
                if (isNewMark) {
                    tick.mark = mark = renderer.path()
                        .addClass('highcharts-' + (type ? type + '-' : '') + 'tick')
                        .add(axis.axisGroup);
                }
                mark[isNewMark ? 'attr' : 'animate']({
                    d: tick.getMarkPath(x, y, tickSize[0], mark.strokeWidth() * reverseCrisp, horiz, renderer),
                    opacity: opacity
                });
            }
            // the label is created on init - now move it into place
            if (label && isNumber(x)) {
                label.xy = xy = tick.getLabelPosition(x, y, label, horiz, labelOptions, tickmarkOffset, index, step);
                // Apply show first and show last. If the tick is both first and last, it is
                // a single centered tick, in which case we show the label anyway (#2100).
                if ((tick.isFirst && !tick.isLast && !pick(options.showFirstLabel, 1)) ||
                    (tick.isLast && !tick.isFirst && !pick(options.showLastLabel, 1))) {
                    show = false;
                    // Handle label overflow and show or hide accordingly
                } else if (horiz && !axis.isRadial && !labelOptions.step &&
                    !labelOptions.rotation && !old && opacity !== 0) {
                    tick.handleOverflow(xy);
                }
                // apply step
                if (step && index % step) {
                    // show those indices dividable by step
                    show = false;
                }
                // Set the new position, and show or hide
                if (show && isNumber(xy.y)) {
                    xy.opacity = opacity;
                    label[tick.isNew ? 'attr' : 'animate'](xy);
                } else {
                    stop(label); // #5332
                    label.attr('y', -9999); // #1338
                }
                tick.isNew = false;
            }
        },
        /**
         * Destructor for the tick prototype
         */
        destroy: function() {
            // NOTE(review): this.axis is passed as the second argument,
            // presumably so the axis reference itself is excluded from
            // destruction — confirm destroyObjectProperties' semantics.
            destroyObjectProperties(this, this.axis);
        }
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var arrayMax = H.arrayMax,
arrayMin = H.arrayMin,
defined = H.defined,
destroyObjectProperties = H.destroyObjectProperties,
each = H.each,
erase = H.erase,
merge = H.merge,
pick = H.pick;
/*
* The object wrapper for plot lines and plot bands
* @param {Object} options
*/
H.PlotLineOrBand = function(axis, options) {
this.axis = axis;
if (options) {
this.options = options;
this.id = options.id;
}
};
    H.PlotLineOrBand.prototype = {
        /**
         * Render the plot line or plot band. If it is already existing,
         * move it.
         */
        render: function() {
            var plotLine = this,
                axis = plotLine.axis,
                horiz = axis.horiz,
                options = plotLine.options,
                optionsLabel = options.label,
                label = plotLine.label,
                to = options.to,
                from = options.from,
                value = options.value,
                // from/to present => band; value present => line
                isBand = defined(from) && defined(to),
                isLine = defined(value),
                svgElem = plotLine.svgElem,
                isNew = !svgElem,
                path = [],
                addEvent,
                eventType,
                color = options.color,
                zIndex = pick(options.zIndex, 0),
                events = options.events,
                attribs = {
                    'class': 'highcharts-plot-' + (isBand ? 'band ' : 'line ') + (options.className || '')
                },
                groupAttribs = {},
                renderer = axis.chart.renderer,
                groupName = isBand ? 'bands' : 'lines',
                group,
                log2lin = axis.log2lin;
            // logarithmic conversion
            if (axis.isLog) {
                from = log2lin(from);
                to = log2lin(to);
                value = log2lin(value);
            }
            // Grouping and zIndex: one group per zIndex per kind, cached on the axis
            groupAttribs.zIndex = zIndex;
            groupName += '-' + zIndex;
            group = axis[groupName];
            if (!group) {
                axis[groupName] = group = renderer.g('plot-' + groupName)
                    .attr(groupAttribs).add();
            }
            // Create the path
            if (isNew) {
                plotLine.svgElem = svgElem =
                    renderer
                    .path()
                    .attr(attribs).add(group);
            }
            // Set the path or return
            if (isLine) {
                path = axis.getPlotLinePath(value, svgElem.strokeWidth());
            } else if (isBand) { // plot band
                path = axis.getPlotBandPath(from, to, options);
            } else {
                return;
            }
            // common for lines and bands
            if (isNew && path && path.length) {
                svgElem.attr({
                    d: path
                });
                // events
                if (events) {
                    addEvent = function(eventType) {
                        svgElem.on(eventType, function(e) {
                            events[eventType].apply(plotLine, [e]);
                        });
                    };
                    for (eventType in events) {
                        addEvent(eventType);
                    }
                }
            } else if (svgElem) {
                // Existing element: animate to the new path, or hide it when
                // the line/band has moved outside the axis range
                if (path) {
                    svgElem.show();
                    svgElem.animate({
                        d: path
                    });
                } else {
                    svgElem.hide();
                    if (label) {
                        plotLine.label = label = label.destroy();
                    }
                }
            }
            // the plot band/line label
            if (optionsLabel && defined(optionsLabel.text) && path && path.length &&
                axis.width > 0 && axis.height > 0 && !path.flat) {
                // apply defaults
                optionsLabel = merge({
                    align: horiz && isBand && 'center',
                    x: horiz ? !isBand && 4 : 10,
                    verticalAlign: !horiz && isBand && 'middle',
                    y: horiz ? isBand ? 16 : 10 : isBand ? 6 : -4,
                    rotation: horiz && !isBand && 90
                }, optionsLabel);
                this.renderLabel(optionsLabel, path, isBand, zIndex);
            } else if (label) { // move out of sight
                label.hide();
            }
            // chainable
            return plotLine;
        },
        /**
         * Render and align label for plot line or band.
         */
        renderLabel: function(optionsLabel, path, isBand, zIndex) {
            var plotLine = this,
                label = plotLine.label,
                renderer = plotLine.axis.chart.renderer,
                attribs,
                xs,
                ys,
                x,
                y;
            // add the SVG element (first render only; reused afterwards)
            if (!label) {
                attribs = {
                    align: optionsLabel.textAlign || optionsLabel.align,
                    rotation: optionsLabel.rotation,
                    'class': 'highcharts-plot-' + (isBand ? 'band' : 'line') + '-label ' + (optionsLabel.className || '')
                };
                attribs.zIndex = zIndex;
                plotLine.label = label = renderer.text(
                        optionsLabel.text,
                        0,
                        0,
                        optionsLabel.useHTML
                    )
                    .attr(attribs)
                    .add();
            }
            // get the bounding box and align the label
            // #3000 changed to better handle choice between plotband or plotline
            xs = [path[1], path[4], (isBand ? path[6] : path[1])];
            ys = [path[2], path[5], (isBand ? path[7] : path[2])];
            x = arrayMin(xs);
            y = arrayMin(ys);
            label.align(optionsLabel, false, {
                x: x,
                y: y,
                width: arrayMax(xs) - x,
                height: arrayMax(ys) - y
            });
            label.show();
        },
        /**
         * Remove the plot line or band
         */
        destroy: function() {
            // remove it from the axis' lookup list
            erase(this.axis.plotLinesAndBands, this);
            // break the back-reference before destroying remaining properties
            delete this.axis;
            destroyObjectProperties(this);
        }
};
/**
 * Object with members for extending the Axis prototype
 * @todo Extend directly instead of adding object to Highcharts first
 */
H.AxisPlotLineOrBandExtension = {
    /**
     * Build the SVG path for a plot band between two axis values.
     * Returns null when both edges fall outside the axis area.
     */
    getPlotBandPath: function(from, to) {
        var endPath = this.getPlotLinePath(to, null, null, true),
            bandPath = this.getPlotLinePath(from, null, null, true);

        if (bandPath && endPath) {
            // Flat paths don't need labels (#3836)
            bandPath.flat = bandPath.toString() === endPath.toString();
            // Close the band by walking back along the far edge
            bandPath.push(
                endPath[4],
                endPath[5],
                endPath[1],
                endPath[2]
            );
        } else { // outside the axis area
            bandPath = null;
        }
        return bandPath;
    },
    addPlotBand: function(options) {
        return this.addPlotBandOrLine(options, 'plotBands');
    },
    addPlotLine: function(options) {
        return this.addPlotBandOrLine(options, 'plotLines');
    },
    /**
     * Add a plot band or plot line after render time
     *
     * @param options {Object} The plotBand or plotLine configuration object
     */
    addPlotBandOrLine: function(options, coll) {
        var userOptions = this.userOptions,
            obj = new H.PlotLineOrBand(this, options).render();

        if (obj) { // #2189
            // Keep the user options in sync for exporting and Axis.update
            if (coll) {
                userOptions[coll] = userOptions[coll] || [];
                userOptions[coll].push(options);
            }
            this.plotLinesAndBands.push(obj);
        }
        return obj;
    },
    /**
     * Remove a plot band or plot line from the chart by id
     * @param {Object} id
     */
    removePlotBandOrLine: function(id) {
        var plotLinesAndBands = this.plotLinesAndBands,
            options = this.options,
            userOptions = this.userOptions,
            i = plotLinesAndBands.length;

        // Destroy every rendered object carrying this id
        while (i--) {
            if (plotLinesAndBands[i].id === id) {
                plotLinesAndBands[i].destroy();
            }
        }
        // Strip matching configurations from both computed and user options
        each([
            options.plotLines || [],
            userOptions.plotLines || [],
            options.plotBands || [],
            userOptions.plotBands || []
        ], function(arr) {
            i = arr.length;
            while (i--) {
                if (arr[i].id === id) {
                    erase(arr, arr[i]);
                }
            }
        });
    }
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
animObject = H.animObject,
arrayMax = H.arrayMax,
arrayMin = H.arrayMin,
AxisPlotLineOrBandExtension = H.AxisPlotLineOrBandExtension,
color = H.color,
correctFloat = H.correctFloat,
defaultOptions = H.defaultOptions,
defined = H.defined,
deg2rad = H.deg2rad,
destroyObjectProperties = H.destroyObjectProperties,
each = H.each,
error = H.error,
extend = H.extend,
fireEvent = H.fireEvent,
format = H.format,
getMagnitude = H.getMagnitude,
grep = H.grep,
inArray = H.inArray,
isArray = H.isArray,
isNumber = H.isNumber,
isString = H.isString,
merge = H.merge,
normalizeTickInterval = H.normalizeTickInterval,
pick = H.pick,
PlotLineOrBand = H.PlotLineOrBand,
removeEvent = H.removeEvent,
splat = H.splat,
syncTimeout = H.syncTimeout,
Tick = H.Tick;
/**
 * Create a new axis object. Common constructor for both X and Y axes;
 * which one is built is decided by userOptions.isX inside init.
 * @param {Object} chart - the chart the axis belongs to
 * @param {Object} options - user configuration for the axis
 */
H.Axis = function() {
    // All setup is delegated to init (re-run on Axis.update as well)
    this.init.apply(this, arguments);
};
H.Axis.prototype = {
/**
 * Default options for the X axis - the Y axis has extended defaults.
 * These are merged with side-specific and user options in setOptions.
 */
defaultOptions: {
    // allowDecimals: null,
    // alternateGridColor: null,
    // categories: [],
    // Label formats used per datetime tick level
    dateTimeLabelFormats: {
        millisecond: '%H:%M:%S.%L',
        second: '%H:%M:%S',
        minute: '%H:%M',
        hour: '%H:%M',
        day: '%e. %b',
        week: '%e. %b',
        month: '%b \'%y',
        year: '%Y'
    },
    endOnTick: false,
    // reversed: false,
    labels: {
        enabled: true,
        // rotation: 0,
        // align: 'center',
        // step: null,
        x: 0
        //y: undefined
        /*formatter: function () {
            return this.value;
        },*/
    },
    //linkedTo: null,
    //max: undefined,
    //min: undefined,
    // Padding (as a fraction of the axis length) applied in setTickInterval
    minPadding: 0.01,
    maxPadding: 0.01,
    //minRange: null,
    //minorTickInterval: null,
    minorTickLength: 2,
    minorTickPosition: 'outside', // inside or outside
    //opposite: false,
    //offset: 0,
    //plotBands: [{
    //    events: {},
    //    zIndex: 1,
    //    labels: { align, x, verticalAlign, y, style, rotation, textAlign }
    //}],
    //plotLines: [{
    //    events: {}
    //    dashStyle: {}
    //    zIndex:
    //    labels: { align, x, verticalAlign, y, style, rotation, textAlign }
    //}],
    //reversed: false,
    // showFirstLabel: true,
    // showLastLabel: true,
    startOfWeek: 1,
    startOnTick: false,
    //tickInterval: null,
    tickLength: 10,
    tickmarkPlacement: 'between', // on or between
    tickPixelInterval: 100,
    tickPosition: 'outside',
    title: {
        //text: null,
        align: 'middle', // low, middle or high
        //margin: 0 for horizontal, 10 for vertical axes,
        //rotation: 0,
        //side: 'outside',
        //x: 0,
        //y: 0
    },
    type: 'linear', // linear, logarithmic or datetime
    //visible: true
},
/**
 * This options set extends the defaultOptions for Y axes.
 * Merged on top of defaultOptions in setOptions when coll === 'yAxis'.
 */
defaultYAxisOptions: {
    endOnTick: true,
    tickPixelInterval: 72,
    showLastLabel: true,
    labels: {
        x: -8
    },
    maxPadding: 0.05,
    minPadding: 0.05,
    startOnTick: true,
    title: {
        rotation: 270,
        text: 'Values'
    },
    // Labels for the total value on top of stacked series
    stackLabels: {
        enabled: false,
        //align: dynamic,
        //y: dynamic,
        //x: dynamic,
        //verticalAlign: dynamic,
        //textAlign: dynamic,
        //rotation: 0,
        formatter: function() {
            return H.numberFormat(this.total, -1);
        }
    }
},
/**
 * These options extend the defaultOptions for left axes (side 3).
 */
defaultLeftAxisOptions: {
    labels: {
        x: -15
    },
    title: {
        rotation: 270
    }
},
/**
 * These options extend the defaultOptions for right axes (side 1).
 */
defaultRightAxisOptions: {
    labels: {
        x: 15
    },
    title: {
        rotation: 90
    }
},
/**
 * These options extend the defaultOptions for bottom axes (side 2).
 */
defaultBottomAxisOptions: {
    labels: {
        // Allowed label rotations when labels don't fit horizontally
        autoRotation: [-45],
        x: 0
        // overflow: undefined,
        // staggerLines: null
    },
    title: {
        rotation: 0
    }
},
/**
 * These options extend the defaultOptions for top axes (side 0).
 */
defaultTopAxisOptions: {
    labels: {
        // Allowed label rotations when labels don't fit horizontally
        autoRotation: [-45],
        x: 0
        // overflow: undefined
        // staggerLines: null
    },
    title: {
        rotation: 0
    }
},
/**
 * Initialize the axis: resolve options, set up flags and collections,
 * register the axis on the chart and attach event listeners.
 * @param {Object} chart - the owning chart
 * @param {Object} userOptions - user configuration; userOptions.isX
 *        decides whether this becomes an X or a Y axis
 */
init: function(chart, userOptions) {
    var isXAxis = userOptions.isX,
        axis = this;
    axis.chart = chart;
    // Flag, is the axis horizontal
    axis.horiz = chart.inverted ? !isXAxis : isXAxis;
    // Flag, isXAxis
    axis.isXAxis = isXAxis;
    axis.coll = axis.coll || (isXAxis ? 'xAxis' : 'yAxis');
    axis.opposite = userOptions.opposite; // needed in setOptions
    // Numeric side: 0 top, 1 right, 2 bottom, 3 left; picks the
    // side-specific default options in setOptions
    axis.side = userOptions.side || (axis.horiz ?
        (axis.opposite ? 0 : 2) : // top : bottom
        (axis.opposite ? 1 : 3)); // right : left
    axis.setOptions(userOptions);
    var options = this.options,
        type = options.type,
        isDatetimeAxis = type === 'datetime';
    axis.labelFormatter = options.labels.formatter || axis.defaultLabelFormatter; // can be overwritten by dynamic format
    // Flag, stagger lines or not
    axis.userOptions = userOptions;
    //axis.axisTitleMargin = undefined,// = options.title.margin,
    axis.minPixelPadding = 0;
    axis.reversed = options.reversed;
    axis.visible = options.visible !== false;
    axis.zoomEnabled = options.zoomEnabled !== false;
    // Initial categories
    axis.hasNames = type === 'category' || options.categories === true;
    axis.categories = options.categories || axis.hasNames;
    axis.names = axis.names || []; // Preserve on update (#3830)
    // Elements
    //axis.axisGroup = undefined;
    //axis.gridGroup = undefined;
    //axis.axisTitle = undefined;
    //axis.axisLine = undefined;
    // Shorthand types
    axis.isLog = type === 'logarithmic';
    axis.isDatetimeAxis = isDatetimeAxis;
    // Flag, if axis is linked to another axis
    axis.isLinked = defined(options.linkedTo);
    // Linked axis.
    //axis.linkedParent = undefined;
    // Tick positions
    //axis.tickPositions = undefined; // array containing predefined positions
    // Tick intervals
    //axis.tickInterval = undefined;
    //axis.minorTickInterval = undefined;
    // Major ticks
    axis.ticks = {};
    axis.labelEdge = [];
    // Minor ticks
    axis.minorTicks = {};
    // List of plotLines/Bands
    axis.plotLinesAndBands = [];
    // Alternate bands
    axis.alternateBands = {};
    // Axis metrics
    //axis.left = undefined;
    //axis.top = undefined;
    //axis.width = undefined;
    //axis.height = undefined;
    //axis.bottom = undefined;
    //axis.right = undefined;
    //axis.transA = undefined;
    //axis.transB = undefined;
    //axis.oldTransA = undefined;
    axis.len = 0;
    //axis.oldMin = undefined;
    //axis.oldMax = undefined;
    //axis.oldUserMin = undefined;
    //axis.oldUserMax = undefined;
    //axis.oldAxisLength = undefined;
    // maxZoom is the legacy name for minRange
    axis.minRange = axis.userMinRange = options.minRange || options.maxZoom;
    axis.range = options.range;
    axis.offset = options.offset || 0;
    // Dictionary for stacks
    axis.stacks = {};
    axis.oldStacks = {};
    axis.stacksTouched = 0;
    // Min and max in the data
    //axis.dataMin = undefined,
    //axis.dataMax = undefined,
    // The axis range
    axis.max = null;
    axis.min = null;
    // Crosshair options: axis option wins, else tooltip.crosshairs
    // (first entry for X, second for Y), else disabled
    axis.crosshair = pick(options.crosshair, splat(chart.options.tooltip.crosshairs)[isXAxis ? 0 : 1], false);
    // Run Axis
    var eventType,
        events = axis.options.events;
    // Register
    if (inArray(axis, chart.axes) === -1) { // don't add it again on Axis.update()
        if (isXAxis) { // #2713
            chart.axes.splice(chart.xAxis.length, 0, axis);
        } else {
            chart.axes.push(axis);
        }
        chart[axis.coll].push(axis);
    }
    axis.series = axis.series || []; // populated by Series
    // inverted charts have reversed xAxes as default
    if (chart.inverted && isXAxis && axis.reversed === undefined) {
        axis.reversed = true;
    }
    // Aliases so plot bands and lines share the same removal entry point
    axis.removePlotBand = axis.removePlotBandOrLine;
    axis.removePlotLine = axis.removePlotBandOrLine;
    // register event listeners
    for (eventType in events) {
        addEvent(axis, eventType, events[eventType]);
    }
    // extend logarithmic axis
    if (axis.isLog) {
        axis.val2lin = axis.log2lin;
        axis.lin2val = axis.lin2log;
    }
},
/**
* Merge and set options
*/
setOptions: function(userOptions) {
this.options = merge(
this.defaultOptions,
this.coll === 'yAxis' && this.defaultYAxisOptions, [this.defaultTopAxisOptions, this.defaultRightAxisOptions,
this.defaultBottomAxisOptions, this.defaultLeftAxisOptions
][this.side],
merge(
defaultOptions[this.coll], // if set in setOptions (#1053)
userOptions
)
);
},
/**
* The default label formatter. The context is a special config object for the label.
*/
defaultLabelFormatter: function() {
var axis = this.axis,
value = this.value,
categories = axis.categories,
dateTimeLabelFormat = this.dateTimeLabelFormat,
numericSymbols = defaultOptions.lang.numericSymbols,
i = numericSymbols && numericSymbols.length,
multi,
ret,
formatOption = axis.options.labels.format,
// make sure the same symbol is added for all labels on a linear axis
numericSymbolDetector = axis.isLog ? value : axis.tickInterval;
if (formatOption) {
ret = format(formatOption, this);
} else if (categories) {
ret = value;
} else if (dateTimeLabelFormat) { // datetime axis
ret = H.dateFormat(dateTimeLabelFormat, value);
} else if (i && numericSymbolDetector >= 1000) {
// Decide whether we should add a numeric symbol like k (thousands) or M (millions).
// If we are to enable this in tooltip or other places as well, we can move this
// logic to the numberFormatter and enable it by a parameter.
while (i-- && ret === undefined) {
multi = Math.pow(1000, i + 1);
if (numericSymbolDetector >= multi && (value * 10) % multi === 0 && numericSymbols[i] !== null && value !== 0) { // #5480
ret = H.numberFormat(value / multi, -1) + numericSymbols[i];
}
}
}
if (ret === undefined) {
if (Math.abs(value) >= 10000) { // add thousands separators
ret = H.numberFormat(value, -1);
} else { // small numbers
ret = H.numberFormat(value, -1, undefined, ''); // #2466
}
}
return ret;
},
/**
 * Get the minimum and maximum for the series of each axis. Sets
 * axis.dataMin, axis.dataMax, axis.threshold and axis.softThreshold as
 * side effects; returns nothing.
 */
getSeriesExtremes: function() {
    var axis = this,
        chart = axis.chart;
    axis.hasVisibleSeries = false;
    // Reset properties in case we're redrawing (#3353)
    axis.dataMin = axis.dataMax = axis.threshold = null;
    axis.softThreshold = !axis.isXAxis;
    // Stacked series need their stacks rebuilt before extremes are read
    if (axis.buildStacks) {
        axis.buildStacks();
    }
    // loop through this axis' series
    each(axis.series, function(series) {
        if (series.visible || !chart.options.chart.ignoreHiddenSeries) {
            var seriesOptions = series.options,
                xData,
                threshold = seriesOptions.threshold,
                seriesDataMin,
                seriesDataMax;
            axis.hasVisibleSeries = true;
            // Validate threshold in logarithmic axes
            if (axis.isLog && threshold <= 0) {
                threshold = null;
            }
            // Get dataMin and dataMax for X axes
            if (axis.isXAxis) {
                xData = series.xData;
                if (xData.length) {
                    // If xData contains values which is not numbers, then filter them out.
                    // To prevent performance hit, we only do this after we have already
                    // found seriesDataMin because in most cases all data is valid. #5234.
                    seriesDataMin = arrayMin(xData);
                    if (!isNumber(seriesDataMin) && !(seriesDataMin instanceof Date)) { // Date for #5010
                        xData = grep(xData, function(x) {
                            return isNumber(x);
                        });
                        seriesDataMin = arrayMin(xData); // Do it again with valid data
                    }
                    axis.dataMin = Math.min(pick(axis.dataMin, xData[0]), seriesDataMin);
                    axis.dataMax = Math.max(pick(axis.dataMax, xData[0]), arrayMax(xData));
                }
                // Get dataMin and dataMax for Y axes, as well as handle stacking and processed data
            } else {
                // Get this particular series extremes
                series.getExtremes();
                seriesDataMax = series.dataMax;
                seriesDataMin = series.dataMin;
                // Get the dataMin and dataMax so far. If percentage is used, the min and max are
                // always 0 and 100. If seriesDataMin and seriesDataMax is null, then series
                // doesn't have active y data, we continue with nulls
                if (defined(seriesDataMin) && defined(seriesDataMax)) {
                    axis.dataMin = Math.min(pick(axis.dataMin, seriesDataMin), seriesDataMin);
                    axis.dataMax = Math.max(pick(axis.dataMax, seriesDataMax), seriesDataMax);
                }
                // Adjust to threshold
                if (defined(threshold)) {
                    axis.threshold = threshold;
                }
                // If any series has a hard threshold, it takes precedence
                if (!seriesOptions.softThreshold || axis.isLog) {
                    axis.softThreshold = false;
                }
            }
        }
    });
},
/**
 * Translate from axis value to pixel position on the chart, or back.
 *
 * @param {Number} val - the value (or pixel, when backwards) to translate
 * @param {Boolean} backwards - translate from pixels to axis value
 * @param {Boolean} cvsCoord - use canvas coordinates (origin at the top,
 *        like SVG), inverting vertical axes
 * @param {Boolean} old - use pre-resize translation factors
 * @param {Boolean} handleLog - apply log/ordinal post-translation
 * @param {Number|String} pointPlacement - extra offset in point ranges;
 *        'between' maps to 0.5
 */
translate: function(val, backwards, cvsCoord, old, handleLog, pointPlacement) {
    var axis = this.linkedParent || this, // #1417
        sign = 1,
        cvsOffset = 0,
        localA = old ? axis.oldTransA : axis.transA,
        localMin = old ? axis.oldMin : axis.min,
        returnValue,
        minPixelPadding = axis.minPixelPadding,
        // Ordinal, broken and (when handleLog) log axes need a non-linear
        // mapping applied on top of the linear translation
        doPostTranslate = (axis.isOrdinal || axis.isBroken || (axis.isLog && handleLog)) && axis.lin2val;
    if (!localA) {
        localA = axis.transA;
    }
    // In vertical axes, the canvas coordinates start from 0 at the top like in
    // SVG.
    if (cvsCoord) {
        sign *= -1; // canvas coordinates inverts the value
        cvsOffset = axis.len;
    }
    // Handle reversed axis
    if (axis.reversed) {
        sign *= -1;
        cvsOffset -= sign * (axis.sector || axis.len);
    }
    // From pixels to value
    if (backwards) { // reverse translation
        val = val * sign + cvsOffset;
        val -= minPixelPadding;
        returnValue = val / localA + localMin; // from chart pixel to value
        if (doPostTranslate) { // log and ordinal axes
            returnValue = axis.lin2val(returnValue);
        }
        // From value to pixels
    } else {
        if (doPostTranslate) { // log and ordinal axes
            val = axis.val2lin(val);
        }
        if (pointPlacement === 'between') {
            pointPlacement = 0.5;
        }
        returnValue = sign * (val - localMin) * localA + cvsOffset + (sign * minPixelPadding) +
            (isNumber(pointPlacement) ? localA * pointPlacement * axis.pointRange : 0);
    }
    return returnValue;
},
/**
* Utility method to translate an axis value to pixel position.
* @param {Number} value A value in terms of axis units
* @param {Boolean} paneCoordinates Whether to return the pixel coordinate relative to the chart
* or just the axis/pane itself.
*/
toPixels: function(value, paneCoordinates) {
return this.translate(value, false, !this.horiz, null, true) + (paneCoordinates ? 0 : this.pos);
},
/*
* Utility method to translate a pixel position in to an axis value
* @param {Number} pixel The pixel value coordinate
* @param {Boolean} paneCoordiantes Whether the input pixel is relative to the chart or just the
* axis/pane itself.
*/
toValue: function(pixel, paneCoordinates) {
return this.translate(pixel - (paneCoordinates ? 0 : this.pos), true, !this.horiz, null, true);
},
/**
 * Create the path for a plot line that goes from the given value on
 * this axis, across the plot to the opposite side
 * @param {Number} value
 * @param {Number} lineWidth Used for calculation crisp line
 * @param {Number} old Use old coordinates (for resizing and rescaling)
 * @param {Boolean} force Clamp out-of-range values to the axis edge
 *        instead of skipping the line
 * @param {Number} translatedValue Pre-translated pixel position, skips
 *        the translate call
 * @returns {Array|null} SVG path array, or null when outside the axis
 */
getPlotLinePath: function(value, lineWidth, old, force, translatedValue) {
    var axis = this,
        chart = axis.chart,
        axisLeft = axis.left,
        axisTop = axis.top,
        x1,
        y1,
        x2,
        y2,
        cHeight = (old && chart.oldChartHeight) || chart.chartHeight,
        cWidth = (old && chart.oldChartWidth) || chart.chartWidth,
        skip,
        transB = axis.transB,
        /**
         * Check if x is between a and b. If not, either move to a/b or skip,
         * depending on the force parameter.
         */
        between = function(x, a, b) {
            if (x < a || x > b) {
                if (force) {
                    x = Math.min(Math.max(a, x), b);
                } else {
                    skip = true;
                }
            }
            return x;
        };
    translatedValue = pick(translatedValue, axis.translate(value, null, null, old));
    // Seed both coordinate pairs; the branch below overwrites the pair
    // perpendicular to the axis
    x1 = x2 = Math.round(translatedValue + transB);
    y1 = y2 = Math.round(cHeight - translatedValue - transB);
    if (!isNumber(translatedValue)) { // no min or max
        skip = true;
    } else if (axis.horiz) {
        y1 = axisTop;
        y2 = cHeight - axis.bottom;
        x1 = x2 = between(x1, axisLeft, axisLeft + axis.width);
    } else {
        x1 = axisLeft;
        x2 = cWidth - axis.right;
        y1 = y2 = between(y1, axisTop, axisTop + axis.height);
    }
    return skip && !force ?
        null :
        chart.renderer.crispLine(['M', x1, y1, 'L', x2, y2], lineWidth || 1);
},
/**
* Set the tick positions of a linear axis to round values like whole tens or every five.
*/
getLinearTickPositions: function(tickInterval, min, max) {
var pos,
lastPos,
roundedMin = correctFloat(Math.floor(min / tickInterval) * tickInterval),
roundedMax = correctFloat(Math.ceil(max / tickInterval) * tickInterval),
tickPositions = [];
// For single points, add a tick regardless of the relative position (#2662)
if (min === max && isNumber(min)) {
return [min];
}
// Populate the intermediate values
pos = roundedMin;
while (pos <= roundedMax) {
// Place the tick on the rounded value
tickPositions.push(pos);
// Always add the raw tickInterval, not the corrected one.
pos = correctFloat(pos + tickInterval);
// If the interval is not big enough in the current min - max range to actually increase
// the loop variable, we need to break out to prevent endless loop. Issue #619
if (pos === lastPos) {
break;
}
// Record the last value
lastPos = pos;
}
return tickPositions;
},
/**
 * Return the minor tick positions. For logarithmic axes, reuse the same logic
 * as for major ticks. Datetime axes with 'auto' minor interval get time
 * ticks; linear axes get evenly spaced positions between the majors.
 * @returns {Array} minor tick positions, possibly empty
 */
getMinorTickPositions: function() {
    var axis = this,
        options = axis.options,
        tickPositions = axis.tickPositions,
        minorTickInterval = axis.minorTickInterval,
        minorTickPositions = [],
        pos,
        i,
        pointRangePadding = axis.pointRangePadding || 0,
        min = axis.min - pointRangePadding, // #1498
        max = axis.max + pointRangePadding, // #1498
        range = max - min,
        len;
    // If minor ticks get too dense, they are hard to read, and may cause long running script. So we don't draw them.
    if (range && range / minorTickInterval < axis.len / 3) { // #3875
        if (axis.isLog) {
            len = tickPositions.length;
            // Fill minor ticks between each pair of major log ticks
            for (i = 1; i < len; i++) {
                minorTickPositions = minorTickPositions.concat(
                    axis.getLogTickPositions(minorTickInterval, tickPositions[i - 1], tickPositions[i], true)
                );
            }
        } else if (axis.isDatetimeAxis && options.minorTickInterval === 'auto') { // #1314
            minorTickPositions = minorTickPositions.concat(
                axis.getTimeTicks(
                    axis.normalizeTimeTickInterval(minorTickInterval),
                    min,
                    max,
                    options.startOfWeek
                )
            );
        } else {
            // Linear axis: start aligned with the first major tick
            for (pos = min + (tickPositions[0] - min) % minorTickInterval; pos <= max; pos += minorTickInterval) {
                minorTickPositions.push(pos);
            }
        }
    }
    if (minorTickPositions.length !== 0) { // don't change the extremes, when there is no minor ticks
        axis.trimTicks(minorTickPositions, options.startOnTick, options.endOnTick); // #3652 #3743 #1498
    }
    return minorTickPositions;
},
/**
 * Adjust the min and max for the minimum range. Keep in mind that the series data is
 * not yet processed, so we don't have information on data cropping and grouping, or
 * updated axis.pointRange or series.pointRange. The data can't be processed until
 * we have finally established min and max.
 */
adjustForMinRange: function() {
    var axis = this,
        options = axis.options,
        min = axis.min,
        max = axis.max,
        zoomOffset,
        spaceAvailable = axis.dataMax - axis.dataMin >= axis.minRange,
        closestDataRange,
        i,
        distance,
        xData,
        loopLength,
        minArgs,
        maxArgs,
        minRange;
    // Set the automatic minimum range based on the closest point distance
    if (axis.isXAxis && axis.minRange === undefined && !axis.isLog) {
        if (defined(options.min) || defined(options.max)) {
            axis.minRange = null; // don't do this again
        } else {
            // Find the closest distance between raw data points, as opposed to
            // closestPointRange that applies to processed points (cropped and grouped)
            each(axis.series, function(series) {
                xData = series.xData;
                loopLength = series.xIncrement ? 1 : xData.length - 1;
                for (i = loopLength; i > 0; i--) {
                    distance = xData[i] - xData[i - 1];
                    if (closestDataRange === undefined || distance < closestDataRange) {
                        closestDataRange = distance;
                    }
                }
            });
            axis.minRange = Math.min(closestDataRange * 5, axis.dataMax - axis.dataMin);
        }
    }
    // if minRange is exceeded, adjust
    if (max - min < axis.minRange) {
        minRange = axis.minRange;
        // Pad each side by half of the missing range
        zoomOffset = (minRange - max + min) / 2;
        // if min and max options have been set, don't go beyond it
        minArgs = [min - zoomOffset, pick(options.min, min - zoomOffset)];
        if (spaceAvailable) { // if space is available, stay within the data range
            minArgs[2] = axis.isLog ? axis.log2lin(axis.dataMin) : axis.dataMin;
        }
        min = arrayMax(minArgs);
        maxArgs = [min + minRange, pick(options.max, min + minRange)];
        if (spaceAvailable) { // if space is available, stay within the data range
            maxArgs[2] = axis.isLog ? axis.log2lin(axis.dataMax) : axis.dataMax;
        }
        max = arrayMin(maxArgs);
        // now if the max is adjusted, adjust the min back
        if (max - min < minRange) {
            minArgs[0] = max - minRange;
            minArgs[1] = pick(options.min, max - minRange);
            min = arrayMax(minArgs);
        }
    }
    // Record modified extremes
    axis.min = min;
    axis.max = max;
},
/**
* Find the closestPointRange across all series
*/
getClosest: function() {
var ret;
if (this.categories) {
ret = 1;
} else {
each(this.series, function(series) {
var seriesClosest = series.closestPointRange;
if (!series.noSharedTooltip && defined(seriesClosest)) {
ret = defined(ret) ?
Math.min(ret, seriesClosest) :
seriesClosest;
}
});
}
return ret;
},
/**
 * When a point name is given and no x, search for the name in the existing categories,
 * or if categories aren't provided, search names or create a new category (#2522).
 * @param {Object} point - the point whose name is mapped to an x value
 * @returns {Number} the resolved x value (may be undefined when the name
 *          is missing from explicit categories)
 */
nameToX: function(point) {
    var explicitCategories = isArray(this.categories),
        names = explicitCategories ? this.categories : this.names,
        nameX = point.options.x,
        x;
    // Named points cannot be assumed sorted by x
    point.series.requireSorting = false;
    if (!defined(nameX)) {
        nameX = this.options.uniqueNames === false ?
            point.series.autoIncrement() :
            inArray(point.name, names);
    }
    if (nameX === -1) { // The name is not found in current categories
        if (!explicitCategories) {
            // Append as a new category at the end
            x = names.length;
        }
    } else {
        x = nameX;
    }
    // Write the last point's name to the names array
    this.names[x] = point.name;
    return x;
},
/**
* When changes have been done to series data, update the axis.names.
*/
updateNames: function() {
var axis = this;
if (this.names.length > 0) {
this.names.length = 0;
this.minRange = undefined;
each(this.series || [], function(series) {
// When adding a series, points are not yet generated
if (!series.points || series.isDirtyData) {
series.processData();
series.generatePoints();
}
each(series.points, function(point, i) {
var x;
if (point.options && point.options.x === undefined) {
x = axis.nameToX(point);
if (x !== point.x) {
point.x = x;
series.xData[i] = x;
}
}
});
});
}
},
/**
 * Update translation information: the pixels-per-unit factor (transA),
 * the translation addend (transB), point range and the padding needed
 * to make room for points with a width (columns).
 * @param {Boolean} saveOld - store the previous transA in oldTransA so
 *        resize/rescale can translate against the old metrics
 */
setAxisTranslation: function(saveOld) {
    var axis = this,
        range = axis.max - axis.min,
        pointRange = axis.axisPointRange || 0,
        closestPointRange,
        minPointOffset = 0,
        pointRangePadding = 0,
        linkedParent = axis.linkedParent,
        ordinalCorrection,
        hasCategories = !!axis.categories,
        transA = axis.transA,
        isXAxis = axis.isXAxis;
    // Adjust translation for padding. Y axis with categories need to go through the same (#1784).
    if (isXAxis || hasCategories || pointRange) {
        if (linkedParent) {
            // Inherit the padding computed by the parent axis
            minPointOffset = linkedParent.minPointOffset;
            pointRangePadding = linkedParent.pointRangePadding;
        } else {
            // Get the closest points
            closestPointRange = axis.getClosest();
            each(axis.series, function(series) {
                var seriesPointRange = hasCategories ?
                    1 :
                    (isXAxis ?
                        pick(series.options.pointRange, closestPointRange, 0) :
                        (axis.axisPointRange || 0)), // #2806
                    pointPlacement = series.options.pointPlacement;
                pointRange = Math.max(pointRange, seriesPointRange);
                if (!axis.single) {
                    // minPointOffset is the value padding to the left of the axis in order to make
                    // room for points with a pointRange, typically columns. When the pointPlacement option
                    // is 'between' or 'on', this padding does not apply.
                    minPointOffset = Math.max(
                        minPointOffset,
                        isString(pointPlacement) ? 0 : seriesPointRange / 2
                    );
                    // Determine the total padding needed to the length of the axis to make room for the
                    // pointRange. If the series' pointPlacement is 'on', no padding is added.
                    pointRangePadding = Math.max(
                        pointRangePadding,
                        pointPlacement === 'on' ? 0 : seriesPointRange
                    );
                }
            });
        }
        // Record minPointOffset and pointRangePadding
        ordinalCorrection = axis.ordinalSlope && closestPointRange ? axis.ordinalSlope / closestPointRange : 1; // #988, #1853
        axis.minPointOffset = minPointOffset = minPointOffset * ordinalCorrection;
        axis.pointRangePadding = pointRangePadding = pointRangePadding * ordinalCorrection;
        // pointRange means the width reserved for each point, like in a column chart
        axis.pointRange = Math.min(pointRange, range);
        // closestPointRange means the closest distance between points. In columns
        // it is mostly equal to pointRange, but in lines pointRange is 0 while closestPointRange
        // is some other value
        if (isXAxis) {
            axis.closestPointRange = closestPointRange;
        }
    }
    // Secondary values
    if (saveOld) {
        axis.oldTransA = transA;
    }
    axis.translationSlope = axis.transA = transA = axis.len / ((range + pointRangePadding) || 1);
    axis.transB = axis.horiz ? axis.left : axis.bottom; // translation addend
    axis.minPixelPadding = transA * minPointOffset;
},
minFromRange: function() {
return this.max - this.range;
},
/**
 * Set the tick positions to round values and optionally extend the extremes
 * to the nearest tick. Establishes axis.min, axis.max and
 * axis.tickInterval, then delegates to setTickPositions.
 * @param {Boolean} secondPass - true when re-run after data processing;
 *        skips error checks and series re-processing
 */
setTickInterval: function(secondPass) {
    var axis = this,
        chart = axis.chart,
        options = axis.options,
        isLog = axis.isLog,
        log2lin = axis.log2lin,
        isDatetimeAxis = axis.isDatetimeAxis,
        isXAxis = axis.isXAxis,
        isLinked = axis.isLinked,
        maxPadding = options.maxPadding,
        minPadding = options.minPadding,
        length,
        linkedParentExtremes,
        tickIntervalOption = options.tickInterval,
        minTickInterval,
        tickPixelIntervalOption = options.tickPixelInterval,
        categories = axis.categories,
        threshold = axis.threshold,
        softThreshold = axis.softThreshold,
        thresholdMin,
        thresholdMax,
        hardMin,
        hardMax;
    if (!isDatetimeAxis && !categories && !isLinked) {
        this.getTickAmount();
    }
    // Min or max set either by zooming/setExtremes or initial options
    hardMin = pick(axis.userMin, options.min);
    hardMax = pick(axis.userMax, options.max);
    // Linked axis gets the extremes from the parent axis
    if (isLinked) {
        axis.linkedParent = chart[axis.coll][options.linkedTo];
        linkedParentExtremes = axis.linkedParent.getExtremes();
        axis.min = pick(linkedParentExtremes.min, linkedParentExtremes.dataMin);
        axis.max = pick(linkedParentExtremes.max, linkedParentExtremes.dataMax);
        if (options.type !== axis.linkedParent.options.type) {
            error(11, 1); // Can't link axes of different type
        }
        // Initial min and max from the extreme data values
    } else {
        // Adjust to hard threshold
        if (!softThreshold && defined(threshold)) {
            if (axis.dataMin >= threshold) {
                thresholdMin = threshold;
                minPadding = 0;
            } else if (axis.dataMax <= threshold) {
                thresholdMax = threshold;
                maxPadding = 0;
            }
        }
        axis.min = pick(hardMin, thresholdMin, axis.dataMin);
        axis.max = pick(hardMax, thresholdMax, axis.dataMax);
    }
    if (isLog) {
        if (!secondPass && Math.min(axis.min, pick(axis.dataMin, axis.min)) <= 0) { // #978
            error(10, 1); // Can't plot negative values on log axis
        }
        // The correctFloat cures #934, float errors on full tens. But it
        // was too aggressive for #4360 because of conversion back to lin,
        // therefore use precision 15.
        axis.min = correctFloat(log2lin(axis.min), 15);
        axis.max = correctFloat(log2lin(axis.max), 15);
    }
    // handle zoomed range
    if (axis.range && defined(axis.max)) {
        axis.userMin = axis.min = hardMin = Math.max(axis.min, axis.minFromRange()); // #618
        axis.userMax = hardMax = axis.max;
        axis.range = null; // don't use it when running setExtremes
    }
    // Hook for Highstock Scroller. Consider combining with beforePadding.
    fireEvent(axis, 'foundExtremes');
    // Hook for adjusting this.min and this.max. Used by bubble series.
    if (axis.beforePadding) {
        axis.beforePadding();
    }
    // adjust min and max for the minimum range
    axis.adjustForMinRange();
    // Pad the values to get clear of the chart's edges. To avoid tickInterval taking the padding
    // into account, we do this after computing tick interval (#1337).
    if (!categories && !axis.axisPointRange && !axis.usePercentage && !isLinked && defined(axis.min) && defined(axis.max)) {
        length = axis.max - axis.min;
        if (length) {
            if (!defined(hardMin) && minPadding) {
                axis.min -= length * minPadding;
            }
            if (!defined(hardMax) && maxPadding) {
                axis.max += length * maxPadding;
            }
        }
    }
    // Handle options for floor, ceiling, softMin and softMax
    if (isNumber(options.floor)) {
        axis.min = Math.max(axis.min, options.floor);
    } else if (isNumber(options.softMin)) {
        axis.min = Math.min(axis.min, options.softMin);
    }
    if (isNumber(options.ceiling)) {
        axis.max = Math.min(axis.max, options.ceiling);
    } else if (isNumber(options.softMax)) {
        axis.max = Math.max(axis.max, options.softMax);
    }
    // When the threshold is soft, adjust the extreme value only if
    // the data extreme and the padded extreme land on either side of the threshold. For example,
    // a series of [0, 1, 2, 3] would make the yAxis add a tick for -1 because of the
    // default minPadding and startOnTick options. This is prevented by the softThreshold
    // option.
    if (softThreshold && defined(axis.dataMin)) {
        threshold = threshold || 0;
        if (!defined(hardMin) && axis.min < threshold && axis.dataMin >= threshold) {
            axis.min = threshold;
        } else if (!defined(hardMax) && axis.max > threshold && axis.dataMax <= threshold) {
            axis.max = threshold;
        }
    }
    // get tickInterval
    if (axis.min === axis.max || axis.min === undefined || axis.max === undefined) {
        axis.tickInterval = 1;
    } else if (isLinked && !tickIntervalOption &&
        tickPixelIntervalOption === axis.linkedParent.options.tickPixelInterval) {
        // Inherit the parent's interval when pixel intervals match
        axis.tickInterval = tickIntervalOption = axis.linkedParent.tickInterval;
    } else {
        axis.tickInterval = pick(
            tickIntervalOption,
            this.tickAmount ? ((axis.max - axis.min) / Math.max(this.tickAmount - 1, 1)) : undefined,
            categories ? // for categoried axis, 1 is default, for linear axis use tickPix
            1 :
            // don't let it be more than the data range
            (axis.max - axis.min) * tickPixelIntervalOption / Math.max(axis.len, tickPixelIntervalOption)
        );
    }
    // Now we're finished detecting min and max, crop and group series data. This
    // is in turn needed in order to find tick positions in ordinal axes.
    if (isXAxis && !secondPass) {
        each(axis.series, function(series) {
            series.processData(axis.min !== axis.oldMin || axis.max !== axis.oldMax);
        });
    }
    // set the translation factor used in translate function
    axis.setAxisTranslation(true);
    // hook for ordinal axes and radial axes
    if (axis.beforeSetTickPositions) {
        axis.beforeSetTickPositions();
    }
    // hook for extensions, used in Highstock ordinal axes
    if (axis.postProcessTickInterval) {
        axis.tickInterval = axis.postProcessTickInterval(axis.tickInterval);
    }
    // In column-like charts, don't cramp in more ticks than there are points (#1943, #4184)
    if (axis.pointRange && !tickIntervalOption) {
        axis.tickInterval = Math.max(axis.pointRange, axis.tickInterval);
    }
    // Before normalizing the tick interval, handle minimum tick interval. This applies only if tickInterval is not defined.
    minTickInterval = pick(options.minTickInterval, axis.isDatetimeAxis && axis.closestPointRange);
    if (!tickIntervalOption && axis.tickInterval < minTickInterval) {
        axis.tickInterval = minTickInterval;
    }
    // for linear axes, get magnitude and normalize the interval
    if (!isDatetimeAxis && !isLog && !tickIntervalOption) {
        axis.tickInterval = normalizeTickInterval(
            axis.tickInterval,
            null,
            getMagnitude(axis.tickInterval),
            // If the tick interval is between 0.5 and 5 and the axis max is in the order of
            // thousands, chances are we are dealing with years. Don't allow decimals. #3363.
            pick(options.allowDecimals, !(axis.tickInterval > 0.5 && axis.tickInterval < 5 && axis.max > 1000 && axis.max < 9999)), !!this.tickAmount
        );
    }
    // Prevent ticks from getting so close that we can't draw the labels
    if (!this.tickAmount) {
        axis.tickInterval = axis.unsquish();
    }
    this.setTickPositions();
},
/**
* Now we have computed the normalized tickInterval, get the tick positions
*/
        setTickPositions: function() {
            var options = this.options,
                tickPositions,
                tickPositionsOption = options.tickPositions,
                tickPositioner = options.tickPositioner,
                startOnTick = options.startOnTick,
                endOnTick = options.endOnTick,
                single;
            // Set the tickmarkOffset
            this.tickmarkOffset = (this.categories && options.tickmarkPlacement === 'between' &&
                this.tickInterval === 1) ? 0.5 : 0; // #3202
            // get minorTickInterval
            this.minorTickInterval = options.minorTickInterval === 'auto' && this.tickInterval ?
                this.tickInterval / 5 : options.minorTickInterval;
            // Find the tick positions
            this.tickPositions = tickPositions = tickPositionsOption && tickPositionsOption.slice(); // Work on a copy (#1565)
            if (!tickPositions) {
                // No explicit positions given: compute them from the tick
                // interval, using the routine matching the axis type
                if (this.isDatetimeAxis) {
                    tickPositions = this.getTimeTicks(
                        this.normalizeTimeTickInterval(this.tickInterval, options.units),
                        this.min,
                        this.max,
                        options.startOfWeek,
                        this.ordinalPositions,
                        this.closestPointRange,
                        true
                    );
                } else if (this.isLog) {
                    tickPositions = this.getLogTickPositions(this.tickInterval, this.min, this.max);
                } else {
                    tickPositions = this.getLinearTickPositions(this.tickInterval, this.min, this.max);
                }
                // Too dense ticks, keep only the first and last (#4477)
                if (tickPositions.length > this.len) {
                    tickPositions = [tickPositions[0], tickPositions.pop()];
                }
                this.tickPositions = tickPositions;
                // Run the tick positioner callback, that allows modifying auto tick positions.
                if (tickPositioner) {
                    tickPositioner = tickPositioner.apply(this, [this.min, this.max]);
                    if (tickPositioner) {
                        this.tickPositions = tickPositions = tickPositioner;
                    }
                }
            }
            if (!this.isLinked) {
                // reset min/max or remove extremes based on start/end on tick
                this.trimTicks(tickPositions, startOnTick, endOnTick);
                // When there is only one point, or all points have the same value on this axis, then min
                // and max are equal and tickPositions.length is 0 or 1. In this case, add some padding
                // in order to center the point, but leave it with one tick. #1337.
                if (this.min === this.max && defined(this.min) && !this.tickAmount) {
                    // Substract half a unit (#2619, #2846, #2515, #3390)
                    single = true;
                    this.min -= 0.5;
                    this.max += 0.5;
                }
                this.single = single;
                // User-supplied positions/positioner take precedence over the
                // cross-axis tick amount adjustment
                if (!tickPositionsOption && !tickPositioner) {
                    this.adjustTickAmount();
                }
            }
        },
/**
* Handle startOnTick and endOnTick by either adapting to padding min/max or rounded min/max
*/
trimTicks: function(tickPositions, startOnTick, endOnTick) {
var roundedMin = tickPositions[0],
roundedMax = tickPositions[tickPositions.length - 1],
minPointOffset = this.minPointOffset || 0;
if (startOnTick) {
this.min = roundedMin;
} else {
while (this.min - minPointOffset > tickPositions[0]) {
tickPositions.shift();
}
}
if (endOnTick) {
this.max = roundedMax;
} else {
while (this.max + minPointOffset < tickPositions[tickPositions.length - 1]) {
tickPositions.pop();
}
}
// If no tick are left, set one tick in the middle (#3195)
if (tickPositions.length === 0 && defined(roundedMin)) {
tickPositions.push((roundedMax + roundedMin) / 2);
}
},
/**
* Check if there are multiple axes in the same pane
* @returns {Boolean} There are other axes
*/
alignToOthers: function() {
var others = {}, // Whether there is another axis to pair with this one
hasOther,
options = this.options;
if (this.chart.options.chart.alignTicks !== false && options.alignTicks !== false) {
each(this.chart[this.coll], function(axis) {
var otherOptions = axis.options,
horiz = axis.horiz,
key = [
horiz ? otherOptions.left : otherOptions.top,
otherOptions.width,
otherOptions.height,
otherOptions.pane
].join(',');
if (axis.series.length) { // #4442
if (others[key]) {
hasOther = true; // #4201
} else {
others[key] = 1;
}
}
});
}
return hasOther;
},
/**
* Set the max ticks of either the x and y axis collection
*/
getTickAmount: function() {
var options = this.options,
tickAmount = options.tickAmount,
tickPixelInterval = options.tickPixelInterval;
if (!defined(options.tickInterval) && this.len < tickPixelInterval && !this.isRadial &&
!this.isLog && options.startOnTick && options.endOnTick) {
tickAmount = 2;
}
if (!tickAmount && this.alignToOthers()) {
// Add 1 because 4 tick intervals require 5 ticks (including first and last)
tickAmount = Math.ceil(this.len / tickPixelInterval) + 1;
}
// For tick amounts of 2 and 3, compute five ticks and remove the intermediate ones. This
// prevents the axis from adding ticks that are too far away from the data extremes.
if (tickAmount < 4) {
this.finalTickAmt = tickAmount;
tickAmount = 5;
}
this.tickAmount = tickAmount;
},
/**
* When using multiple axes, adjust the number of ticks to match the highest
* number of ticks in that group
*/
        adjustTickAmount: function() {
            var tickInterval = this.tickInterval,
                tickPositions = this.tickPositions,
                tickAmount = this.tickAmount,
                finalTickAmt = this.finalTickAmt,
                currentTickAmount = tickPositions && tickPositions.length,
                i,
                len;
            if (currentTickAmount < tickAmount) {
                // Too few ticks: append positions beyond the current max, and
                // shrink the translation factor so the data keeps its place
                while (tickPositions.length < tickAmount) {
                    tickPositions.push(correctFloat(
                        tickPositions[tickPositions.length - 1] + tickInterval
                    ));
                }
                this.transA *= (currentTickAmount - 1) / (tickAmount - 1);
                this.max = tickPositions[tickPositions.length - 1];
                // We have too many ticks, run second pass to try to reduce ticks
            } else if (currentTickAmount > tickAmount) {
                this.tickInterval *= 2;
                this.setTickPositions();
            }
            // The finalTickAmt property is set in getTickAmount
            if (defined(finalTickAmt)) {
                i = len = tickPositions.length;
                // Iterate backwards so splicing doesn't shift pending indices
                while (i--) {
                    if (
                        (finalTickAmt === 3 && i % 2 === 1) || // Remove every other tick
                        (finalTickAmt <= 2 && i > 0 && i < len - 1) // Remove all but first and last
                    ) {
                        tickPositions.splice(i, 1);
                    }
                }
                this.finalTickAmt = undefined;
            }
        },
/**
* Set the scale based on data min and max, user set min and max or options
*
*/
        setScale: function() {
            var axis = this,
                isDirtyData,
                isDirtyAxisLength;
            // Remember current state so we can detect changes below
            axis.oldMin = axis.min;
            axis.oldMax = axis.max;
            axis.oldAxisLength = axis.len;
            // set the new axisLength
            axis.setAxisSize();
            //axisLength = horiz ? axisWidth : axisHeight;
            isDirtyAxisLength = axis.len !== axis.oldAxisLength;
            // is there new data?
            each(axis.series, function(series) {
                if (series.isDirtyData || series.isDirty ||
                    series.xAxis.isDirty) { // when x axis is dirty, we need new data extremes for y as well
                    isDirtyData = true;
                }
            });
            // do we really need to go through all this?
            if (isDirtyAxisLength || isDirtyData || axis.isLinked || axis.forceRedraw ||
                axis.userMin !== axis.oldUserMin || axis.userMax !== axis.oldUserMax || axis.alignToOthers()) {
                if (axis.resetStacks) {
                    axis.resetStacks();
                }
                axis.forceRedraw = false;
                // get data extremes if needed
                axis.getSeriesExtremes();
                // get fixed positions based on tickInterval
                axis.setTickInterval();
                // record old values to decide whether a rescale is necessary later on (#540)
                axis.oldUserMin = axis.userMin;
                axis.oldUserMax = axis.userMax;
                // Mark as dirty if it is not already set to dirty and extremes have changed. #595.
                if (!axis.isDirty) {
                    axis.isDirty = isDirtyAxisLength || axis.min !== axis.oldMin || axis.max !== axis.oldMax;
                }
            } else if (axis.cleanStacks) {
                axis.cleanStacks();
            }
        },
/**
* Set the extremes and optionally redraw
* @param {Number} newMin
* @param {Number} newMax
* @param {Boolean} redraw
* @param {Boolean|Object} animation Whether to apply animation, and optionally animation
* configuration
* @param {Object} eventArguments
*
*/
setExtremes: function(newMin, newMax, redraw, animation, eventArguments) {
var axis = this,
chart = axis.chart;
redraw = pick(redraw, true); // defaults to true
each(axis.series, function(serie) {
delete serie.kdTree;
});
// Extend the arguments with min and max
eventArguments = extend(eventArguments, {
min: newMin,
max: newMax
});
// Fire the event
fireEvent(axis, 'setExtremes', eventArguments, function() { // the default event handler
axis.userMin = newMin;
axis.userMax = newMax;
axis.eventArgs = eventArguments;
if (redraw) {
chart.redraw(animation);
}
});
},
/**
* Overridable method for zooming chart. Pulled out in a separate method to allow overriding
* in stock charts.
*/
zoom: function(newMin, newMax) {
var dataMin = this.dataMin,
dataMax = this.dataMax,
options = this.options,
min = Math.min(dataMin, pick(options.min, dataMin)),
max = Math.max(dataMax, pick(options.max, dataMax));
if (newMin !== this.min || newMax !== this.max) { // #5790
// Prevent pinch zooming out of range. Check for defined is for #1946. #1734.
if (!this.allowZoomOutside) {
if (defined(dataMin) && newMin <= min) {
newMin = min;
}
if (defined(dataMax) && newMax >= max) {
newMax = max;
}
}
// In full view, displaying the reset zoom button is not required
this.displayBtn = newMin !== undefined || newMax !== undefined;
// Do it
this.setExtremes(
newMin,
newMax,
false,
undefined, {
trigger: 'zoom'
}
);
}
return true;
},
/**
* Update the axis metrics
*/
setAxisSize: function() {
var chart = this.chart,
options = this.options,
offsetLeft = options.offsetLeft || 0,
offsetRight = options.offsetRight || 0,
horiz = this.horiz,
width = pick(options.width, chart.plotWidth - offsetLeft + offsetRight),
height = pick(options.height, chart.plotHeight),
top = pick(options.top, chart.plotTop),
left = pick(options.left, chart.plotLeft + offsetLeft),
percentRegex = /%$/;
// Check for percentage based input values. Rounding fixes problems with
// column overflow and plot line filtering (#4898, #4899)
if (percentRegex.test(height)) {
height = Math.round(parseFloat(height) / 100 * chart.plotHeight);
}
if (percentRegex.test(top)) {
top = Math.round(parseFloat(top) / 100 * chart.plotHeight + chart.plotTop);
}
// Expose basic values to use in Series object and navigator
this.left = left;
this.top = top;
this.width = width;
this.height = height;
this.bottom = chart.chartHeight - height - top;
this.right = chart.chartWidth - width - left;
// Direction agnostic properties
this.len = Math.max(horiz ? width : height, 0); // Math.max fixes #905
this.pos = horiz ? left : top; // distance from SVG origin
},
/**
* Get the actual axis extremes
*/
getExtremes: function() {
var axis = this,
isLog = axis.isLog,
lin2log = axis.lin2log;
return {
min: isLog ? correctFloat(lin2log(axis.min)) : axis.min,
max: isLog ? correctFloat(lin2log(axis.max)) : axis.max,
dataMin: axis.dataMin,
dataMax: axis.dataMax,
userMin: axis.userMin,
userMax: axis.userMax
};
},
/**
* Get the zero plane either based on zero or on the min or max value.
* Used in bar and area plots
*/
getThreshold: function(threshold) {
var axis = this,
isLog = axis.isLog,
lin2log = axis.lin2log,
realMin = isLog ? lin2log(axis.min) : axis.min,
realMax = isLog ? lin2log(axis.max) : axis.max;
if (threshold === null) {
threshold = realMin;
} else if (realMin > threshold) {
threshold = realMin;
} else if (realMax < threshold) {
threshold = realMax;
}
return axis.translate(threshold, 0, 1, 0, 1);
},
/**
* Compute auto alignment for the axis label based on which side the axis is on
* and the given rotation for the label
*/
autoLabelAlign: function(rotation) {
var ret,
angle = (pick(rotation, 0) - (this.side * 90) + 720) % 360;
if (angle > 15 && angle < 165) {
ret = 'right';
} else if (angle > 195 && angle < 345) {
ret = 'left';
} else {
ret = 'center';
}
return ret;
},
/**
* Get the tick length and width for the axis.
* @param {String} prefix 'tick' or 'minorTick'
* @returns {Array} An array of tickLength and tickWidth
*/
tickSize: function(prefix) {
var options = this.options,
tickLength = options[prefix + 'Length'],
tickWidth = pick(options[prefix + 'Width'], prefix === 'tick' && this.isXAxis ? 1 : 0); // X axis defaults to 1
if (tickWidth && tickLength) {
// Negate the length
if (options[prefix + 'Position'] === 'inside') {
tickLength = -tickLength;
}
return [tickLength, tickWidth];
}
},
/**
* Return the size of the labels
*/
labelMetrics: function() {
return this.chart.renderer.fontMetrics(
this.options.labels.style && this.options.labels.style.fontSize,
this.ticks[0] && this.ticks[0].label
);
},
/**
* Prevent the ticks from getting so close we can't draw the labels. On a horizontal
* axis, this is handled by rotating the labels, removing ticks and adding ellipsis.
* On a vertical axis remove ticks and add ellipsis.
*/
        unsquish: function() {
            var labelOptions = this.options.labels,
                horiz = this.horiz,
                tickInterval = this.tickInterval,
                newTickInterval = tickInterval,
                // Pixel width available per tick label slot
                slotSize = this.len / (((this.categories ? 1 : 0) + this.max - this.min) / tickInterval),
                rotation,
                rotationOption = labelOptions.rotation,
                labelMetrics = this.labelMetrics(),
                step,
                bestScore = Number.MAX_VALUE,
                autoRotation,
                // Return the multiple of tickInterval that is needed to avoid collision
                getStep = function(spaceNeeded) {
                    var step = spaceNeeded / (slotSize || 1);
                    step = step > 1 ? Math.ceil(step) : 1;
                    return step * tickInterval;
                };
            if (horiz) {
                // Candidate rotations: the explicit option, or autoRotation
                // when the slots are narrower than autoRotationLimit
                autoRotation = !labelOptions.staggerLines && !labelOptions.step && ( // #3971
                    defined(rotationOption) ? [rotationOption] :
                    slotSize < pick(labelOptions.autoRotationLimit, 80) && labelOptions.autoRotation
                );
                if (autoRotation) {
                    // Loop over the given autoRotation options, and determine which gives the best score. The
                    // best score is that with the lowest number of steps and a rotation closest to horizontal.
                    each(autoRotation, function(rot) {
                        var score;
                        if (rot === rotationOption || (rot && rot >= -90 && rot <= 90)) { // #3891
                            step = getStep(Math.abs(labelMetrics.h / Math.sin(deg2rad * rot)));
                            score = step + Math.abs(rot / 360);
                            if (score < bestScore) {
                                bestScore = score;
                                rotation = rot;
                                newTickInterval = step;
                            }
                        }
                    });
                }
            } else if (!labelOptions.step) { // #4411
                // Vertical axis: base the step on the label height alone
                newTickInterval = getStep(labelMetrics.h);
            }
            this.autoRotation = autoRotation;
            this.labelRotation = pick(rotation, rotationOption);
            return newTickInterval;
        },
/**
* Get the general slot width for this axis. This may change between the pre-render (from Axis.getOffset)
* and the final tick rendering and placement (#5086).
*/
        getSlotWidth: function() {
            var chart = this.chart,
                horiz = this.horiz,
                labelOptions = this.options.labels,
                // Number of label slots along the axis; categories get one
                // slot per category
                slotCount = Math.max(this.tickPositions.length - (this.categories ? 0 : 1), 1),
                marginLeft = chart.margin[3];
            // Horizontal, unrotated labels share the plot width between slots;
            // vertical labels get the left margin or a third of the chart width.
            // NOTE(review): the exact falsy result (false vs. a number) feeds
            // arithmetic in renderUnsquish — preserve this expression shape.
            return (horiz && (labelOptions.step || 0) < 2 && !labelOptions.rotation && // #4415
                ((this.staggerLines || 1) * chart.plotWidth) / slotCount) ||
                (!horiz && ((marginLeft && (marginLeft - chart.spacing[3])) || chart.chartWidth * 0.33)); // #1580, #1931
        },
/**
* Render the axis labels and determine whether ellipsis or rotation need to be applied
*/
        renderUnsquish: function() {
            var chart = this.chart,
                renderer = chart.renderer,
                tickPositions = this.tickPositions,
                ticks = this.ticks,
                labelOptions = this.options.labels,
                horiz = this.horiz,
                slotWidth = this.getSlotWidth(),
                innerWidth = Math.max(1, Math.round(slotWidth - 2 * (labelOptions.padding || 5))),
                attr = {},
                labelMetrics = this.labelMetrics(),
                textOverflowOption = labelOptions.style && labelOptions.style.textOverflow,
                css,
                maxLabelLength = 0,
                label,
                i,
                pos;
            // Set rotation option unless it is "auto", like in gauges
            if (!isString(labelOptions.rotation)) {
                attr.rotation = labelOptions.rotation || 0; // #4443
            }
            // Get the longest label length
            each(tickPositions, function(tick) {
                tick = ticks[tick];
                if (tick && tick.labelLength > maxLabelLength) {
                    maxLabelLength = tick.labelLength;
                }
            });
            this.maxLabelLength = maxLabelLength;
            // Handle auto rotation on horizontal axis
            if (this.autoRotation) {
                // Apply rotation only if the label is too wide for the slot, and
                // the label is wider than its height.
                if (maxLabelLength > innerWidth && maxLabelLength > labelMetrics.h) {
                    attr.rotation = this.labelRotation;
                } else {
                    this.labelRotation = 0;
                }
                // Handle word-wrap or ellipsis on vertical axis
            } else if (slotWidth) {
                // For word-wrap or ellipsis
                css = {
                    width: innerWidth + 'px'
                };
                if (!textOverflowOption) {
                    css.textOverflow = 'clip';
                    // On vertical axis, only allow word wrap if there is room for more lines.
                    // (horizontal axes are handled by the rotation branch above)
                    i = tickPositions.length;
                    while (!horiz && i--) {
                        pos = tickPositions[i];
                        label = ticks[pos].label;
                        if (label) {
                            // Reset ellipsis in order to get the correct bounding box (#4070)
                            if (label.styles && label.styles.textOverflow === 'ellipsis') {
                                label.css({
                                    textOverflow: 'clip'
                                });
                                // Set the correct width in order to read the bounding box height (#4678, #5034)
                            } else if (ticks[pos].labelLength > slotWidth) {
                                label.css({
                                    width: slotWidth + 'px'
                                });
                            }
                            if (label.getBBox().height > this.len / tickPositions.length - (labelMetrics.h - labelMetrics.f)) {
                                label.specCss = {
                                    textOverflow: 'ellipsis'
                                };
                            }
                        }
                    }
                }
            }
            // Add ellipsis if the label length is significantly longer than ideal
            if (attr.rotation) {
                css = {
                    width: (maxLabelLength > chart.chartHeight * 0.5 ? chart.chartHeight * 0.33 : chart.chartHeight) + 'px'
                };
                if (!textOverflowOption) {
                    css.textOverflow = 'ellipsis';
                }
            }
            // Set the explicit or automatic label alignment
            this.labelAlign = labelOptions.align || this.autoLabelAlign(this.labelRotation);
            if (this.labelAlign) {
                attr.align = this.labelAlign;
            }
            // Apply general and specific CSS
            each(tickPositions, function(pos) {
                var tick = ticks[pos],
                    label = tick && tick.label;
                if (label) {
                    label.attr(attr); // This needs to go before the CSS in old IE (#4502)
                    if (css) {
                        label.css(merge(css, label.specCss));
                    }
                    delete label.specCss;
                    tick.rotation = attr.rotation;
                }
            });
            // Note: Why is this not part of getLabelPosition?
            this.tickRotCorr = renderer.rotCorr(labelMetrics.b, this.labelRotation || 0, this.side !== 0);
        },
/**
* Return true if the axis has associated data
*/
hasData: function() {
return this.hasVisibleSeries || (defined(this.min) && defined(this.max) && !!this.tickPositions);
},
/**
* Render the tick labels to a preliminary position to get their sizes
*/
        getOffset: function() {
            var axis = this,
                chart = axis.chart,
                renderer = chart.renderer,
                options = axis.options,
                tickPositions = axis.tickPositions,
                ticks = axis.ticks,
                horiz = axis.horiz,
                side = axis.side,
                invertedSide = chart.inverted ? [1, 0, 3, 2][side] : side,
                hasData,
                showAxis,
                titleOffset = 0,
                titleOffsetOption,
                titleMargin = 0,
                axisTitleOptions = options.title,
                labelOptions = options.labels,
                labelOffset = 0, // reset
                labelOffsetPadded,
                opposite = axis.opposite,
                axisOffset = chart.axisOffset,
                clipOffset = chart.clipOffset,
                clip,
                directionFactor = [-1, 1, 1, -1][side],
                n,
                className = options.className,
                textAlign,
                axisParent = axis.axisParent, // Used in color axis
                lineHeightCorrection,
                tickSize = this.tickSize('tick');
            // For reuse in Axis.render
            hasData = axis.hasData();
            axis.showAxis = showAxis = hasData || pick(options.showEmpty, true);
            // Set/reset staggerLines
            axis.staggerLines = axis.horiz && labelOptions.staggerLines;
            // Create the axisGroup and gridGroup elements on first iteration
            if (!axis.axisGroup) {
                axis.gridGroup = renderer.g('grid')
                    .attr({
                        zIndex: options.gridZIndex || 1
                    })
                    .addClass('highcharts-' + this.coll.toLowerCase() + '-grid ' + (className || ''))
                    .add(axisParent);
                axis.axisGroup = renderer.g('axis')
                    .attr({
                        zIndex: options.zIndex || 2
                    })
                    .addClass('highcharts-' + this.coll.toLowerCase() + ' ' + (className || ''))
                    .add(axisParent);
                axis.labelGroup = renderer.g('axis-labels')
                    .attr({
                        zIndex: labelOptions.zIndex || 7
                    })
                    .addClass('highcharts-' + axis.coll.toLowerCase() + '-labels ' + (className || ''))
                    .add(axisParent);
            }
            if (hasData || axis.isLinked) {
                // Generate ticks
                each(tickPositions, function(pos) {
                    if (!ticks[pos]) {
                        ticks[pos] = new Tick(axis, pos);
                    } else {
                        ticks[pos].addLabel(); // update labels depending on tick interval
                    }
                });
                axis.renderUnsquish();
                // Left side must be align: right and right side must have align: left for labels
                if (labelOptions.reserveSpace !== false && (side === 0 || side === 2 || {
                        1: 'left',
                        3: 'right'
                    }[side] === axis.labelAlign || axis.labelAlign === 'center')) {
                    each(tickPositions, function(pos) {
                        // get the highest offset
                        labelOffset = Math.max(
                            ticks[pos].getLabelSize(),
                            labelOffset
                        );
                    });
                }
                if (axis.staggerLines) {
                    labelOffset *= axis.staggerLines;
                    axis.labelOffset = labelOffset * (axis.opposite ? -1 : 1);
                }
            } else { // doesn't have data
                // No data: drop all existing ticks
                for (n in ticks) {
                    ticks[n].destroy();
                    delete ticks[n];
                }
            }
            if (axisTitleOptions && axisTitleOptions.text && axisTitleOptions.enabled !== false) {
                // Lazily create the axis title on first call
                if (!axis.axisTitle) {
                    textAlign = axisTitleOptions.textAlign;
                    if (!textAlign) {
                        textAlign = (horiz ? {
                            low: 'left',
                            middle: 'center',
                            high: 'right'
                        } : {
                            low: opposite ? 'right' : 'left',
                            middle: 'center',
                            high: opposite ? 'left' : 'right'
                        })[axisTitleOptions.align];
                    }
                    axis.axisTitle = renderer.text(
                            axisTitleOptions.text,
                            0,
                            0,
                            axisTitleOptions.useHTML
                        )
                        .attr({
                            zIndex: 7,
                            rotation: axisTitleOptions.rotation || 0,
                            align: textAlign
                        })
                        .addClass('highcharts-axis-title')
                        .add(axis.axisGroup);
                    axis.axisTitle.isNew = true;
                }
                if (showAxis) {
                    titleOffset = axis.axisTitle.getBBox()[horiz ? 'height' : 'width'];
                    titleOffsetOption = axisTitleOptions.offset;
                    titleMargin = defined(titleOffsetOption) ? 0 : pick(axisTitleOptions.margin, horiz ? 5 : 10);
                }
                // hide or show the title depending on whether showEmpty is set
                axis.axisTitle[showAxis ? 'show' : 'hide'](true);
            }
            // Render the axis line
            axis.renderLine();
            // handle automatic or user set offset
            axis.offset = directionFactor * pick(options.offset, axisOffset[side]);
            axis.tickRotCorr = axis.tickRotCorr || {
                x: 0,
                y: 0
            }; // polar
            if (side === 0) {
                lineHeightCorrection = -axis.labelMetrics().h;
            } else if (side === 2) {
                lineHeightCorrection = axis.tickRotCorr.y;
            } else {
                lineHeightCorrection = 0;
            }
            // Find the padded label offset
            labelOffsetPadded = Math.abs(labelOffset) + titleMargin;
            if (labelOffset) {
                labelOffsetPadded -= lineHeightCorrection;
                labelOffsetPadded += directionFactor * (horiz ? pick(labelOptions.y, axis.tickRotCorr.y + directionFactor * 8) : labelOptions.x);
            }
            axis.axisTitleMargin = pick(titleOffsetOption, labelOffsetPadded);
            // Accumulate the largest offset needed by any axis on this side
            axisOffset[side] = Math.max(
                axisOffset[side],
                axis.axisTitleMargin + titleOffset + directionFactor * axis.offset,
                labelOffsetPadded, // #3027
                hasData && tickPositions.length && tickSize ? tickSize[0] : 0 // #4866
            );
            // Decide the clipping needed to keep the graph inside the plot area and axis lines
            clip = options.offset ? 0 : Math.floor(axis.axisLine.strokeWidth() / 2) * 2; // #4308, #4371
            clipOffset[invertedSide] = Math.max(clipOffset[invertedSide], clip);
        },
/**
* Get the path for the axis line
*/
getLinePath: function(lineWidth) {
var chart = this.chart,
opposite = this.opposite,
offset = this.offset,
horiz = this.horiz,
lineLeft = this.left + (opposite ? this.width : 0) + offset,
lineTop = chart.chartHeight - this.bottom - (opposite ? this.height : 0) + offset;
if (opposite) {
lineWidth *= -1; // crispify the other way - #1480, #1687
}
return chart.renderer
.crispLine([
'M',
horiz ?
this.left :
lineLeft,
horiz ?
lineTop :
this.top,
'L',
horiz ?
chart.chartWidth - this.right :
lineLeft,
horiz ?
lineTop :
chart.chartHeight - this.bottom
], lineWidth);
},
/**
* Render the axis line
* @returns {[type]} [description]
*/
renderLine: function() {
if (!this.axisLine) {
this.axisLine = this.chart.renderer.path()
.addClass('highcharts-axis-line')
.add(this.axisGroup);
}
},
/**
* Position the title
*/
getTitlePosition: function() {
// compute anchor points for each of the title align options
var horiz = this.horiz,
axisLeft = this.left,
axisTop = this.top,
axisLength = this.len,
axisTitleOptions = this.options.title,
margin = horiz ? axisLeft : axisTop,
opposite = this.opposite,
offset = this.offset,
xOption = axisTitleOptions.x || 0,
yOption = axisTitleOptions.y || 0,
fontSize = this.chart.renderer.fontMetrics(axisTitleOptions.style && axisTitleOptions.style.fontSize, this.axisTitle).f,
// the position in the length direction of the axis
alongAxis = {
low: margin + (horiz ? 0 : axisLength),
middle: margin + axisLength / 2,
high: margin + (horiz ? axisLength : 0)
}[axisTitleOptions.align],
// the position in the perpendicular direction of the axis
offAxis = (horiz ? axisTop + this.height : axisLeft) +
(horiz ? 1 : -1) * // horizontal axis reverses the margin
(opposite ? -1 : 1) * // so does opposite axes
this.axisTitleMargin +
(this.side === 2 ? fontSize : 0);
return {
x: horiz ?
alongAxis + xOption : offAxis + (opposite ? this.width : 0) + offset + xOption,
y: horiz ?
offAxis + yOption - (opposite ? this.height : 0) + offset : alongAxis + yOption
};
},
/**
* Render the axis
*/
        render: function() {
            var axis = this,
                chart = axis.chart,
                renderer = chart.renderer,
                options = axis.options,
                isLog = axis.isLog,
                lin2log = axis.lin2log,
                isLinked = axis.isLinked,
                tickPositions = axis.tickPositions,
                axisTitle = axis.axisTitle,
                ticks = axis.ticks,
                minorTicks = axis.minorTicks,
                alternateBands = axis.alternateBands,
                stackLabelOptions = options.stackLabels,
                alternateGridColor = options.alternateGridColor,
                tickmarkOffset = axis.tickmarkOffset,
                axisLine = axis.axisLine,
                hasRendered = chart.hasRendered,
                slideInTicks = hasRendered && isNumber(axis.oldMin),
                showAxis = axis.showAxis,
                animation = animObject(renderer.globalAnimation),
                from,
                to;
            // Reset
            axis.labelEdge.length = 0;
            //axis.justifyToPlot = overflow === 'justify';
            axis.overlap = false;
            // Mark all elements inActive before we go over and mark the active ones
            each([ticks, minorTicks, alternateBands], function(coll) {
                var pos;
                for (pos in coll) {
                    coll[pos].isActive = false;
                }
            });
            // If the series has data draw the ticks. Else only the line and title
            if (axis.hasData() || isLinked) {
                // minor ticks
                if (axis.minorTickInterval && !axis.categories) {
                    each(axis.getMinorTickPositions(), function(pos) {
                        if (!minorTicks[pos]) {
                            minorTicks[pos] = new Tick(axis, pos, 'minor');
                        }
                        // render new ticks in old position
                        if (slideInTicks && minorTicks[pos].isNew) {
                            minorTicks[pos].render(null, true);
                        }
                        minorTicks[pos].render(null, false, 1);
                    });
                }
                // Major ticks. Pull out the first item and render it last so that
                // we can get the position of the neighbour label. #808.
                if (tickPositions.length) { // #1300
                    each(tickPositions, function(pos, i) {
                        // linked axes need an extra check to find out if
                        // the position falls within the linked axis extremes
                        if (!isLinked || (pos >= axis.min && pos <= axis.max)) {
                            if (!ticks[pos]) {
                                ticks[pos] = new Tick(axis, pos);
                            }
                            // render new ticks in old position
                            if (slideInTicks && ticks[pos].isNew) {
                                ticks[pos].render(i, true, 0.1);
                            }
                            ticks[pos].render(i);
                        }
                    });
                    // In a categorized axis, the tick marks are displayed between labels. So
                    // we need to add a tick mark and grid line at the left edge of the X axis.
                    if (tickmarkOffset && (axis.min === 0 || axis.single)) {
                        if (!ticks[-1]) {
                            ticks[-1] = new Tick(axis, -1, null, true);
                        }
                        ticks[-1].render(-1);
                    }
                }
                // alternate grid color
                if (alternateGridColor) {
                    each(tickPositions, function(pos, i) {
                        to = tickPositions[i + 1] !== undefined ? tickPositions[i + 1] + tickmarkOffset : axis.max - tickmarkOffset;
                        if (i % 2 === 0 && pos < axis.max && to <= axis.max + (chart.polar ? -tickmarkOffset : tickmarkOffset)) { // #2248, #4660
                            if (!alternateBands[pos]) {
                                alternateBands[pos] = new PlotLineOrBand(axis);
                            }
                            from = pos + tickmarkOffset; // #949
                            alternateBands[pos].options = {
                                from: isLog ? lin2log(from) : from,
                                to: isLog ? lin2log(to) : to,
                                color: alternateGridColor
                            };
                            alternateBands[pos].render();
                            alternateBands[pos].isActive = true;
                        }
                    });
                }
                // custom plot lines and bands
                if (!axis._addedPlotLB) { // only first time
                    each((options.plotLines || []).concat(options.plotBands || []), function(plotLineOptions) {
                        axis.addPlotBandOrLine(plotLineOptions);
                    });
                    axis._addedPlotLB = true;
                }
            } // end if hasData
            // Remove inactive ticks
            each([ticks, minorTicks, alternateBands], function(coll) {
                var pos,
                    i,
                    forDestruction = [],
                    delay = animation.duration,
                    destroyInactiveItems = function() {
                        i = forDestruction.length;
                        while (i--) {
                            // When resizing rapidly, the same items may be destroyed in different timeouts,
                            // or they may be reactivated
                            if (coll[forDestruction[i]] && !coll[forDestruction[i]].isActive) {
                                coll[forDestruction[i]].destroy();
                                delete coll[forDestruction[i]];
                            }
                        }
                    };
                for (pos in coll) {
                    if (!coll[pos].isActive) {
                        // Render to zero opacity
                        coll[pos].render(pos, false, 0);
                        coll[pos].isActive = false;
                        forDestruction.push(pos);
                    }
                }
                // When the objects are finished fading out, destroy them
                syncTimeout(
                    destroyInactiveItems,
                    coll === alternateBands || !chart.hasRendered || !delay ? 0 : delay
                );
            });
            // Set the axis line path
            if (axisLine) {
                axisLine[axisLine.isPlaced ? 'animate' : 'attr']({
                    d: this.getLinePath(axisLine.strokeWidth())
                });
                axisLine.isPlaced = true;
                // Show or hide the line depending on options.showEmpty
                axisLine[showAxis ? 'show' : 'hide'](true);
            }
            if (axisTitle && showAxis) {
                axisTitle[axisTitle.isNew ? 'attr' : 'animate'](
                    axis.getTitlePosition()
                );
                axisTitle.isNew = false;
            }
            // Stacked totals:
            if (stackLabelOptions && stackLabelOptions.enabled) {
                axis.renderStackTotals();
            }
            // End stacked totals
            axis.isDirty = false;
        },
/**
* Redraw the axis to reflect changes in the data or axis extremes
*/
redraw: function() {
if (this.visible) {
// render the axis
this.render();
// move plot lines and bands
each(this.plotLinesAndBands, function(plotLine) {
plotLine.render();
});
}
// mark associated series as dirty and ready for redraw
each(this.series, function(series) {
series.isDirty = true;
});
},
/**
* Destroys an Axis instance.
*/
        destroy: function(keepEvents) {
            var axis = this,
                stacks = axis.stacks,
                stackKey,
                plotLinesAndBands = axis.plotLinesAndBands,
                i,
                n,
                keepProps;
            // Remove the events
            if (!keepEvents) {
                removeEvent(axis);
            }
            // Destroy each stack total
            for (stackKey in stacks) {
                destroyObjectProperties(stacks[stackKey]);
                stacks[stackKey] = null;
            }
            // Destroy collections
            each([axis.ticks, axis.minorTicks, axis.alternateBands], function(coll) {
                destroyObjectProperties(coll);
            });
            if (plotLinesAndBands) {
                i = plotLinesAndBands.length;
                while (i--) { // #1975
                    plotLinesAndBands[i].destroy();
                }
            }
            // Destroy local variables; destroy() clears the references
            each(['stackTotalGroup', 'axisLine', 'axisTitle', 'axisGroup', 'gridGroup', 'labelGroup', 'cross'], function(prop) {
                if (axis[prop]) {
                    axis[prop] = axis[prop].destroy();
                }
            });
            // Delete all properties and fall back to the prototype.
            // Preserve some properties, needed for Axis.update (#4317, #5773).
            keepProps = ['extKey', 'hcEvents', 'names', 'series', 'userMax', 'userMin'];
            for (n in axis) {
                if (axis.hasOwnProperty(n) && inArray(n, keepProps) === -1) {
                    delete axis[n];
                }
            }
        },
/**
* Draw the crosshair
*
* @param {Object} e The event arguments from the modified pointer event
* @param {Object} point The Point object
*/
        drawCrosshair: function(e, point) {
            var path,
                options = this.crosshair,
                snap = pick(options.snap, true),
                pos,
                categorized,
                graphic = this.cross;
            // Use last available event when updating non-snapped crosshairs without
            // mouse interaction (#5287)
            if (!e) {
                e = this.cross && this.cross.e;
            }
            if (
                // Disabled in options
                !this.crosshair ||
                // Snap
                ((defined(point) || !snap) === false)
            ) {
                this.hideCrosshair();
            } else {
                // Get the path
                if (!snap) {
                    pos = e && (this.horiz ? e.chartX - this.pos : this.len - e.chartY + this.pos);
                } else if (defined(point)) {
                    pos = this.isXAxis ? point.plotX : this.len - point.plotY; // #3834
                }
                if (defined(pos)) {
                    path = this.getPlotLinePath(
                        // First argument, value, only used on radial
                        point && (this.isXAxis ? point.x : pick(point.stackY, point.y)),
                        null,
                        null,
                        null,
                        pos // Translated position
                    ) || null; // #3189
                }
                // No valid path could be computed: nothing to draw
                if (!defined(path)) {
                    this.hideCrosshair();
                    return;
                }
                categorized = this.categories && !this.isRadial;
                // Draw the cross
                if (!graphic) {
                    this.cross = graphic = this.chart.renderer
                        .path()
                        .addClass('highcharts-crosshair highcharts-crosshair-' +
                            (categorized ? 'category ' : 'thin ') + options.className)
                        .attr({
                            zIndex: pick(options.zIndex, 2)
                        })
                        .add();
                }
                graphic.show().attr({
                    d: path
                });
                // Category crosshairs span the full category width by default
                if (categorized && !options.width) {
                    graphic.attr({
                        'stroke-width': this.transA
                    });
                }
                // Remember the event for later non-interactive updates (#5287)
                this.cross.e = e;
            }
        },
/**
* Hide the crosshair.
*/
hideCrosshair: function() {
if (this.cross) {
this.cross.hide();
}
}
}; // end Axis
extend(H.Axis.prototype, AxisPlotLineOrBandExtension);
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Axis = H.Axis,
getMagnitude = H.getMagnitude,
map = H.map,
normalizeTickInterval = H.normalizeTickInterval,
pick = H.pick;
/**
* Methods defined on the Axis prototype
*/
/**
* Set the tick positions of a logarithmic axis
*/
Axis.prototype.getLogTickPositions = function(interval, min, max, minor) {
var axis = this,
options = axis.options,
axisLength = axis.len,
lin2log = axis.lin2log,
log2lin = axis.log2lin,
// Since we use this method for both major and minor ticks,
// use a local variable and return the result
positions = [];
// Reset
if (!minor) {
axis._minorAutoInterval = null;
}
// First case: All ticks fall on whole logarithms: 1, 10, 100 etc.
if (interval >= 0.5) {
interval = Math.round(interval);
positions = axis.getLinearTickPositions(interval, min, max);
// Second case: We need intermediary ticks. For example
// 1, 2, 4, 6, 8, 10, 20, 40 etc.
} else if (interval >= 0.08) {
var roundedMin = Math.floor(min),
intermediate,
i,
j,
len,
pos,
lastPos,
break2;
if (interval > 0.3) {
intermediate = [1, 2, 4];
} else if (interval > 0.15) { // 0.2 equals five minor ticks per 1, 10, 100 etc
intermediate = [1, 2, 4, 6, 8];
} else { // 0.1 equals ten minor ticks per 1, 10, 100 etc
intermediate = [1, 2, 3, 4, 5, 6, 7, 8, 9];
}
for (i = roundedMin; i < max + 1 && !break2; i++) {
len = intermediate.length;
for (j = 0; j < len && !break2; j++) {
pos = log2lin(lin2log(i) * intermediate[j]);
if (pos > min && (!minor || lastPos <= max) && lastPos !== undefined) { // #1670, lastPos is #3113
positions.push(lastPos);
}
if (lastPos > max) {
break2 = true;
}
lastPos = pos;
}
}
// Third case: We are so deep in between whole logarithmic values that
// we might as well handle the tick positions like a linear axis. For
// example 1.01, 1.02, 1.03, 1.04.
} else {
var realMin = lin2log(min),
realMax = lin2log(max),
tickIntervalOption = options[minor ? 'minorTickInterval' : 'tickInterval'],
filteredTickIntervalOption = tickIntervalOption === 'auto' ? null : tickIntervalOption,
tickPixelIntervalOption = options.tickPixelInterval / (minor ? 5 : 1),
totalPixelLength = minor ? axisLength / axis.tickPositions.length : axisLength;
interval = pick(
filteredTickIntervalOption,
axis._minorAutoInterval,
(realMax - realMin) * tickPixelIntervalOption / (totalPixelLength || 1)
);
interval = normalizeTickInterval(
interval,
null,
getMagnitude(interval)
);
positions = map(axis.getLinearTickPositions(
interval,
realMin,
realMax
), log2lin);
if (!minor) {
axis._minorAutoInterval = interval / 5;
}
}
// Set the axis-level tickInterval variable
if (!minor) {
axis.tickInterval = interval;
}
return positions;
};
Axis.prototype.log2lin = function(num) {
return Math.log(num) / Math.LN10;
};
Axis.prototype.lin2log = function(num) {
return Math.pow(10, num);
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var dateFormat = H.dateFormat,
each = H.each,
extend = H.extend,
format = H.format,
isNumber = H.isNumber,
map = H.map,
merge = H.merge,
pick = H.pick,
splat = H.splat,
stop = H.stop,
syncTimeout = H.syncTimeout,
timeUnits = H.timeUnits;
/**
* The tooltip object
* @param {Object} chart The chart instance
* @param {Object} options Tooltip options
*/
H.Tooltip = function() {
this.init.apply(this, arguments);
};
H.Tooltip.prototype = {
        /**
         * Initialize the tooltip instance state.
         * @param {Object} chart The owning chart.
         * @param {Object} options The tooltip options.
         */
        init: function(chart, options) {
            // Save the chart and options
            this.chart = chart;
            this.options = options;
            // Keep track of the current series
            //this.currentSeries = undefined;
            // List of crosshairs
            this.crosshairs = [];
            // Current values of x and y when animating
            this.now = {
                x: 0,
                y: 0
            };
            // The tooltip is initially hidden
            this.isHidden = true;
            // Public property for getting the shared state.
            // Split tooltips are not supported on inverted charts.
            this.split = options.split && !chart.inverted;
            this.shared = options.shared || this.split;
        },
/**
* Destroy the single tooltips in a split tooltip.
* If the tooltip is active then it is not destroyed, unless forced to.
* @param {boolean} force Force destroy all tooltips.
* @return {undefined}
*/
cleanSplit: function(force) {
each(this.chart.series, function(series) {
var tt = series && series.tt;
if (tt) {
if (!tt.isActive || force) {
series.tt = tt.destroy();
} else {
tt.isActive = false;
}
}
});
},
        /**
         * Create the Tooltip label element if it doesn't exist, then return the
         * label.
         * @returns {Object} The SVG label (or group, for split tooltips).
         */
        getLabel: function() {
            var renderer = this.chart.renderer,
                options = this.options;
            if (!this.label) {
                // Create the label
                if (this.split) {
                    // Split tooltips render one label per series into a group
                    this.label = renderer.g('tooltip');
                } else {
                    this.label = renderer.label(
                            '',
                            0,
                            0,
                            options.shape || 'callout',
                            null,
                            null,
                            options.useHTML,
                            null,
                            'tooltip'
                        )
                        .attr({
                            padding: options.padding,
                            r: options.borderRadius
                        });
                }
                this.label
                    .attr({
                        zIndex: 8
                    })
                    .add();
            }
            return this.label;
        },
update: function(options) {
this.destroy();
this.init(this.chart, merge(true, this.options, options));
},
        /**
         * Destroy the tooltip and its elements.
         */
        destroy: function() {
            // Destroy and clear local variables
            if (this.label) {
                this.label = this.label.destroy();
            }
            // Split tooltips keep one label per series; destroy those too
            if (this.split && this.tt) {
                this.cleanSplit(this.chart, true);
                this.tt = this.tt.destroy();
            }
            // Cancel any pending hide or smooth-move timers
            clearTimeout(this.hideTimer);
            clearTimeout(this.tooltipTimeout);
        },
        /**
         * Provide a soft movement for the tooltip. Each call moves the label a
         * fraction of the way towards the target and re-schedules itself until
         * the target is (nearly) reached.
         *
         * @param {Number} x Target x position.
         * @param {Number} y Target y position.
         * @param {Number} anchorX Anchor (callout arrow) target x.
         * @param {Number} anchorY Anchor (callout arrow) target y.
         * @private
         */
        move: function(x, y, anchorX, anchorY) {
            var tooltip = this,
                now = tooltip.now,
                animate = tooltip.options.animation !== false && !tooltip.isHidden &&
                // When we get close to the target position, abort animation and land on the right place (#3056)
                (Math.abs(x - now.x) > 1 || Math.abs(y - now.y) > 1),
                skipAnchor = tooltip.followPointer || tooltip.len > 1;
            // Get intermediate values for animation
            extend(now, {
                x: animate ? (2 * now.x + x) / 3 : x,
                y: animate ? (now.y + y) / 2 : y,
                anchorX: skipAnchor ? undefined : animate ? (2 * now.anchorX + anchorX) / 3 : anchorX,
                anchorY: skipAnchor ? undefined : animate ? (now.anchorY + anchorY) / 2 : anchorY
            });
            // Move to the intermediate value
            tooltip.getLabel().attr(now);
            // Run on next tick of the mouse tracker
            if (animate) {
                // Never allow two timeouts
                clearTimeout(this.tooltipTimeout);
                // Set the fixed interval ticking for the smooth tooltip
                this.tooltipTimeout = setTimeout(function() {
                    // The interval function may still be running during destroy,
                    // so check that the chart is really there before calling.
                    if (tooltip) {
                        tooltip.move(x, y, anchorX, anchorY);
                    }
                }, 32);
            }
        },
/**
* Hide the tooltip
*/
hide: function(delay) {
var tooltip = this;
clearTimeout(this.hideTimer); // disallow duplicate timers (#1728, #1766)
delay = pick(delay, this.options.hideDelay, 500);
if (!this.isHidden) {
this.hideTimer = syncTimeout(function() {
tooltip.getLabel()[delay ? 'fadeOut' : 'hide']();
tooltip.isHidden = true;
}, delay);
}
},
        /**
         * Extendable method to get the anchor position of the tooltip
         * from a point or set of points.
         *
         * @param {Object|Array} points A single point or, for shared tooltips,
         *        an array of points.
         * @param {Object} [mouseEvent] The pointer event, used when the
         *        tooltip follows the pointer.
         * @returns {Array<Number>} Rounded [x, y] in plot-area coordinates.
         */
        getAnchor: function(points, mouseEvent) {
            var ret,
                chart = this.chart,
                inverted = chart.inverted,
                plotTop = chart.plotTop,
                plotLeft = chart.plotLeft,
                plotX = 0,
                plotY = 0,
                yAxis,
                xAxis;
            points = splat(points);
            // Pie uses a special tooltipPos
            ret = points[0].tooltipPos;
            // When tooltip follows mouse, relate the position to the mouse
            if (this.followPointer && mouseEvent) {
                if (mouseEvent.chartX === undefined) {
                    mouseEvent = chart.pointer.normalize(mouseEvent);
                }
                ret = [
                    mouseEvent.chartX - chart.plotLeft,
                    mouseEvent.chartY - plotTop
                ];
            }
            // When shared, use the average position
            if (!ret) {
                each(points, function(point) {
                    yAxis = point.series.yAxis;
                    xAxis = point.series.xAxis;
                    // Offset by axis position for multi-axis charts
                    plotX += point.plotX + (!inverted && xAxis ? xAxis.left - plotLeft : 0);
                    plotY += (point.plotLow ? (point.plotLow + point.plotHigh) / 2 : point.plotY) +
                        (!inverted && yAxis ? yAxis.top - plotTop : 0); // #1151
                });
                plotX /= points.length;
                plotY /= points.length;
                ret = [
                    inverted ? chart.plotWidth - plotY : plotX,
                    this.shared && !inverted && points.length > 1 && mouseEvent ?
                    mouseEvent.chartY - plotTop : // place shared tooltip next to the mouse (#424)
                    inverted ? chart.plotHeight - plotX : plotY
                ];
            }
            return map(ret, Math.round);
        },
        /**
         * Place the tooltip in a chart without spilling over
         * and not covering the point it self.
         *
         * Works on two axes ("dimensions") that can be swapped: the preferred
         * dimension places the box beside the point, the secondary dimension
         * aligns it. If neither orientation fits, the box falls back to the
         * top-left corner.
         *
         * @param {Number} boxWidth Width of the tooltip box.
         * @param {Number} boxHeight Height of the tooltip box.
         * @param {Object} point Pseudo-point holding plotX/plotY (and h, #4117).
         * @returns {Object} Object with x and y chart coordinates.
         */
        getPosition: function(boxWidth, boxHeight, point) {
            var chart = this.chart,
                distance = this.distance,
                ret = {},
                h = point.h || 0, // #4117
                swapped,
                first = ['y', chart.chartHeight, boxHeight,
                    point.plotY + chart.plotTop, chart.plotTop,
                    chart.plotTop + chart.plotHeight
                ],
                second = ['x', chart.chartWidth, boxWidth,
                    point.plotX + chart.plotLeft, chart.plotLeft,
                    chart.plotLeft + chart.plotWidth
                ],
                // The far side is right or bottom
                preferFarSide = !this.followPointer && pick(point.ttBelow, !chart.inverted === !!point.negative), // #4984
                /**
                 * Handle the preferred dimension. When the preferred dimension is tooltip
                 * on top or bottom of the point, it will look for space there.
                 */
                firstDimension = function(dim, outerSize, innerSize, point, min, max) {
                    var roomLeft = innerSize < point - distance,
                        roomRight = point + distance + innerSize < outerSize,
                        alignedLeft = point - distance - innerSize,
                        alignedRight = point + distance;
                    if (preferFarSide && roomRight) {
                        ret[dim] = alignedRight;
                    } else if (!preferFarSide && roomLeft) {
                        ret[dim] = alignedLeft;
                    } else if (roomLeft) {
                        ret[dim] = Math.min(max - innerSize, alignedLeft - h < 0 ? alignedLeft : alignedLeft - h);
                    } else if (roomRight) {
                        ret[dim] = Math.max(
                            min,
                            alignedRight + h + innerSize > outerSize ?
                            alignedRight :
                            alignedRight + h
                        );
                    } else {
                        // No room on either side: signal the caller to swap
                        return false;
                    }
                },
                /**
                 * Handle the secondary dimension. If the preferred dimension is tooltip
                 * on top or bottom of the point, the second dimension is to align the tooltip
                 * above the point, trying to align center but allowing left or right
                 * align within the chart box.
                 */
                secondDimension = function(dim, outerSize, innerSize, point) {
                    var retVal;
                    // Too close to the edge, return false and swap dimensions
                    if (point < distance || point > outerSize - distance) {
                        retVal = false;
                        // Align left/top
                    } else if (point < innerSize / 2) {
                        ret[dim] = 1;
                        // Align right/bottom
                    } else if (point > outerSize - innerSize / 2) {
                        ret[dim] = outerSize - innerSize - 2;
                        // Align center
                    } else {
                        ret[dim] = point - innerSize / 2;
                    }
                    return retVal;
                },
                /**
                 * Swap the dimensions
                 */
                swap = function(count) {
                    var temp = first;
                    first = second;
                    second = temp;
                    swapped = count;
                },
                run = function() {
                    if (firstDimension.apply(0, first) !== false) {
                        if (secondDimension.apply(0, second) === false && !swapped) {
                            swap(true);
                            run();
                        }
                    } else if (!swapped) {
                        swap(true);
                        run();
                    } else {
                        // Both orientations failed: pin to the corner
                        ret.x = ret.y = 0;
                    }
                };
            // Under these conditions, prefer the tooltip on the side of the point
            if (chart.inverted || this.len > 1) {
                swap();
            }
            run();
            return ret;
        },
/**
* In case no user defined formatter is given, this will be used. Note that the context
* here is an object holding point, series, x, y etc.
*
* @returns {String|Array<String>}
*/
defaultFormatter: function(tooltip) {
var items = this.points || splat(this),
s;
// Build the header
s = [tooltip.tooltipFooterHeaderFormatter(items[0])];
// build the values
s = s.concat(tooltip.bodyFormatter(items));
// footer
s.push(tooltip.tooltipFooterHeaderFormatter(items[0], true));
return s;
},
        /**
         * Refresh the tooltip's text and position.
         * @param {Object|Array} point A single point, or an array of points for
         *        shared tooltips.
         * @param {Object} [mouseEvent] The triggering pointer event.
         */
        refresh: function(point, mouseEvent) {
            var tooltip = this,
                chart = tooltip.chart,
                label = tooltip.getLabel(),
                options = tooltip.options,
                x,
                y,
                anchor,
                textConfig = {},
                text,
                pointConfig = [],
                formatter = options.formatter || tooltip.defaultFormatter,
                hoverPoints = chart.hoverPoints,
                shared = tooltip.shared,
                currentSeries;
            // A pending hide would otherwise race with this refresh
            clearTimeout(this.hideTimer);
            // get the reference point coordinates (pie charts use tooltipPos)
            tooltip.followPointer = splat(point)[0].series.tooltipOptions.followPointer;
            anchor = tooltip.getAnchor(point, mouseEvent);
            x = anchor[0];
            y = anchor[1];
            // shared tooltip, array is sent over
            if (shared && !(point.series && point.series.noSharedTooltip)) {
                // hide previous hoverPoints and set new
                chart.hoverPoints = point;
                if (hoverPoints) {
                    each(hoverPoints, function(point) {
                        point.setState();
                    });
                }
                each(point, function(item) {
                    item.setState('hover');
                    pointConfig.push(item.getLabelConfig());
                });
                textConfig = {
                    x: point[0].category,
                    y: point[0].y
                };
                textConfig.points = pointConfig;
                this.len = pointConfig.length;
                point = point[0];
                // single point tooltip
            } else {
                textConfig = point.getLabelConfig();
            }
            text = formatter.call(textConfig, tooltip);
            // register the current series
            currentSeries = point.series;
            this.distance = pick(currentSeries.tooltipOptions.distance, 16);
            // update the inner HTML
            if (text === false) {
                // The formatter vetoed this tooltip
                this.hide();
            } else {
                // show it
                if (tooltip.isHidden) {
                    stop(label);
                    label.attr({
                        opacity: 1
                    }).show();
                }
                // update text
                if (tooltip.split) {
                    this.renderSplit(text, chart.hoverPoints);
                } else {
                    label.attr({
                        text: text.join ? text.join('') : text
                    });
                    // Set the stroke color of the box to reflect the point
                    label.removeClass(/highcharts-color-[\d]+/g)
                        .addClass('highcharts-color-' + pick(point.colorIndex, currentSeries.colorIndex));
                    tooltip.updatePosition({
                        plotX: x,
                        plotY: y,
                        negative: point.negative,
                        ttBelow: point.ttBelow,
                        h: anchor[2] || 0
                    });
                }
                this.isHidden = false;
            }
        },
        /**
         * Render the split tooltip. Loops over each point's text and adds
         * a label next to the point, then uses the distribute function to
         * find best non-overlapping positions.
         *
         * @param {Array<String>} labels Formatted lines; item 0 is the header,
         *        the last item (footer) is ignored here.
         * @param {Array<Object>} points The hovered points, parallel to the
         *        body labels.
         */
        renderSplit: function(labels, points) {
            var tooltip = this,
                boxes = [],
                chart = this.chart,
                ren = chart.renderer,
                rightAligned = true,
                options = this.options,
                headerHeight,
                tooltipLabel = this.getLabel();
            // Create the individual labels
            each(labels.slice(0, labels.length - 1), function(str, i) {
                var point = points[i - 1] ||
                    // Item 0 is the header. Instead of this, we could also use the crosshair label
                    {
                        isHeader: true,
                        plotX: points[0].plotX
                    },
                    owner = point.series || tooltip,
                    tt = owner.tt,
                    series = point.series || {},
                    colorClass = 'highcharts-color-' + pick(point.colorIndex, series.colorIndex, 'none'),
                    target,
                    x,
                    bBox,
                    boxWidth;
                // Store the tooltip referance on the series
                if (!tt) {
                    owner.tt = tt = ren.label(null, null, null, point.isHeader && 'callout')
                        .addClass('highcharts-tooltip-box ' + colorClass)
                        .attr({
                            'padding': options.padding,
                            'r': options.borderRadius
                        })
                        .add(tooltipLabel);
                    // Add a connector back to the point
                    if (point.series) {
                        tt.connector = ren.path()
                            .addClass('highcharts-tooltip-connector ' + colorClass)
                            // Add it inside the label group so we will get hide and
                            // destroy for free
                            .add(tt);
                    }
                }
                tt.isActive = true;
                tt.attr({
                    text: str
                });
                // Get X position now, so we can move all to the other side in case of overflow
                bBox = tt.getBBox();
                boxWidth = bBox.width + tt.strokeWidth();
                if (point.isHeader) {
                    headerHeight = bBox.height;
                    // Center the header over the point, clamped to the chart
                    x = Math.max(
                        0, // No left overflow
                        Math.min(
                            point.plotX + chart.plotLeft - boxWidth / 2,
                            chart.chartWidth - boxWidth // No right overflow (#5794)
                        )
                    );
                } else {
                    x = point.plotX + chart.plotLeft - pick(options.distance, 16) -
                        boxWidth;
                }
                // If overflow left, we don't use this x in the next loop
                if (x < 0) {
                    rightAligned = false;
                }
                // Prepare for distribution
                target = (point.series && point.series.yAxis && point.series.yAxis.pos) + (point.plotY || 0);
                target -= chart.plotTop;
                boxes.push({
                    target: point.isHeader ? chart.plotHeight + headerHeight : target,
                    rank: point.isHeader ? 1 : 0,
                    size: owner.tt.getBBox().height + 1,
                    point: point,
                    x: x,
                    tt: tt
                });
            });
            // Clean previous run (for missing points)
            this.cleanSplit();
            // Distribute and put in place
            H.distribute(boxes, chart.plotHeight + headerHeight);
            each(boxes, function(box) {
                var point = box.point,
                    tt = box.tt,
                    attr;
                // Put the label in place
                attr = {
                    visibility: box.pos === undefined ? 'hidden' : 'inherit',
                    x: (rightAligned || point.isHeader ? box.x : point.plotX + chart.plotLeft + pick(options.distance, 16)),
                    y: box.pos + chart.plotTop
                };
                if (point.isHeader) {
                    attr.anchorX = point.plotX + chart.plotLeft;
                    attr.anchorY = attr.y - 100;
                }
                tt.attr(attr);
                // Draw the connector to the point
                if (!point.isHeader) {
                    tt.connector.attr({
                        d: [
                            'M',
                            point.plotX + chart.plotLeft - attr.x,
                            point.plotY + point.series.yAxis.pos - attr.y,
                            'L',
                            (rightAligned ? -1 : 1) * pick(options.distance, 16) +
                            point.plotX + chart.plotLeft - attr.x,
                            box.pos + chart.plotTop + tt.getBBox().height / 2 -
                            attr.y
                        ]
                    });
                }
            });
        },
/**
* Find the new position and perform the move
*/
updatePosition: function(point) {
var chart = this.chart,
label = this.getLabel(),
pos = (this.options.positioner || this.getPosition).call(
this,
label.width,
label.height,
point
);
// do the move
this.move(
Math.round(pos.x),
Math.round(pos.y || 0), // can be undefined (#3977)
point.plotX + chart.plotLeft,
point.plotY + chart.plotTop
);
},
        /**
         * Get the best X date format based on the closest point range on the
         * axis.
         *
         * @param {Object} point The point whose x value is formatted.
         * @param {Object} options Tooltip options holding dateTimeLabelFormats.
         * @param {Object} xAxis The x axis, used for its closestPointRange.
         * @returns {String} A date format string.
         */
        getXDateFormat: function(point, options, xAxis) {
            var xDateFormat,
                dateTimeLabelFormats = options.dateTimeLabelFormats,
                closestPointRange = xAxis && xAxis.closestPointRange,
                n,
                // Reference timestamp tail; trailing segments equal to this are
                // considered "blank" (midnight, first of month etc.)
                blank = '01-01 00:00:00.000',
                strpos = {
                    millisecond: 15,
                    second: 12,
                    minute: 9,
                    hour: 6,
                    day: 3
                },
                date,
                lastN = 'millisecond'; // for sub-millisecond data, #4223
            if (closestPointRange) {
                date = dateFormat('%m-%d %H:%M:%S.%L', point.x);
                for (n in timeUnits) {
                    // If the range is exactly one week and we're looking at a Sunday/Monday, go for the week format
                    if (closestPointRange === timeUnits.week && +dateFormat('%w', point.x) === xAxis.options.startOfWeek &&
                        date.substr(6) === blank.substr(6)) {
                        n = 'week';
                        break;
                    }
                    // The first format that is too great for the range
                    if (timeUnits[n] > closestPointRange) {
                        n = lastN;
                        break;
                    }
                    // If the point is placed every day at 23:59, we need to show
                    // the minutes as well. #2637.
                    if (strpos[n] && date.substr(strpos[n]) !== blank.substr(strpos[n])) {
                        break;
                    }
                    // Weeks are outside the hierarchy, only apply them on Mondays/Sundays like in the first condition
                    if (n !== 'week') {
                        lastN = n;
                    }
                }
                if (n) {
                    xDateFormat = dateTimeLabelFormats[n];
                }
            } else {
                xDateFormat = dateTimeLabelFormats.day;
            }
            return xDateFormat || dateTimeLabelFormats.year; // #2546, 2581
        },
        /**
         * Format the footer/header of the tooltip.
         * #3397: abstraction to enable formatting of footer and header
         *
         * @param {Object} labelConfig The label config of the first point.
         * @param {Boolean} [isFooter] Format the footer instead of the header.
         * @returns {String} The formatted header or footer text.
         */
        tooltipFooterHeaderFormatter: function(labelConfig, isFooter) {
            var footOrHead = isFooter ? 'footer' : 'header',
                series = labelConfig.series,
                tooltipOptions = series.tooltipOptions,
                xDateFormat = tooltipOptions.xDateFormat,
                xAxis = series.xAxis,
                isDateTime = xAxis && xAxis.options.type === 'datetime' && isNumber(labelConfig.key),
                formatString = tooltipOptions[footOrHead + 'Format'];
            // Guess the best date format based on the closest point distance (#568, #3418)
            if (isDateTime && !xDateFormat) {
                xDateFormat = this.getXDateFormat(labelConfig, tooltipOptions, xAxis);
            }
            // Insert the footer date format if any
            if (isDateTime && xDateFormat) {
                formatString = formatString.replace('{point.key}', '{point.key:' + xDateFormat + '}');
            }
            return format(formatString, {
                point: labelConfig,
                series: series
            });
        },
/**
* Build the body (lines) of the tooltip by iterating over the items and returning one entry for each item,
* abstracting this functionality allows to easily overwrite and extend it.
*/
bodyFormatter: function(items) {
return map(items, function(item) {
var tooltipOptions = item.series.tooltipOptions;
return (tooltipOptions.pointFormatter || item.point.tooltipFormatter)
.call(item.point, tooltipOptions.pointFormat);
});
}
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
attr = H.attr,
charts = H.charts,
color = H.color,
css = H.css,
defined = H.defined,
doc = H.doc,
each = H.each,
extend = H.extend,
fireEvent = H.fireEvent,
offset = H.offset,
pick = H.pick,
removeEvent = H.removeEvent,
splat = H.splat,
Tooltip = H.Tooltip,
win = H.win;
/**
* The mouse tracker object. All methods starting with "on" are primary DOM event handlers.
* Subsequent methods should be named differently from what they are doing.
* @param {Object} chart The Chart instance
* @param {Object} options The root options object
*/
H.Pointer = function(chart, options) {
this.init(chart, options);
};
H.Pointer.prototype = {
        /**
         * Initialize Pointer: store references, create the tooltip when
         * enabled, and attach DOM event handlers.
         * @param {Object} chart The owning chart.
         * @param {Object} options The root chart options.
         */
        init: function(chart, options) {
            // Store references
            this.options = options;
            this.chart = chart;
            // Do we need to handle click on a touch device?
            this.runChartClick = options.chart.events && !!options.chart.events.click;
            this.pinchDown = [];
            this.lastValidTouch = {};
            if (Tooltip && options.tooltip.enabled) {
                chart.tooltip = new Tooltip(chart, options.tooltip);
                this.followTouchMove = pick(options.tooltip.followTouchMove, true);
            }
            this.setDOMEvents();
        },
/**
* Resolve the zoomType option
*/
zoomOption: function() {
var chart = this.chart,
zoomType = chart.options.chart.zoomType,
zoomX = /x/.test(zoomType),
zoomY = /y/.test(zoomType),
inverted = chart.inverted;
this.zoomX = zoomX;
this.zoomY = zoomY;
this.zoomHor = (zoomX && !inverted) || (zoomY && inverted);
this.zoomVert = (zoomY && !inverted) || (zoomX && inverted);
this.hasZoom = zoomX || zoomY;
},
        /**
         * Add crossbrowser support for chartX and chartY.
         * @param {Object} e The event object in standard browsers
         * @param {Object} [chartPosition] Precomputed container offset; looked
         *        up (and cached) when omitted.
         * @returns {Object} The event extended with chartX and chartY.
         */
        normalize: function(e, chartPosition) {
            var chartX,
                chartY,
                ePos;
            // IE normalizing
            e = e || win.event;
            if (!e.target) {
                e.target = e.srcElement;
            }
            // iOS (#2757)
            ePos = e.touches ? (e.touches.length ? e.touches.item(0) : e.changedTouches[0]) : e;
            // Get mouse position
            if (!chartPosition) {
                this.chartPosition = chartPosition = offset(this.chart.container);
            }
            // chartX and chartY
            if (ePos.pageX === undefined) { // IE < 9. #886.
                chartX = Math.max(e.x, e.clientX - chartPosition.left); // #2005, #2129: the second case is
                // for IE10 quirks mode within framesets
                chartY = e.y;
            } else {
                chartX = ePos.pageX - chartPosition.left;
                chartY = ePos.pageY - chartPosition.top;
            }
            return extend(e, {
                chartX: Math.round(chartX),
                chartY: Math.round(chartY)
            });
        },
/**
* Get the click position in terms of axis values.
*
* @param {Object} e A pointer event
*/
getCoordinates: function(e) {
var coordinates = {
xAxis: [],
yAxis: []
};
each(this.chart.axes, function(axis) {
coordinates[axis.isXAxis ? 'xAxis' : 'yAxis'].push({
axis: axis,
value: axis.toValue(e[axis.horiz ? 'chartX' : 'chartY'])
});
});
return coordinates;
},
        /**
         * With line type charts with a single tracker, get the point closest to the mouse.
         * Run Point.onMouseOver and display tooltip for the point or points.
         * @param {Object} e The normalized pointer event.
         */
        runPointActions: function(e) {
            var pointer = this,
                chart = pointer.chart,
                series = chart.series,
                tooltip = chart.tooltip,
                shared = tooltip ? tooltip.shared : false,
                followPointer,
                updatePosition = true,
                hoverPoint = chart.hoverPoint,
                hoverSeries = chart.hoverSeries,
                i,
                anchor,
                noSharedTooltip,
                stickToHoverSeries,
                directTouch,
                kdpoints = [],
                kdpointT;
            // For hovering over the empty parts of the plot area (hoverSeries is undefined).
            // If there is one series with point tracking (combo chart), don't go to nearest neighbour.
            if (!shared && !hoverSeries) {
                for (i = 0; i < series.length; i++) {
                    if (series[i].directTouch || !series[i].options.stickyTracking) {
                        series = [];
                    }
                }
            }
            // If it has a hoverPoint and that series requires direct touch (like columns, #3899), or we're on
            // a noSharedTooltip series among shared tooltip series (#4546), use the hoverPoint . Otherwise,
            // search the k-d tree.
            stickToHoverSeries = hoverSeries && (shared ? hoverSeries.noSharedTooltip : hoverSeries.directTouch);
            if (stickToHoverSeries && hoverPoint) {
                kdpoints = [hoverPoint];
                // Handle shared tooltip or cases where a series is not yet hovered
            } else {
                // When we have non-shared tooltip and sticky tracking is disabled,
                // search for the closest point only on hovered series: #5533, #5476
                if (!shared && hoverSeries && !hoverSeries.options.stickyTracking) {
                    series = [hoverSeries];
                }
                // Find nearest points on all series
                each(series, function(s) {
                    // Skip hidden series
                    noSharedTooltip = s.noSharedTooltip && shared;
                    directTouch = !shared && s.directTouch;
                    if (s.visible && !noSharedTooltip && !directTouch && pick(s.options.enableMouseTracking, true)) { // #3821
                        kdpointT = s.searchPoint(e, !noSharedTooltip && s.kdDimensions === 1); // #3828
                        if (kdpointT && kdpointT.series) { // Point.series becomes null when reset and before redraw (#5197)
                            kdpoints.push(kdpointT);
                        }
                    }
                });
                // Sort kdpoints by distance to mouse pointer
                kdpoints.sort(function(p1, p2) {
                    var isCloserX = p1.distX - p2.distX,
                        isCloser = p1.dist - p2.dist,
                        isAbove = p1.series.group.zIndex > p2.series.group.zIndex ? -1 : 1;
                    // We have two points which are not in the same place on xAxis and shared tooltip:
                    if (isCloserX !== 0 && shared) { // #5721
                        return isCloserX;
                    }
                    // Points are not exactly in the same place on x/yAxis:
                    if (isCloser !== 0) {
                        return isCloser;
                    }
                    // The same xAxis and yAxis position, sort by z-index:
                    return isAbove;
                });
            }
            // Remove points with different x-positions, required for shared tooltip and crosshairs (#4645):
            if (shared) {
                i = kdpoints.length;
                while (i--) {
                    if (kdpoints[i].x !== kdpoints[0].x || kdpoints[i].series.noSharedTooltip) {
                        kdpoints.splice(i, 1);
                    }
                }
            }
            // Refresh tooltip for kdpoint if new hover point or tooltip was hidden // #3926, #4200
            if (kdpoints[0] && (kdpoints[0] !== this.prevKDPoint || (tooltip && tooltip.isHidden))) {
                // Draw tooltip if necessary
                if (shared && !kdpoints[0].series.noSharedTooltip) {
                    // Do mouseover on all points (#3919, #3985, #4410, #5622)
                    for (i = 0; i < kdpoints.length; i++) {
                        kdpoints[i].onMouseOver(e, kdpoints[i] !== ((hoverSeries && hoverSeries.directTouch && hoverPoint) || kdpoints[0]));
                    }
                    if (kdpoints.length && tooltip) {
                        // Keep the order of series in tooltip:
                        tooltip.refresh(kdpoints.sort(function(p1, p2) {
                            return p1.series.index - p2.series.index;
                        }), e);
                    }
                } else {
                    if (tooltip) {
                        tooltip.refresh(kdpoints[0], e);
                    }
                    if (!hoverSeries || !hoverSeries.directTouch) { // #4448
                        kdpoints[0].onMouseOver(e);
                    }
                }
                this.prevKDPoint = kdpoints[0];
                updatePosition = false;
            }
            // Update positions (regardless of kdpoint or hoverPoint)
            if (updatePosition) {
                followPointer = hoverSeries && hoverSeries.tooltipOptions.followPointer;
                if (tooltip && followPointer && !tooltip.isHidden) {
                    anchor = tooltip.getAnchor([{}], e);
                    tooltip.updatePosition({
                        plotX: anchor[0],
                        plotY: anchor[1]
                    });
                }
            }
            // Start the event listener to pick up the tooltip and crosshairs
            if (!pointer._onDocumentMouseMove) {
                pointer._onDocumentMouseMove = function(e) {
                    if (charts[H.hoverChartIndex]) {
                        charts[H.hoverChartIndex].pointer.onDocumentMouseMove(e);
                    }
                };
                addEvent(doc, 'mousemove', pointer._onDocumentMouseMove);
            }
            // Crosshair. For each hover point, loop over axes and draw cross if that point
            // belongs to the axis (#4927).
            each(shared ? kdpoints : [pick(hoverPoint, kdpoints[0])], function drawPointCrosshair(point) { // #5269
                each(chart.axes, function drawAxisCrosshair(axis) {
                    // In case of snap = false, point is undefined, and we draw the crosshair anyway (#5066)
                    if (!point || point.series && point.series[axis.coll] === axis) { // #5658
                        axis.drawCrosshair(e, point);
                    }
                });
            });
        },
        /**
         * Reset the tracking by hiding the tooltip, the hover series state and the hover point
         *
         * @param allowMove {Boolean} Instead of destroying the tooltip altogether, allow moving it if possible
         * @param {Number} [delay] Delay passed on to Tooltip.hide.
         */
        reset: function(allowMove, delay) {
            var pointer = this,
                chart = pointer.chart,
                hoverSeries = chart.hoverSeries,
                hoverPoint = chart.hoverPoint,
                hoverPoints = chart.hoverPoints,
                tooltip = chart.tooltip,
                tooltipPoints = tooltip && tooltip.shared ? hoverPoints : hoverPoint;
            // Check if the points have moved outside the plot area (#1003, #4736, #5101)
            if (allowMove && tooltipPoints) {
                each(splat(tooltipPoints), function(point) {
                    if (point.series.isCartesian && point.plotX === undefined) {
                        allowMove = false;
                    }
                });
            }
            // Just move the tooltip, #349
            if (allowMove) {
                if (tooltip && tooltipPoints) {
                    tooltip.refresh(tooltipPoints);
                    if (hoverPoint) { // #2500
                        hoverPoint.setState(hoverPoint.state, true);
                        each(chart.axes, function(axis) {
                            if (axis.crosshair) {
                                axis.drawCrosshair(null, hoverPoint);
                            }
                        });
                    }
                }
                // Full reset
            } else {
                if (hoverPoint) {
                    hoverPoint.onMouseOut();
                }
                if (hoverPoints) {
                    each(hoverPoints, function(point) {
                        point.setState();
                    });
                }
                if (hoverSeries) {
                    hoverSeries.onMouseOut();
                }
                if (tooltip) {
                    tooltip.hide(delay);
                }
                // Detach the document-level mousemove tracker
                if (pointer._onDocumentMouseMove) {
                    removeEvent(doc, 'mousemove', pointer._onDocumentMouseMove);
                    pointer._onDocumentMouseMove = null;
                }
                // Remove crosshairs
                each(chart.axes, function(axis) {
                    axis.hideCrosshair();
                });
                pointer.hoverX = pointer.prevKDPoint = chart.hoverPoints = chart.hoverPoint = null;
            }
        },
        /**
         * Scale series groups to a certain scale and translation, used to give
         * immediate visual feedback during pinch zooming.
         * @param {Object} [attribs] Transform attributes; defaults to each
         *        series' own plot box.
         * @param {Boolean} [clip] Whether to clip the marker groups.
         */
        scaleGroups: function(attribs, clip) {
            var chart = this.chart,
                seriesAttribs;
            // Scale each series
            each(chart.series, function(series) {
                seriesAttribs = attribs || series.getPlotBox(); // #1701
                if (series.xAxis && series.xAxis.zoomEnabled && series.group) {
                    series.group.attr(seriesAttribs);
                    if (series.markerGroup) {
                        series.markerGroup.attr(seriesAttribs);
                        series.markerGroup.clip(clip ? chart.clipRect : null);
                    }
                    if (series.dataLabelsGroup) {
                        series.dataLabelsGroup.attr(seriesAttribs);
                    }
                }
            });
            // Clip
            chart.clipRect.attr(clip || chart.clipBox);
        },
/**
* Start a drag operation
*/
dragStart: function(e) {
var chart = this.chart;
// Record the start position
chart.mouseIsDown = e.type;
chart.cancelClick = false;
chart.mouseDownX = this.mouseDownX = e.chartX;
chart.mouseDownY = this.mouseDownY = e.chartY;
},
        /**
         * Perform a drag operation in response to a mousemove event while the
         * mouse is down. Grows the zoom selection marker or pans the chart.
         * @param {Object} e The normalized pointer event.
         */
        drag: function(e) {
            var chart = this.chart,
                chartOptions = chart.options.chart,
                chartX = e.chartX,
                chartY = e.chartY,
                zoomHor = this.zoomHor,
                zoomVert = this.zoomVert,
                plotLeft = chart.plotLeft,
                plotTop = chart.plotTop,
                plotWidth = chart.plotWidth,
                plotHeight = chart.plotHeight,
                clickedInside,
                size,
                selectionMarker = this.selectionMarker,
                mouseDownX = this.mouseDownX,
                mouseDownY = this.mouseDownY,
                panKey = chartOptions.panKey && e[chartOptions.panKey + 'Key'];
            // If the device supports both touch and mouse (like IE11), and we are touch-dragging
            // inside the plot area, don't handle the mouse event. #4339.
            if (selectionMarker && selectionMarker.touch) {
                return;
            }
            // If the mouse is outside the plot area, adjust to cooordinates
            // inside to prevent the selection marker from going outside
            if (chartX < plotLeft) {
                chartX = plotLeft;
            } else if (chartX > plotLeft + plotWidth) {
                chartX = plotLeft + plotWidth;
            }
            if (chartY < plotTop) {
                chartY = plotTop;
            } else if (chartY > plotTop + plotHeight) {
                chartY = plotTop + plotHeight;
            }
            // determine if the mouse has moved more than 10px
            this.hasDragged = Math.sqrt(
                Math.pow(mouseDownX - chartX, 2) +
                Math.pow(mouseDownY - chartY, 2)
            );
            if (this.hasDragged > 10) {
                clickedInside = chart.isInsidePlot(mouseDownX - plotLeft, mouseDownY - plotTop);
                // make a selection
                if (chart.hasCartesianSeries && (this.zoomX || this.zoomY) && clickedInside && !panKey) {
                    if (!selectionMarker) {
                        this.selectionMarker = selectionMarker = chart.renderer.rect(
                                plotLeft,
                                plotTop,
                                zoomHor ? 1 : plotWidth,
                                zoomVert ? 1 : plotHeight,
                                0
                            )
                            .attr({
                                'class': 'highcharts-selection-marker',
                                'zIndex': 7
                            })
                            .add();
                    }
                }
                // adjust the width of the selection marker
                if (selectionMarker && zoomHor) {
                    size = chartX - mouseDownX;
                    selectionMarker.attr({
                        width: Math.abs(size),
                        x: (size > 0 ? 0 : size) + mouseDownX
                    });
                }
                // adjust the height of the selection marker
                if (selectionMarker && zoomVert) {
                    size = chartY - mouseDownY;
                    selectionMarker.attr({
                        height: Math.abs(size),
                        y: (size > 0 ? 0 : size) + mouseDownY
                    });
                }
                // panning
                if (clickedInside && !selectionMarker && chartOptions.panning) {
                    chart.pan(e, chartOptions.panning);
                }
            }
        },
        /**
         * On mouse up or touch end across the entire document, drop the
         * selection: fire the selection event, zoom, and reset drag state.
         * @param {Object} e The original mouseup/touchend event.
         */
        drop: function(e) {
            var pointer = this,
                chart = this.chart,
                hasPinched = this.hasPinched;
            if (this.selectionMarker) {
                var selectionData = {
                        originalEvent: e, // #4890
                        xAxis: [],
                        yAxis: []
                    },
                    selectionBox = this.selectionMarker,
                    selectionLeft = selectionBox.attr ? selectionBox.attr('x') : selectionBox.x,
                    selectionTop = selectionBox.attr ? selectionBox.attr('y') : selectionBox.y,
                    selectionWidth = selectionBox.attr ? selectionBox.attr('width') : selectionBox.width,
                    selectionHeight = selectionBox.attr ? selectionBox.attr('height') : selectionBox.height,
                    runZoom;
                // a selection has been made
                if (this.hasDragged || hasPinched) {
                    // record each axis' min and max
                    each(chart.axes, function(axis) {
                        if (axis.zoomEnabled && defined(axis.min) && (hasPinched || pointer[{
                                xAxis: 'zoomX',
                                yAxis: 'zoomY'
                            }[axis.coll]])) { // #859, #3569
                            var horiz = axis.horiz,
                                minPixelPadding = e.type === 'touchend' ? axis.minPixelPadding : 0, // #1207, #3075
                                selectionMin = axis.toValue((horiz ? selectionLeft : selectionTop) + minPixelPadding),
                                selectionMax = axis.toValue((horiz ? selectionLeft + selectionWidth : selectionTop + selectionHeight) - minPixelPadding);
                            selectionData[axis.coll].push({
                                axis: axis,
                                min: Math.min(selectionMin, selectionMax), // for reversed axes
                                max: Math.max(selectionMin, selectionMax)
                            });
                            runZoom = true;
                        }
                    });
                    if (runZoom) {
                        fireEvent(chart, 'selection', selectionData, function(args) {
                            chart.zoom(extend(args, hasPinched ? {
                                animation: false
                            } : null));
                        });
                    }
                }
                this.selectionMarker = this.selectionMarker.destroy();
                // Reset scaling preview
                if (hasPinched) {
                    this.scaleGroups();
                }
            }
            // Reset all
            if (chart) { // it may be destroyed on mouse up - #877
                css(chart.container, {
                    cursor: chart._cursor
                });
                chart.cancelClick = this.hasDragged > 10; // #370
                chart.mouseIsDown = this.hasDragged = this.hasPinched = false;
                this.pinchDown = [];
            }
        },
onContainerMouseDown: function(e) {
e = this.normalize(e);
this.zoomOption();
// issue #295, dragging not always working in Firefox
if (e.preventDefault) {
e.preventDefault();
}
this.dragStart(e);
},
onDocumentMouseUp: function(e) {
if (charts[H.hoverChartIndex]) {
charts[H.hoverChartIndex].pointer.drop(e);
}
},
/**
* Special handler for mouse move that will hide the tooltip when the mouse leaves the plotarea.
* Issue #149 workaround. The mouseleave event does not always fire.
*/
onDocumentMouseMove: function(e) {
var chart = this.chart,
chartPosition = this.chartPosition;
e = this.normalize(e, chartPosition);
// If we're outside, hide the tooltip
if (chartPosition && !this.inClass(e.target, 'highcharts-tracker') &&
!chart.isInsidePlot(e.chartX - chart.plotLeft, e.chartY - chart.plotTop)) {
this.reset();
}
},
/**
* When mouse leaves the container, hide the tooltip.
*/
onContainerMouseLeave: function(e) {
var chart = charts[H.hoverChartIndex];
if (chart && (e.relatedTarget || e.toElement)) { // #4886, MS Touch end fires mouseleave but with no related target
chart.pointer.reset();
chart.pointer.chartPosition = null; // also reset the chart position, used in #149 fix
}
},
// The mousemove, touchmove and touchstart event handler
onContainerMouseMove: function(e) {
var chart = this.chart;
if (!defined(H.hoverChartIndex) || !charts[H.hoverChartIndex] || !charts[H.hoverChartIndex].mouseIsDown) {
H.hoverChartIndex = chart.index;
}
e = this.normalize(e);
e.returnValue = false; // #2251, #3224
if (chart.mouseIsDown === 'mousedown') {
this.drag(e);
}
// Show the tooltip and run mouse over events (#977)
if ((this.inClass(e.target, 'highcharts-tracker') ||
chart.isInsidePlot(e.chartX - chart.plotLeft, e.chartY - chart.plotTop)) && !chart.openMenu) {
this.runPointActions(e);
}
},
/**
* Utility to detect whether an element has, or has a parent with, a specific
* class name. Used on detection of tracker objects and on deciding whether
* hovering the tooltip should cause the active series to mouse out.
*/
inClass: function(element, className) {
var elemClassName;
while (element) {
elemClassName = attr(element, 'class');
if (elemClassName) {
if (elemClassName.indexOf(className) !== -1) {
return true;
}
if (elemClassName.indexOf('highcharts-container') !== -1) {
return false;
}
}
element = element.parentNode;
}
},
onTrackerMouseOut: function(e) {
var series = this.chart.hoverSeries,
relatedTarget = e.relatedTarget || e.toElement;
if (series && relatedTarget && !series.options.stickyTracking &&
!this.inClass(relatedTarget, 'highcharts-tooltip') &&
(!this.inClass(relatedTarget, 'highcharts-series-' + series.index) || // #2499, #4465
!this.inClass(relatedTarget, 'highcharts-tracker') // #5553
)
) {
series.onMouseOut();
}
},
onContainerClick: function(e) {
var chart = this.chart,
hoverPoint = chart.hoverPoint,
plotLeft = chart.plotLeft,
plotTop = chart.plotTop;
e = this.normalize(e);
if (!chart.cancelClick) {
// On tracker click, fire the series and point events. #783, #1583
if (hoverPoint && this.inClass(e.target, 'highcharts-tracker')) {
// the series click event
fireEvent(hoverPoint.series, 'click', extend(e, {
point: hoverPoint
}));
// the point click event
if (chart.hoverPoint) { // it may be destroyed (#1844)
hoverPoint.firePointEvent('click', e);
}
// When clicking outside a tracker, fire a chart event
} else {
extend(e, this.getCoordinates(e));
// fire a click event in the chart
if (chart.isInsidePlot(e.chartX - plotLeft, e.chartY - plotTop)) {
fireEvent(chart, 'click', e);
}
}
}
},
/**
* Set the JS DOM events on the container and document. This method should contain
* a one-to-one assignment between methods and their handlers. Any advanced logic should
* be moved to the handler reflecting the event's name.
*/
setDOMEvents: function() {
var pointer = this,
container = pointer.chart.container;
container.onmousedown = function(e) {
pointer.onContainerMouseDown(e);
};
container.onmousemove = function(e) {
pointer.onContainerMouseMove(e);
};
container.onclick = function(e) {
pointer.onContainerClick(e);
};
addEvent(container, 'mouseleave', pointer.onContainerMouseLeave);
if (H.chartCount === 1) {
addEvent(doc, 'mouseup', pointer.onDocumentMouseUp);
}
if (H.hasTouch) {
container.ontouchstart = function(e) {
pointer.onContainerTouchStart(e);
};
container.ontouchmove = function(e) {
pointer.onContainerTouchMove(e);
};
if (H.chartCount === 1) {
addEvent(doc, 'touchend', pointer.onDocumentTouchEnd);
}
}
},
/**
* Destroys the Pointer object and disconnects DOM events.
*/
destroy: function() {
var prop;
removeEvent(this.chart.container, 'mouseleave', this.onContainerMouseLeave);
if (!H.chartCount) {
removeEvent(doc, 'mouseup', this.onDocumentMouseUp);
removeEvent(doc, 'touchend', this.onDocumentTouchEnd);
}
// memory and CPU leak
clearInterval(this.tooltipTimeout);
for (prop in this) {
this[prop] = null;
}
}
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var charts = H.charts,
each = H.each,
extend = H.extend,
map = H.map,
noop = H.noop,
pick = H.pick,
Pointer = H.Pointer;
/* Support for touch devices */
extend(Pointer.prototype, {
/**
* Run translation operations
*/
pinchTranslate: function(pinchDown, touches, transform, selectionMarker, clip, lastValidTouch) {
if (this.zoomHor || this.pinchHor) {
this.pinchTranslateDirection(true, pinchDown, touches, transform, selectionMarker, clip, lastValidTouch);
}
if (this.zoomVert || this.pinchVert) {
this.pinchTranslateDirection(false, pinchDown, touches, transform, selectionMarker, clip, lastValidTouch);
}
},
/**
* Run translation operations for each direction (horizontal and vertical) independently
*/
pinchTranslateDirection: function(horiz, pinchDown, touches, transform,
selectionMarker, clip, lastValidTouch, forcedScale) {
// Compute scale, clip and transform for one direction (x or y) from the
// pinch start positions (pinchDown) and current touch positions (touches).
// Results are written into the transform, selectionMarker and clip objects.
var chart = this.chart,
xy = horiz ? 'x' : 'y',
XY = horiz ? 'X' : 'Y',
sChartXY = 'chart' + XY,
wh = horiz ? 'width' : 'height',
plotLeftTop = chart['plot' + (horiz ? 'Left' : 'Top')],
selectionWH,
selectionXY,
clipXY,
scale = forcedScale || 1,
inverted = chart.inverted,
bounds = chart.bounds[horiz ? 'h' : 'v'],
singleTouch = pinchDown.length === 1,
touch0Start = pinchDown[0][sChartXY],
touch0Now = touches[0][sChartXY],
touch1Start = !singleTouch && pinchDown[1][sChartXY],
touch1Now = !singleTouch && touches[1][sChartXY],
outOfBounds,
transformScale,
scaleKey,
setScale = function() {
// Don't zoom if fingers are too close on this axis
if (!singleTouch && Math.abs(touch0Start - touch1Start) > 20) {
scale = forcedScale || Math.abs(touch0Now - touch1Now) / Math.abs(touch0Start - touch1Start);
}
clipXY = ((plotLeftTop - touch0Now) / scale) + touch0Start;
selectionWH = chart['plot' + (horiz ? 'Width' : 'Height')] / scale;
};
// Set the scale, first pass
setScale();
selectionXY = clipXY; // the clip position (x or y) is altered if out of bounds, the selection position is not
// Out of bounds
if (selectionXY < bounds.min) {
selectionXY = bounds.min;
outOfBounds = true;
} else if (selectionXY + selectionWH > bounds.max) {
selectionXY = bounds.max - selectionWH;
outOfBounds = true;
}
// Is the chart dragged off its bounds, determined by dataMin and dataMax?
if (outOfBounds) {
// Modify the touchNow position in order to create an elastic drag movement. This indicates
// to the user that the chart is responsive but can't be dragged further.
touch0Now -= 0.8 * (touch0Now - lastValidTouch[xy][0]);
if (!singleTouch) {
touch1Now -= 0.8 * (touch1Now - lastValidTouch[xy][1]);
}
// Set the scale, second pass to adapt to the modified touchNow positions
setScale();
} else {
lastValidTouch[xy] = [touch0Now, touch1Now];
}
// Set geometry for clipping, selection and transformation
if (!inverted) {
clip[xy] = clipXY - plotLeftTop;
clip[wh] = selectionWH;
}
// On inverted charts the visual scale axis is swapped relative to the
// data axis, hence the reciprocal scale below.
scaleKey = inverted ? (horiz ? 'scaleY' : 'scaleX') : 'scale' + XY;
transformScale = inverted ? 1 / scale : scale;
selectionMarker[wh] = selectionWH;
selectionMarker[xy] = selectionXY;
transform[scaleKey] = scale;
transform['translate' + XY] = (transformScale * plotLeftTop) + (touch0Now - (transformScale * touch0Start));
},
/**
* Handle touch events with two touches
*/
pinch: function(e) {
// Handle touchstart/touchmove with one or two fingers: registers start
// positions and axis bounds on touchstart, then pans/pinches on touchmove.
var self = this,
chart = self.chart,
pinchDown = self.pinchDown,
touches = e.touches,
touchesLength = touches.length,
lastValidTouch = self.lastValidTouch,
hasZoom = self.hasZoom,
selectionMarker = self.selectionMarker,
transform = {},
fireClickEvent = touchesLength === 1 && ((self.inClass(e.target, 'highcharts-tracker') &&
chart.runTrackerClick) || self.runChartClick),
clip = {};
// Don't initiate panning until the user has pinched. This prevents us from
// blocking page scrolling as users scroll down a long page (#4210).
if (touchesLength > 1) {
self.initiated = true;
}
// On touch devices, only proceed to trigger click if a handler is defined
if (hasZoom && self.initiated && !fireClickEvent) {
e.preventDefault();
}
// Normalize each touch
map(touches, function(e) {
return self.normalize(e);
});
// Register the touch start position
if (e.type === 'touchstart') {
each(touches, function(e, i) {
pinchDown[i] = {
chartX: e.chartX,
chartY: e.chartY
};
});
lastValidTouch.x = [pinchDown[0].chartX, pinchDown[1] && pinchDown[1].chartX];
lastValidTouch.y = [pinchDown[0].chartY, pinchDown[1] && pinchDown[1].chartY];
// Identify the data bounds in pixels
each(chart.axes, function(axis) {
if (axis.zoomEnabled) {
var bounds = chart.bounds[axis.horiz ? 'h' : 'v'],
minPixelPadding = axis.minPixelPadding,
min = axis.toPixels(pick(axis.options.min, axis.dataMin)),
max = axis.toPixels(pick(axis.options.max, axis.dataMax)),
absMin = Math.min(min, max),
absMax = Math.max(min, max);
// Store the bounds for use in the touchmove handler
bounds.min = Math.min(axis.pos, absMin - minPixelPadding);
bounds.max = Math.max(axis.pos + axis.len, absMax + minPixelPadding);
}
});
self.res = true; // reset on next move
// Event type is touchmove, handle panning and pinching
} else if (pinchDown.length) { // can be 0 when releasing, if touchend fires first
// Set the marker
if (!selectionMarker) {
// A plain-object marker (with touch: true) stands in for the SVG
// selection rectangle used by mouse dragging; see drop().
self.selectionMarker = selectionMarker = extend({
destroy: noop,
touch: true
}, chart.plotBox);
}
self.pinchTranslate(pinchDown, touches, transform, selectionMarker, clip, lastValidTouch);
self.hasPinched = hasZoom;
// Scale and translate the groups to provide visual feedback during pinching
self.scaleGroups(transform, clip);
// Optionally move the tooltip on touchmove
if (!hasZoom && self.followTouchMove && touchesLength === 1) {
this.runPointActions(self.normalize(e));
} else if (self.res) {
self.res = false;
this.reset(false, 0);
}
}
},
/**
* General touch handler shared by touchstart and touchmove.
*/
touch: function(e, start) {
// Shared handler for touchstart (start=true) and touchmove. Single touches
// drive the tooltip/pan; two touches go straight to pinch zooming.
var chart = this.chart,
hasMoved,
pinchDown;
H.hoverChartIndex = chart.index;
if (e.touches.length === 1) {
e = this.normalize(e);
if (chart.isInsidePlot(e.chartX - chart.plotLeft, e.chartY - chart.plotTop) && !chart.openMenu) {
// Run mouse events and display tooltip etc
if (start) {
this.runPointActions(e);
}
// Android fires touchmove events after the touchstart even if the
// finger hasn't moved, or moved only a pixel or two. In iOS however,
// the touchmove doesn't fire unless the finger moves more than ~4px.
// So we emulate this behaviour in Android by checking how much it
// moved, and cancelling on small distances. #3450.
if (e.type === 'touchmove') {
pinchDown = this.pinchDown;
hasMoved = pinchDown[0] ? Math.sqrt( // #5266
Math.pow(pinchDown[0].chartX - e.chartX, 2) +
Math.pow(pinchDown[0].chartY - e.chartY, 2)
) >= 4 : false;
}
// hasMoved is undefined on touchstart, so pick() defaults to true there
if (pick(hasMoved, true)) {
this.pinch(e);
}
} else if (start) {
// Hide the tooltip on touching outside the plot area (#1203)
this.reset();
}
} else if (e.touches.length === 2) {
this.pinch(e);
}
},
onContainerTouchStart: function(e) {
this.zoomOption();
this.touch(e, true);
},
onContainerTouchMove: function(e) {
this.touch(e);
},
onDocumentTouchEnd: function(e) {
if (charts[H.hoverChartIndex]) {
charts[H.hoverChartIndex].pointer.drop(e);
}
}
});
}(Highcharts));
(function(H) {
/**
 * (c) 2010-2016 Torstein Honsi
 *
 * License: www.highcharts.com/license
 */
// Module: translates IE/Edge MSPointer/PointerEvent input into the W3C-style
// touch events that the Pointer touch handlers above understand.
'use strict';
var addEvent = H.addEvent,
charts = H.charts,
css = H.css,
doc = H.doc,
extend = H.extend,
noop = H.noop,
Pointer = H.Pointer,
removeEvent = H.removeEvent,
win = H.win,
wrap = H.wrap;
if (win.PointerEvent || win.MSPointerEvent) {
// The touches object keeps track of the points being touched at all times
var touches = {},
hasPointerEvent = !!win.PointerEvent,
// Build a fake TouchList-like array from the tracked pointers
getWebkitTouches = function() {
var key,
fake = [];
fake.item = function(i) {
return this[i];
};
for (key in touches) {
if (touches.hasOwnProperty(key)) {
fake.push({
pageX: touches[key].pageX,
pageY: touches[key].pageY,
target: touches[key].target
});
}
}
return fake;
},
// Run func to update the touches map, then call the named Pointer
// method with a synthesized touch event of type wktype.
translateMSPointer = function(e, method, wktype, func) {
var p;
if ((e.pointerType === 'touch' || e.pointerType === e.MSPOINTER_TYPE_TOUCH) && charts[H.hoverChartIndex]) {
func(e);
p = charts[H.hoverChartIndex].pointer;
p[method]({
type: wktype,
target: e.currentTarget,
preventDefault: noop,
touches: getWebkitTouches()
});
}
};
/**
 * Extend the Pointer prototype with methods for each event handler and more
 */
extend(Pointer.prototype, {
onContainerPointerDown: function(e) {
translateMSPointer(e, 'onContainerTouchStart', 'touchstart', function(e) {
touches[e.pointerId] = {
pageX: e.pageX,
pageY: e.pageY,
target: e.currentTarget
};
});
},
onContainerPointerMove: function(e) {
translateMSPointer(e, 'onContainerTouchMove', 'touchmove', function(e) {
touches[e.pointerId] = {
pageX: e.pageX,
pageY: e.pageY
};
if (!touches[e.pointerId].target) {
touches[e.pointerId].target = e.currentTarget;
}
});
},
onDocumentPointerUp: function(e) {
translateMSPointer(e, 'onDocumentTouchEnd', 'touchend', function(e) {
delete touches[e.pointerId];
});
},
/**
 * Add or remove the MS Pointer specific events
 */
batchMSEvents: function(fn) {
fn(this.chart.container, hasPointerEvent ? 'pointerdown' : 'MSPointerDown', this.onContainerPointerDown);
fn(this.chart.container, hasPointerEvent ? 'pointermove' : 'MSPointerMove', this.onContainerPointerMove);
fn(doc, hasPointerEvent ? 'pointerup' : 'MSPointerUp', this.onDocumentPointerUp);
}
});
// Disable default IE actions for pinch and such on chart element
wrap(Pointer.prototype, 'init', function(proceed, chart, options) {
proceed.call(this, chart, options);
if (this.hasZoom) { // #4014
css(chart.container, {
'-ms-touch-action': 'none',
'touch-action': 'none'
});
}
});
// Add IE specific touch events to chart
wrap(Pointer.prototype, 'setDOMEvents', function(proceed) {
proceed.apply(this);
if (this.hasZoom || this.followTouchMove) {
this.batchMSEvents(addEvent);
}
});
// Destroy MS events also
wrap(Pointer.prototype, 'destroy', function(proceed) {
this.batchMSEvents(removeEvent);
proceed.call(this);
});
}
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Legend,
addEvent = H.addEvent,
css = H.css,
discardElement = H.discardElement,
defined = H.defined,
each = H.each,
extend = H.extend,
isFirefox = H.isFirefox,
marginNames = H.marginNames,
merge = H.merge,
pick = H.pick,
setAnimation = H.setAnimation,
stableSort = H.stableSort,
win = H.win,
wrap = H.wrap;
/**
* The overview of the chart's series
*/
// Legend constructor: all setup is delegated to init, which also renders the
// legend immediately when options.enabled is true.
Legend = H.Legend = function(chart, options) {
this.init(chart, options);
};
Legend.prototype = {
/**
* Initialize the legend
*/
init: function(chart, options) {
this.chart = chart;
this.setOptions(options);
if (options.enabled) {
// Render it
this.render();
// move checkboxes
addEvent(this.chart, 'endResize', function() {
this.legend.positionCheckboxes();
});
}
},
setOptions: function(options) {
var padding = pick(options.padding, 8);
this.options = options;
this.itemMarginTop = options.itemMarginTop || 0;
this.padding = padding;
this.initialItemX = padding;
this.initialItemY = padding - 5; // 5 is the number of pixels above the text
this.maxItemWidth = 0;
this.itemHeight = 0;
this.symbolWidth = pick(options.symbolWidth, 16);
this.pages = [];
},
/**
* Update the legend with new options. Equivalent to running chart.update with a legend
* configuration option.
* @param {Object} options Legend options
* @param {Boolean} redraw Whether to redraw the chart, defaults to true.
*/
update: function(options, redraw) {
var chart = this.chart;
this.setOptions(merge(true, this.options, options));
this.destroy();
chart.isDirtyLegend = chart.isDirtyBox = true;
if (pick(redraw, true)) {
chart.redraw();
}
},
/**
* Set the colors for the legend item
* @param {Object} item A Series or Point instance
* @param {Object} visible Dimmed or colored
*/
colorizeItem: function(item, visible) {
item.legendGroup[visible ? 'removeClass' : 'addClass']('highcharts-legend-item-hidden');
},
/**
* Position the legend item
* @param {Object} item A Series or Point instance
*/
positionItem: function(item) {
var legend = this,
options = legend.options,
symbolPadding = options.symbolPadding,
ltr = !options.rtl,
legendItemPos = item._legendItemPos,
itemX = legendItemPos[0],
itemY = legendItemPos[1],
checkbox = item.checkbox,
legendGroup = item.legendGroup;
if (legendGroup && legendGroup.element) {
legendGroup.translate(
ltr ? itemX : legend.legendWidth - itemX - 2 * symbolPadding - 4,
itemY
);
}
if (checkbox) {
checkbox.x = itemX;
checkbox.y = itemY;
}
},
/**
* Destroy a single legend item
* @param {Object} item The series or point
*/
destroyItem: function(item) {
var checkbox = item.checkbox;
// destroy SVG elements
each(['legendItem', 'legendLine', 'legendSymbol', 'legendGroup'], function(key) {
if (item[key]) {
item[key] = item[key].destroy();
}
});
if (checkbox) {
discardElement(item.checkbox);
}
},
/**
* Destroys the legend.
*/
destroy: function() {
var legend = this,
legendGroup = legend.group,
box = legend.box;
if (box) {
legend.box = box.destroy();
}
// Destroy items
each(this.getAllItems(), function(item) {
each(['legendItem', 'legendGroup'], function(key) {
if (item[key]) {
item[key] = item[key].destroy();
}
});
});
if (legendGroup) {
legend.group = legendGroup.destroy();
}
},
/**
* Position the checkboxes after the width is determined
*/
positionCheckboxes: function(scrollOffset) {
// Position the HTML checkboxes over their legend items once the legend
// group has been aligned; hides checkboxes scrolled out of the clip area.
var alignAttr = this.group.alignAttr,
translateY,
clipHeight = this.clipHeight || this.legendHeight,
titleHeight = this.titleHeight;
if (alignAttr) {
translateY = alignAttr.translateY;
each(this.allItems, function(item) {
var checkbox = item.checkbox,
top;
if (checkbox) {
top = translateY + titleHeight + checkbox.y + (scrollOffset || 0) + 3;
css(checkbox, {
left: (alignAttr.translateX + item.checkboxOffset + checkbox.x - 20) + 'px',
top: top + 'px',
// Hide checkboxes outside the visible (clipped) legend area
display: top > translateY - 6 && top < translateY + clipHeight - 6 ? '' : 'none'
});
}
});
}
},
/**
* Render the legend title on top of the legend
*/
renderTitle: function() {
// Render (once) and measure the legend title, then push the content group
// down by the title height. Records this.titleHeight for layout.
var options = this.options,
padding = this.padding,
titleOptions = options.title,
titleHeight = 0,
bBox;
if (titleOptions.text) {
if (!this.title) {
this.title = this.chart.renderer.label(titleOptions.text, padding - 3, padding - 4, null, null, null, null, null, 'legend-title')
.attr({
zIndex: 1
})
.add(this.group);
}
bBox = this.title.getBBox();
titleHeight = bBox.height;
this.offsetWidth = bBox.width; // #1717
this.contentGroup.attr({
translateY: titleHeight
});
}
this.titleHeight = titleHeight;
},
/**
* Set the legend item text
*/
setText: function(item) {
var options = this.options;
item.legendItem.attr({
text: options.labelFormat ? H.format(options.labelFormat, item) : options.labelFormatter.call(item)
});
},
/**
* Render a single specific legend item
* @param {Object} item A series or point
*/
renderItem: function(item) {
// Create (first call) or update one legend item: its group, label, symbol
// and optional checkbox, then advance the running itemX/itemY layout cursor.
var legend = this,
chart = legend.chart,
renderer = chart.renderer,
options = legend.options,
horizontal = options.layout === 'horizontal',
symbolWidth = legend.symbolWidth,
symbolPadding = options.symbolPadding,
padding = legend.padding,
itemDistance = horizontal ? pick(options.itemDistance, 20) : 0,
ltr = !options.rtl,
itemHeight,
widthOption = options.width,
itemMarginBottom = options.itemMarginBottom || 0,
itemMarginTop = legend.itemMarginTop,
initialItemX = legend.initialItemX,
bBox,
itemWidth,
li = item.legendItem,
// item without a .series property is itself a series; otherwise a point
isSeries = !item.series,
series = !isSeries && item.series.drawLegendSymbol ? item.series : item,
seriesOptions = series.options,
showCheckbox = legend.createCheckboxForItem && seriesOptions && seriesOptions.showCheckbox,
useHTML = options.useHTML,
fontSize = 12;
if (!li) { // generate it once, later move it
// Generate the group box
// A group to hold the symbol and text. Text is to be appended in Legend class.
item.legendGroup = renderer.g('legend-item')
.addClass('highcharts-' + series.type + '-series highcharts-color-' + item.colorIndex + ' ' +
(item.options.className || '') +
(isSeries ? 'highcharts-series-' + item.index : '')
)
.attr({
zIndex: 1
})
.add(legend.scrollGroup);
// Generate the list item text and add it to the group
item.legendItem = li = renderer.text(
'',
ltr ? symbolWidth + symbolPadding : -symbolPadding,
legend.baseline || 0,
useHTML
)
.attr({
align: ltr ? 'left' : 'right',
zIndex: 2
})
.add(item.legendGroup);
// Get the baseline for the first item - the font size is equal for all
if (!legend.baseline) {
legend.fontMetrics = renderer.fontMetrics(
fontSize,
li
);
legend.baseline = legend.fontMetrics.f + 3 + itemMarginTop;
li.attr('y', legend.baseline);
}
// Draw the legend symbol inside the group box
series.drawLegendSymbol(legend, item);
if (legend.setItemEvents) {
legend.setItemEvents(item, li, useHTML);
}
// add the HTML checkbox on top
if (showCheckbox) {
legend.createCheckboxForItem(item);
}
}
// Colorize the items
legend.colorizeItem(item, item.visible);
// Always update the text
legend.setText(item);
// calculate the positions for the next line
bBox = li.getBBox();
itemWidth = item.checkboxOffset =
options.itemWidth ||
item.legendItemWidth ||
symbolWidth + symbolPadding + bBox.width + itemDistance + (showCheckbox ? 20 : 0);
legend.itemHeight = itemHeight = Math.round(item.legendItemHeight || bBox.height);
// if the item exceeds the width, start a new line
if (horizontal && legend.itemX - initialItemX + itemWidth >
(widthOption || (chart.chartWidth - 2 * padding - initialItemX - options.x))) {
legend.itemX = initialItemX;
legend.itemY += itemMarginTop + legend.lastLineHeight + itemMarginBottom;
legend.lastLineHeight = 0; // reset for next line (#915, #3976)
}
// If the item exceeds the height, start a new column
/*if (!horizontal && legend.itemY + options.y + itemHeight > chart.chartHeight - spacingTop - spacingBottom) {
legend.itemY = legend.initialItemY;
legend.itemX += legend.maxItemWidth;
legend.maxItemWidth = 0;
}*/
// Set the edge positions
legend.maxItemWidth = Math.max(legend.maxItemWidth, itemWidth);
legend.lastItemY = itemMarginTop + legend.itemY + itemMarginBottom;
legend.lastLineHeight = Math.max(itemHeight, legend.lastLineHeight); // #915
// cache the position of the newly generated or reordered items
item._legendItemPos = [legend.itemX, legend.itemY];
// advance
if (horizontal) {
legend.itemX += itemWidth;
} else {
legend.itemY += itemMarginTop + itemHeight + itemMarginBottom;
legend.lastLineHeight = itemHeight;
}
// the width of the widest item
legend.offsetWidth = widthOption || Math.max(
(horizontal ? legend.itemX - initialItemX - itemDistance : itemWidth) + padding,
legend.offsetWidth
);
},
/**
* Get all items, which is one item per series for normal series and one item per point
* for pie series.
*/
getAllItems: function() {
// Collect every legend item: one per series, or one per point for series
// with legendType 'point' (e.g. pies use this via legendItems/data).
var allItems = [];
each(this.chart.series, function(series) {
var seriesOptions = series && series.options;
// Handle showInLegend. If the series is linked to another series, defaults to false.
// pick() chain: explicit showInLegend wins; else linked series default to
// false; else the final default is true.
if (series && pick(seriesOptions.showInLegend, !defined(seriesOptions.linkedTo) ? undefined : false, true)) {
// Use points or series for the legend item depending on legendType
allItems = allItems.concat(
series.legendItems ||
(seriesOptions.legendType === 'point' ?
series.data :
series)
);
}
});
return allItems;
},
/**
* Adjust the chart margins by reserving space for the legend on only one side
* of the chart. If the position is set to a corner, top or bottom is reserved
* for horizontal legends and left or right for vertical ones.
*/
adjustMargins: function(margin, spacing) {
// Reserve chart margin space for a non-floating legend on exactly one side,
// chosen from the align/verticalAlign/layout initials (e.g. 'rth').
var chart = this.chart,
options = this.options,
// Use the first letter of each alignment option in order to detect the side
alignment = options.align.charAt(0) + options.verticalAlign.charAt(0) + options.layout.charAt(0); // #4189 - use charAt(x) notation instead of [x] for IE7
if (!options.floating) {
// One regex per side: top, right, bottom, left (matching marginNames order)
each([
/(lth|ct|rth)/,
/(rtv|rm|rbv)/,
/(rbh|cb|lbh)/,
/(lbv|lm|ltv)/
], function(alignments, side) {
if (alignments.test(alignment) && !defined(margin[side])) {
// Now we have detected on which side of the chart we should reserve space for the legend
chart[marginNames[side]] = Math.max(
chart[marginNames[side]],
chart.legend[(side + 1) % 2 ? 'legendHeight' : 'legendWidth'] + [1, -1, -1, 1][side] * options[(side % 2) ? 'x' : 'y'] +
pick(options.margin, 12) +
spacing[side]
);
}
});
}
},
/**
* Render the legend. This method can be called both before and after
* chart.render. If called after, it will only rearrange items instead
* of creating new ones.
*/
render: function() {
// Full legend render/re-render: collects and sorts items, lays them out,
// sizes the border box, then aligns the group within the spacing box.
var legend = this,
chart = legend.chart,
renderer = chart.renderer,
legendGroup = legend.group,
allItems,
display,
legendWidth,
legendHeight,
box = legend.box,
options = legend.options,
padding = legend.padding;
// Reset the running layout cursor used by renderItem
legend.itemX = legend.initialItemX;
legend.itemY = legend.initialItemY;
legend.offsetWidth = 0;
legend.lastItemY = 0;
if (!legendGroup) {
legend.group = legendGroup = renderer.g('legend')
.attr({
zIndex: 7
})
.add();
legend.contentGroup = renderer.g()
.attr({
zIndex: 1
}) // above background
.add(legendGroup);
legend.scrollGroup = renderer.g()
.add(legend.contentGroup);
}
legend.renderTitle();
// add each series or point
allItems = legend.getAllItems();
// sort by legendIndex
stableSort(allItems, function(a, b) {
return ((a.options && a.options.legendIndex) || 0) - ((b.options && b.options.legendIndex) || 0);
});
// reversed legend
if (options.reversed) {
allItems.reverse();
}
legend.allItems = allItems;
legend.display = display = !!allItems.length;
// render the items
legend.lastLineHeight = 0;
each(allItems, function(item) {
legend.renderItem(item);
});
// Get the box
legendWidth = (options.width || legend.offsetWidth) + padding;
legendHeight = legend.lastItemY + legend.lastLineHeight + legend.titleHeight;
// handleOverflow may cap the height and add paging navigation
legendHeight = legend.handleOverflow(legendHeight);
legendHeight += padding;
// Draw the border and/or background
if (!box) {
legend.box = box = renderer.rect()
.addClass('highcharts-legend-box')
.attr({
r: options.borderRadius
})
.add(legendGroup);
box.isNew = true;
}
if (legendWidth > 0 && legendHeight > 0) {
box[box.isNew ? 'attr' : 'animate'](
box.crisp({
x: 0,
y: 0,
width: legendWidth,
height: legendHeight
}, box.strokeWidth())
);
box.isNew = false;
}
// hide the border if no items
box[display ? 'show' : 'hide']();
// Open for responsiveness
if (legendGroup.getStyle('display') === 'none') {
legendWidth = legendHeight = 0;
}
legend.legendWidth = legendWidth;
legend.legendHeight = legendHeight;
// Now that the legend width and height are established, put the items in the
// final position
each(allItems, function(item) {
legend.positionItem(item);
});
// 1.x compatibility: positioning based on style
/*var props = ['left', 'right', 'top', 'bottom'],
prop,
i = 4;
while (i--) {
prop = props[i];
if (options.style[prop] && options.style[prop] !== 'auto') {
options[i < 2 ? 'align' : 'verticalAlign'] = prop;
options[i < 2 ? 'x' : 'y'] = pInt(options.style[prop]) * (i % 2 ? -1 : 1);
}
}*/
if (display) {
legendGroup.align(extend({
width: legendWidth,
height: legendHeight
}, options), true, 'spacingBox');
}
if (!chart.isResizing) {
this.positionCheckboxes();
}
},
/**
* Set up the overflow handling by adding navigation with up and down arrows below the
* legend.
*/
handleOverflow: function(legendHeight) {
// When the legend is taller than the available space, clip it, compute page
// break positions, and add up/down navigation arrows. Returns the (possibly
// capped) legend height.
var legend = this,
chart = this.chart,
renderer = chart.renderer,
options = this.options,
optionsY = options.y,
alignTop = options.verticalAlign === 'top',
spaceHeight = chart.spacingBox.height + (alignTop ? -optionsY : optionsY) - this.padding,
maxHeight = options.maxHeight,
clipHeight,
clipRect = this.clipRect,
navOptions = options.navigation,
animation = pick(navOptions.animation, true),
arrowSize = navOptions.arrowSize || 12,
nav = this.nav,
pages = this.pages,
padding = this.padding,
lastY,
allItems = this.allItems,
clipToHeight = function(height) {
clipRect.attr({
height: height
});
// useHTML
if (legend.contentGroup.div) {
legend.contentGroup.div.style.clip = 'rect(' + padding + 'px,9999px,' + (padding + height) + 'px,0)';
}
};
// Adjust the height
if (options.layout === 'horizontal') {
spaceHeight /= 2;
}
if (maxHeight) {
spaceHeight = Math.min(spaceHeight, maxHeight);
}
// Reset the legend height and adjust the clipping rectangle
pages.length = 0;
if (legendHeight > spaceHeight && navOptions.enabled !== false) {
this.clipHeight = clipHeight = Math.max(spaceHeight - 20 - this.titleHeight - padding, 0);
this.currentPage = pick(this.currentPage, 1);
this.fullHeight = legendHeight;
// Fill pages with Y positions so that the top of each a legend item defines
// the scroll top for each page (#2098)
each(allItems, function(item, i) {
var y = item._legendItemPos[1],
h = Math.round(item.legendItem.getBBox().height),
len = pages.length;
if (!len || (y - pages[len - 1] > clipHeight && (lastY || y) !== pages[len - 1])) {
pages.push(lastY || y);
len++;
}
// The last item may need its own page if it overflows the previous one
if (i === allItems.length - 1 && y + h - pages[len - 1] > clipHeight) {
pages.push(y);
}
if (y !== lastY) {
lastY = y;
}
});
// Only apply clipping if needed. Clipping causes blurred legend in PDF export (#1787)
if (!clipRect) {
clipRect = legend.clipRect = renderer.clipRect(0, padding, 9999, 0);
legend.contentGroup.clip(clipRect);
}
clipToHeight(clipHeight);
// Add navigation elements
if (!nav) {
this.nav = nav = renderer.g().attr({
zIndex: 1
}).add(this.group);
this.up = renderer.symbol('triangle', 0, 0, arrowSize, arrowSize)
.on('click', function() {
legend.scroll(-1, animation);
})
.add(nav);
this.pager = renderer.text('', 15, 10)
.addClass('highcharts-legend-navigation')
.add(nav);
this.down = renderer.symbol('triangle-down', 0, 0, arrowSize, arrowSize)
.on('click', function() {
legend.scroll(1, animation);
})
.add(nav);
}
// Set initial position
legend.scroll(0);
legendHeight = spaceHeight;
} else if (nav) {
// No overflow any more: remove clipping and hide the navigation
clipToHeight(chart.chartHeight);
nav.hide();
this.scrollGroup.attr({
translateY: 1
});
this.clipHeight = 0; // #1379
}
return legendHeight;
},
/**
 * Scroll the legend by a number of pages.
 * @param {Number} scrollBy The number of pages to scroll; negative values
 *        scroll backwards.
 * @param {Object|Boolean} animation Whether and how to animate the scroll,
 *        forwarded to setAnimation.
 */
scroll: function(scrollBy, animation) {
    var pages = this.pages,
        pageCount = pages.length,
        currentPage = this.currentPage + scrollBy,
        clipHeight = this.clipHeight,
        navOptions = this.options.navigation, // declared but not referenced below in this block
        pager = this.pager,
        padding = this.padding,
        scrollOffset;
    // When resizing while looking at the last page
    if (currentPage > pageCount) {
        currentPage = pageCount;
    }
    // Out-of-range scrolls (e.g. scrolling up from page 1) are ignored
    if (currentPage > 0) {
        if (animation !== undefined) {
            setAnimation(animation, this.chart);
        }
        // Position the navigation bar just below the clipped legend content
        this.nav.attr({
            translateX: padding,
            translateY: clipHeight + this.padding + 7 + this.titleHeight,
            visibility: 'visible'
        });
        // Grey out the up arrow on the first page
        this.up.attr({
            'class': currentPage === 1 ? 'highcharts-legend-nav-inactive' : 'highcharts-legend-nav-active'
        });
        pager.attr({
            text: currentPage + '/' + pageCount
        });
        // Grey out the down arrow on the last page
        this.down.attr({
            'x': 18 + this.pager.getBBox().width, // adjust to text width
            'class': currentPage === pageCount ? 'highcharts-legend-nav-inactive' : 'highcharts-legend-nav-active'
        });
        // Translate the scroll group so the first item of the current page
        // lines up with the initial item Y position
        scrollOffset = -pages[currentPage - 1] + this.initialItemY;
        this.scrollGroup.animate({
            translateY: scrollOffset
        });
        this.currentPage = currentPage;
        this.positionCheckboxes(scrollOffset);
    }
}
};
/*
 * LegendSymbolMixin
 */
H.LegendSymbolMixin = {
    /**
     * Draw a rectangle (or square) marker for the series or point in the
     * legend.
     *
     * @param {Object} legend The legend object
     * @param {Object} item The series (this) or point
     */
    drawRectangle: function(legend, item) {
        var legendOptions = legend.options,
            height = legendOptions.symbolHeight || legend.fontMetrics.f,
            isSquare = legendOptions.squareSymbol,
            width = isSquare ? height : legend.symbolWidth,
            // Center a square symbol within the regular symbol width
            x = isSquare ? (legend.symbolWidth - height) / 2 : 0;
        item.legendSymbol = this.chart.renderer.rect(
                x,
                legend.baseline - height + 1, // #3988
                width,
                height,
                pick(legend.options.symbolRadius, height / 2)
            )
            .addClass('highcharts-point')
            .attr({
                zIndex: 3
            })
            .add(item.legendGroup);
    },
    /**
     * Draw a line with an optional marker for the series in the legend.
     * Overridable per series type through
     * Highcharts.seriesTypes[type].prototype.drawLegendSymbols.
     *
     * @param {Object} legend The legend object
     */
    drawLineMarker: function(legend) {
        var seriesOptions = this.options,
            markerOptions = seriesOptions.marker,
            width = legend.symbolWidth,
            renderer = this.chart.renderer,
            group = this.legendGroup,
            midY = legend.baseline - Math.round(legend.fontMetrics.b * 0.3),
            radius,
            marker;
        // The horizontal line through the symbol area
        this.legendLine = renderer.path([
                'M', 0, midY,
                'L', width, midY
            ])
            .addClass('highcharts-graph')
            .attr({})
            .add(group);
        // The point marker, centered on the line
        if (markerOptions && markerOptions.enabled !== false) {
            // Image symbols get no radius
            radius = this.symbol.indexOf('url') === 0 ? 0 : markerOptions.radius;
            this.legendSymbol = marker = renderer.symbol(
                    this.symbol,
                    (width / 2) - radius,
                    midY - radius,
                    2 * radius,
                    2 * radius,
                    markerOptions
                )
                .addClass('highcharts-point')
                .add(group);
            marker.isMarker = true;
        }
    }
};
// Workaround for #2030, horizontal legend items not displaying in IE11 Preview,
// and for #2580, a similar drawing flaw in Firefox 26.
// Explore if there's a general cause for this. The problem may be related
// to nested group elements, as the legend item texts are within 4 group elements.
if (/Trident\/7\.0/.test(win.navigator.userAgent) || isFirefox) {
    wrap(Legend.prototype, 'positionItem', function(proceed, item) {
        var legend = this;
        function runPositionItem() {
            // If the chart was destroyed synchronously, the position data
            // is gone (#2030)
            if (item._legendItemPos) {
                proceed.call(legend, item);
            }
        }
        // Do it now, for export and to get checkbox placement
        runPositionItem();
        // Do it again asynchronously to work around the core drawing flaw
        setTimeout(runPositionItem);
    });
}
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
animate = H.animate,
animObject = H.animObject,
attr = H.attr,
doc = H.doc,
Axis = H.Axis, // @todo add as requirement
createElement = H.createElement,
defaultOptions = H.defaultOptions,
discardElement = H.discardElement,
charts = H.charts,
css = H.css,
defined = H.defined,
each = H.each,
error = H.error,
extend = H.extend,
fireEvent = H.fireEvent,
getStyle = H.getStyle,
grep = H.grep,
isNumber = H.isNumber,
isObject = H.isObject,
isString = H.isString,
Legend = H.Legend, // @todo add as requirement
marginNames = H.marginNames,
merge = H.merge,
Pointer = H.Pointer, // @todo add as requirement
pick = H.pick,
pInt = H.pInt,
removeEvent = H.removeEvent,
seriesTypes = H.seriesTypes,
splat = H.splat,
svg = H.svg,
syncTimeout = H.syncTimeout,
win = H.win,
Renderer = H.Renderer;
/**
 * The Chart class.
 * @param {String|Object} renderTo The DOM element to render to, or its id
 * @param {Object} options The chart configuration object
 * @param {Function} callback Function to run when the chart has loaded
 */
var Chart = H.Chart = function() {
    // All argument handling (including the optional leading renderTo)
    // is delegated to getArgs
    this.getArgs.apply(this, arguments);
};
/**
 * Factory alternative to `new Highcharts.Chart(...)`. Arguments are
 * forwarded unchanged to the Chart constructor.
 */
H.chart = function(a, b, c) {
    return new Chart(a, b, c);
};
Chart.prototype = {
    /**
     * Hook for modules: callbacks registered here run for every new chart
     */
    callbacks: [],
/**
* Handle the arguments passed to the constructor
* @returns {Array} Arguments without renderTo
*/
getArgs: function() {
var args = [].slice.call(arguments);
// Remove the optional first argument, renderTo, and
// set it on this.
if (isString(args[0]) || args[0].nodeName) {
this.renderTo = args.shift();
}
this.init(args[0], args[1]);
},
/**
 * Initialize the chart: merge options, set up instance collections,
 * register the chart in the global lookup and bind chart-level events,
 * then hand over to firstRender.
 * @param {Object} userOptions The user-supplied configuration object
 * @param {Function} callback Function to run when the chart has loaded
 */
init: function(userOptions, callback) {
    // Handle regular options
    var options,
        seriesOptions = userOptions.series; // skip merging data points to increase performance
    userOptions.series = null;
    options = merge(defaultOptions, userOptions); // do the merge
    options.series = userOptions.series = seriesOptions; // set back the series data
    this.userOptions = userOptions;
    this.respRules = [];
    var optionsChart = options.chart;
    var chartEvents = optionsChart.events;
    this.margin = [];
    this.spacing = [];
    //this.runChartClick = chartEvents && !!chartEvents.click;
    this.bounds = {
        h: {},
        v: {}
    }; // Pixel data bounds for touch zoom
    this.callback = callback;
    this.isResizing = 0;
    this.options = options;
    //chartTitleOptions = undefined;
    //chartSubtitleOptions = undefined;
    this.axes = [];
    this.series = [];
    this.hasCartesianSeries = optionsChart.showAxes;
    //this.axisOffset = undefined;
    //this.inverted = undefined;
    //this.loadingShown = undefined;
    //this.container = undefined;
    //this.chartWidth = undefined;
    //this.chartHeight = undefined;
    //this.marginRight = undefined;
    //this.marginBottom = undefined;
    //this.containerWidth = undefined;
    //this.containerHeight = undefined;
    //this.oldChartWidth = undefined;
    //this.oldChartHeight = undefined;
    //this.renderTo = undefined;
    //this.renderToClone = undefined;
    //this.spacingBox = undefined
    //this.legend = undefined;
    // Elements
    //this.chartBackground = undefined;
    //this.plotBackground = undefined;
    //this.plotBGImage = undefined;
    //this.plotBorder = undefined;
    //this.loadingDiv = undefined;
    //this.loadingSpan = undefined;
    var chart = this,
        eventType;
    // Add the chart to the global lookup
    chart.index = charts.length;
    charts.push(chart);
    H.chartCount++;
    // Chart event handlers
    if (chartEvents) {
        for (eventType in chartEvents) {
            addEvent(chart, eventType, chartEvents[eventType]);
        }
    }
    chart.xAxis = [];
    chart.yAxis = [];
    chart.pointCount = chart.colorCounter = chart.symbolCounter = 0;
    // Continue initialization in firstRender (defined elsewhere in the file)
    chart.firstRender();
},
/**
* Initialize an individual series, called internally before render time
*/
initSeries: function(options) {
var chart = this,
optionsChart = chart.options.chart,
type = options.type || optionsChart.type || optionsChart.defaultSeriesType,
series,
Constr = seriesTypes[type];
// No such series type
if (!Constr) {
error(17, true);
}
series = new Constr();
series.init(this, options);
return series;
},
/**
* Check whether a given point is within the plot area
*
* @param {Number} plotX Pixel x relative to the plot area
* @param {Number} plotY Pixel y relative to the plot area
* @param {Boolean} inverted Whether the chart is inverted
*/
isInsidePlot: function(plotX, plotY, inverted) {
var x = inverted ? plotY : plotX,
y = inverted ? plotX : plotY;
return x >= 0 &&
x <= this.plotWidth &&
y >= 0 &&
y <= this.plotHeight;
},
/**
 * Redraw legend, axes or series based on updated data. Dirty flags set
 * elsewhere (isDirtyLegend, isDirtyBox, series.isDirty) decide how much
 * work is done.
 *
 * @param {Boolean|Object} animation Whether to apply animation, and optionally animation
 *    configuration
 */
redraw: function(animation) {
    var chart = this,
        axes = chart.axes,
        series = chart.series,
        pointer = chart.pointer,
        legend = chart.legend,
        redrawLegend = chart.isDirtyLegend,
        hasStackedSeries,
        hasDirtyStacks,
        hasCartesianSeries = chart.hasCartesianSeries,
        isDirtyBox = chart.isDirtyBox,
        seriesLength = series.length,
        i = seriesLength,
        serie,
        renderer = chart.renderer,
        isHiddenChart = renderer.isHidden(),
        afterRedraw = [];
    H.setAnimation(animation, chart);
    // A hidden container has no measurable size; work in an off-screen clone
    if (isHiddenChart) {
        chart.cloneRenderTo();
    }
    // Adjust title layout (reflow multiline text)
    chart.layOutTitles();
    // link stacked series: one dirty stacked series dirties them all
    while (i--) {
        serie = series[i];
        if (serie.options.stacking) {
            hasStackedSeries = true;
            if (serie.isDirty) {
                hasDirtyStacks = true;
                break;
            }
        }
    }
    if (hasDirtyStacks) { // mark others as dirty
        i = seriesLength;
        while (i--) {
            serie = series[i];
            if (serie.options.stacking) {
                serie.isDirty = true;
            }
        }
    }
    // Handle updated data in the series
    each(series, function(serie) {
        if (serie.isDirty) {
            // Point-type legends (e.g. pies) need a legend redraw on data change
            if (serie.options.legendType === 'point') {
                if (serie.updateTotals) {
                    serie.updateTotals();
                }
                redrawLegend = true;
            }
        }
        if (serie.isDirtyData) {
            fireEvent(serie, 'updatedData');
        }
    });
    // handle added or removed series
    if (redrawLegend && legend.options.enabled) { // series or pie points are added or removed
        // draw legend graphics
        legend.render();
        chart.isDirtyLegend = false;
    }
    // reset stacks
    if (hasStackedSeries) {
        chart.getStacks();
    }
    if (hasCartesianSeries) {
        // set axes scales
        each(axes, function(axis) {
            axis.updateNames();
            axis.setScale();
        });
    }
    chart.getMargins(); // #3098
    if (hasCartesianSeries) {
        // If one axis is dirty, all axes must be redrawn (#792, #2169)
        each(axes, function(axis) {
            if (axis.isDirty) {
                isDirtyBox = true;
            }
        });
        // redraw axes
        each(axes, function(axis) {
            // Fire 'afterSetExtremes' only if extremes are set
            var key = axis.min + ',' + axis.max;
            if (axis.extKey !== key) { // #821, #4452
                axis.extKey = key;
                afterRedraw.push(function() { // prevent a recursive call to chart.redraw() (#1119)
                    fireEvent(axis, 'afterSetExtremes', extend(axis.eventArgs, axis.getExtremes())); // #747, #751
                    delete axis.eventArgs;
                });
            }
            if (isDirtyBox || hasStackedSeries) {
                axis.redraw();
            }
        });
    }
    // the plot areas size has changed
    if (isDirtyBox) {
        chart.drawChartBox();
    }
    // redraw affected series
    each(series, function(serie) {
        if ((isDirtyBox || serie.isDirty) && serie.visible) {
            serie.redraw();
        }
    });
    // move tooltip or reset
    if (pointer) {
        pointer.reset(true);
    }
    // redraw if canvas
    renderer.draw();
    // fire the event
    fireEvent(chart, 'redraw');
    // Tear down the off-screen clone again
    if (isHiddenChart) {
        chart.cloneRenderTo(true);
    }
    // Fire callbacks that are put on hold until after the redraw
    each(afterRedraw, function(callback) {
        callback.call();
    });
},
/**
* Get an axis, series or point object by id.
* @param id {String} The id as given in the configuration options
*/
get: function(id) {
var chart = this,
axes = chart.axes,
series = chart.series;
var i,
j,
points;
// search axes
for (i = 0; i < axes.length; i++) {
if (axes[i].options.id === id) {
return axes[i];
}
}
// search series
for (i = 0; i < series.length; i++) {
if (series[i].options.id === id) {
return series[i];
}
}
// search points
for (i = 0; i < series.length; i++) {
points = series[i].points || [];
for (j = 0; j < points.length; j++) {
if (points[j].id === id) {
return points[j];
}
}
}
return null;
},
/**
* Create the Axis instances based on the config options
*/
getAxes: function() {
var chart = this,
options = this.options,
xAxisOptions = options.xAxis = splat(options.xAxis || {}),
yAxisOptions = options.yAxis = splat(options.yAxis || {}),
optionsArray;
// make sure the options are arrays and add some members
each(xAxisOptions, function(axis, i) {
axis.index = i;
axis.isX = true;
});
each(yAxisOptions, function(axis, i) {
axis.index = i;
});
// concatenate all axis options into one array
optionsArray = xAxisOptions.concat(yAxisOptions);
each(optionsArray, function(axisOptions) {
new Axis(chart, axisOptions); // eslint-disable-line no-new
});
},
/**
* Get the currently selected points from all series
*/
getSelectedPoints: function() {
var points = [];
each(this.series, function(serie) {
points = points.concat(grep(serie.points || [], function(point) {
return point.selected;
}));
});
return points;
},
/**
* Get the currently selected series
*/
getSelectedSeries: function() {
return grep(this.series, function(serie) {
return serie.selected;
});
},
/**
 * Show the title and subtitle of the chart. Existing title elements are
 * destroyed and recreated from the merged options.
 *
 * @param titleOptions {Object} New title options
 * @param subtitleOptions {Object} New subtitle options
 * @param redraw {Boolean} Whether to redraw after laying out the titles
 *
 */
setTitle: function(titleOptions, subtitleOptions, redraw) {
    var chart = this,
        options = chart.options,
        chartTitleOptions,
        chartSubtitleOptions;
    chartTitleOptions = options.title = merge(options.title, titleOptions);
    chartSubtitleOptions = options.subtitle = merge(options.subtitle, subtitleOptions);
    // add title and subtitle
    each([
        ['title', titleOptions, chartTitleOptions],
        ['subtitle', subtitleOptions, chartSubtitleOptions]
    ], function(arr, i) {
        var name = arr[0],
            title = chart[name],
            titleOptions = arr[1],
            chartTitleOptions = arr[2];
        // Destroy the existing element when new options were passed for it
        if (title && titleOptions) {
            chart[name] = title = title.destroy(); // remove old
        }
        if (chartTitleOptions && chartTitleOptions.text && !title) {
            chart[name] = chart.renderer.text(
                    chartTitleOptions.text,
                    0,
                    0,
                    chartTitleOptions.useHTML
                )
                .attr({
                    align: chartTitleOptions.align,
                    'class': 'highcharts-' + name,
                    zIndex: chartTitleOptions.zIndex || 4
                })
                .add();
            // Update methods, shortcut to Chart.setTitle.
            // i === 0 for the title, i === 1 for the subtitle
            chart[name].update = function(o) {
                chart.setTitle(!i && o, i && o);
            };
        }
    });
    chart.layOutTitles(redraw);
},
/**
 * Lay out the chart title and subtitle and cache the full offset height
 * for use in getMargins. Triggers a redraw when the offset changed and
 * the chart has already rendered.
 * @param {Boolean} redraw Whether to allow a redraw (defaults to true)
 */
layOutTitles: function(redraw) {
    var titleOffset = 0,
        requiresDirtyBox,
        renderer = this.renderer,
        spacingBox = this.spacingBox;
    // Lay out the title and the subtitle respectively
    each(['title', 'subtitle'], function(key) {
        var title = this[key],
            titleOptions = this.options[key],
            titleSize;
        if (title) {
            // NOTE(review): titleSize is undefined at this call; fontMetrics
            // apparently falls back to measuring the element — confirm
            titleSize = renderer.fontMetrics(titleSize, title).b;
            title
                .css({
                    width: (titleOptions.width || spacingBox.width + titleOptions.widthAdjust) + 'px'
                })
                .align(extend({
                    y: titleOffset + titleSize + (key === 'title' ? -3 : 2)
                }, titleOptions), false, 'spacingBox');
            // Floating or vertically aligned titles take no vertical space
            if (!titleOptions.floating && !titleOptions.verticalAlign) {
                titleOffset = Math.ceil(titleOffset + title.getBBox().height);
            }
        }
    }, this);
    requiresDirtyBox = this.titleOffset !== titleOffset;
    this.titleOffset = titleOffset; // used in getMargins
    if (!this.isDirtyBox && requiresDirtyBox) {
        this.isDirtyBox = requiresDirtyBox;
        // Redraw if necessary (#2719, #2744)
        if (this.hasRendered && pick(redraw, true) && this.isDirtyBox) {
            this.redraw();
        }
    }
},
/**
* Get chart width and height according to options and container size
*/
getChartSize: function() {
var chart = this,
optionsChart = chart.options.chart,
widthOption = optionsChart.width,
heightOption = optionsChart.height,
renderTo = chart.renderToClone || chart.renderTo;
// Get inner width and height
if (!defined(widthOption)) {
chart.containerWidth = getStyle(renderTo, 'width');
}
if (!defined(heightOption)) {
chart.containerHeight = getStyle(renderTo, 'height');
}
chart.chartWidth = Math.max(0, widthOption || chart.containerWidth || 600); // #1393, 1460
chart.chartHeight = Math.max(0, pick(heightOption,
// the offsetHeight of an empty container is 0 in standard browsers, but 19 in IE7:
chart.containerHeight > 19 ? chart.containerHeight : 400));
},
/**
 * Create a clone of the chart's renderTo div and place it outside the viewport to allow
 * size computation on chart.render and chart.redraw.
 * @param {Boolean} revert When true, destroy the clone and move the
 *        content back into the real renderTo div
 */
cloneRenderTo: function(revert) {
    var clone = this.renderToClone,
        container = this.container;
    // Destroy the clone and bring the container back to the real renderTo div
    if (revert) {
        if (clone) {
            while (clone.childNodes.length) { // #5231
                this.renderTo.appendChild(clone.firstChild);
            }
            discardElement(clone);
            delete this.renderToClone;
        }
        // Set up the clone
    } else {
        // Detach the live container first so it isn't cloned along
        if (container && container.parentNode === this.renderTo) {
            this.renderTo.removeChild(container); // do not clone this
        }
        this.renderToClone = clone = this.renderTo.cloneNode(0);
        css(clone, {
            position: 'absolute',
            top: '-9999px',
            display: 'block' // #833
        });
        if (clone.style.setProperty) { // #2631
            clone.style.setProperty('display', 'block', 'important');
        }
        doc.body.appendChild(clone);
        if (container) {
            clone.appendChild(container);
        }
    }
},
/**
* Setter for the chart class name
*/
setClassName: function(className) {
this.container.className = 'highcharts-container ' + (className || '');
},
/**
 * Get the containing element, determine the size and create the inner
 * container div to hold the chart. Also destroys any previous chart in
 * the same element and instantiates the renderer.
 */
getContainer: function() {
    var chart = this,
        container,
        options = chart.options,
        optionsChart = options.chart,
        chartWidth,
        chartHeight,
        renderTo = chart.renderTo,
        indexAttrName = 'data-highcharts-chart',
        oldChartIndex,
        Ren,
        containerId = 'highcharts-' + H.idCounter++,
        containerStyle,
        key;
    if (!renderTo) {
        chart.renderTo = renderTo = optionsChart.renderTo;
    }
    // A string renderTo is treated as an element id
    if (isString(renderTo)) {
        chart.renderTo = renderTo = doc.getElementById(renderTo);
    }
    // Display an error if the renderTo is wrong
    if (!renderTo) {
        error(13, true);
    }
    // If the container already holds a chart, destroy it. The check for hasRendered is there
    // because web pages that are saved to disk from the browser, will preserve the data-highcharts-chart
    // attribute and the SVG contents, but not an interactive chart. So in this case,
    // charts[oldChartIndex] will point to the wrong chart if any (#2609).
    oldChartIndex = pInt(attr(renderTo, indexAttrName));
    if (isNumber(oldChartIndex) && charts[oldChartIndex] && charts[oldChartIndex].hasRendered) {
        charts[oldChartIndex].destroy();
    }
    // Make a reference to the chart from the div
    attr(renderTo, indexAttrName, chart.index);
    // remove previous chart
    renderTo.innerHTML = '';
    // If the container doesn't have an offsetWidth, it has or is a child of a node
    // that has display:none. We need to temporarily move it out to a visible
    // state to determine the size, else the legend and tooltips won't render
    // properly. The allowClone option is used in sparklines as a micro optimization,
    // saving about 1-2 ms each chart.
    if (!optionsChart.skipClone && !renderTo.offsetWidth) {
        chart.cloneRenderTo();
    }
    // get the width and height
    chart.getChartSize();
    chartWidth = chart.chartWidth;
    chartHeight = chart.chartHeight;
    // Create the inner container
    chart.container = container = createElement(
        'div', {
            id: containerId
        },
        containerStyle,
        chart.renderToClone || renderTo
    );
    // cache the cursor (#1650)
    chart._cursor = container.style.cursor;
    // Initialize the renderer; optionsChart.renderer may name an
    // alternative renderer class registered on H
    Ren = H[optionsChart.renderer] || Renderer;
    chart.renderer = new Ren(
        container,
        chartWidth,
        chartHeight,
        null,
        optionsChart.forExport,
        options.exporting && options.exporting.allowHTML
    );
    chart.setClassName(optionsChart.className);
    // Initialize definitions (e.g. SVG defs from options.defs)
    for (key in options.defs) {
        this.renderer.definition(options.defs[key]);
    }
    // Add a reference to the charts index
    chart.renderer.chartIndex = chart.index;
},
/**
* Calculate margins by rendering axis labels in a preliminary position. Title,
* subtitle and legend have already been rendered at this stage, but will be
* moved into their final positions
*/
getMargins: function(skipAxes) {
var chart = this,
spacing = chart.spacing,
margin = chart.margin,
titleOffset = chart.titleOffset;
chart.resetMargins();
// Adjust for title and subtitle
if (titleOffset && !defined(margin[0])) {
chart.plotTop = Math.max(chart.plotTop, titleOffset + chart.options.title.margin + spacing[0]);
}
// Adjust for legend
if (chart.legend.display) {
chart.legend.adjustMargins(margin, spacing);
}
// adjust for scroller
if (chart.extraBottomMargin) {
chart.marginBottom += chart.extraBottomMargin;
}
if (chart.extraTopMargin) {
chart.plotTop += chart.extraTopMargin;
}
if (!skipAxes) {
this.getAxisMargins();
}
},
getAxisMargins: function() {
var chart = this,
axisOffset = chart.axisOffset = [0, 0, 0, 0], // top, right, bottom, left
margin = chart.margin;
// pre-render axes to get labels offset width
if (chart.hasCartesianSeries) {
each(chart.axes, function(axis) {
if (axis.visible) {
axis.getOffset();
}
});
}
// Add the axis offsets
each(marginNames, function(m, side) {
if (!defined(margin[side])) {
chart[m] += axisOffset[side];
}
});
chart.setChartSize();
},
/**
 * Resize the chart to its container if size is not explicitly set.
 * Debounced via reflowTimeout when triggered by a window resize event.
 * @param {Object} e The resize event (optional; absent on direct calls)
 */
reflow: function(e) {
    var chart = this,
        optionsChart = chart.options.chart,
        renderTo = chart.renderTo,
        hasUserWidth = defined(optionsChart.width),
        width = optionsChart.width || getStyle(renderTo, 'width'),
        height = optionsChart.height || getStyle(renderTo, 'height'),
        target = e ? e.target : win;
    // Width and height checks for display:none. Target is doc in IE8 and Opera,
    // win in Firefox, Chrome and IE9.
    if (!hasUserWidth && !chart.isPrinting && width && height && (target === win || target === doc)) { // #1093
        if (width !== chart.containerWidth || height !== chart.containerHeight) {
            clearTimeout(chart.reflowTimeout);
            // When called from window.resize, e is set, else it's called directly (#2224)
            chart.reflowTimeout = syncTimeout(function() {
                if (chart.container) { // It may have been destroyed in the meantime (#1257)
                    chart.setSize(undefined, undefined, false);
                }
            }, e ? 100 : 0);
        }
        // Remember the measured size for the next comparison
        chart.containerWidth = width;
        chart.containerHeight = height;
    }
},
/**
* Add the event handlers necessary for auto resizing
*/
initReflow: function() {
var chart = this,
reflow = function(e) {
chart.reflow(e);
};
addEvent(win, 'resize', reflow);
addEvent(chart, 'destroy', function() {
removeEvent(win, 'resize', reflow);
});
// The following will add listeners to re-fit the chart before and after
// printing (#2284). However it only works in WebKit. Should have worked
// in Firefox, but not supported in IE.
/*
if (win.matchMedia) {
win.matchMedia('print').addListener(function reflow() {
chart.reflow();
});
}
*/
},
/**
 * Resize the chart to a given width and height. Undefined values keep
 * the corresponding option unchanged.
 * @param {Number} width
 * @param {Number} height
 * @param {Object|Boolean} animation
 */
setSize: function(width, height, animation) {
    var chart = this,
        renderer = chart.renderer,
        globalAnimation;
    // Handle the isResizing counter
    chart.isResizing += 1;
    // set the animation for the current process
    H.setAnimation(animation, chart);
    chart.oldChartHeight = chart.chartHeight;
    chart.oldChartWidth = chart.chartWidth;
    if (width !== undefined) {
        chart.options.chart.width = width;
    }
    if (height !== undefined) {
        chart.options.chart.height = height;
    }
    chart.getChartSize();
    // Resize the container with the global animation applied if enabled (#2503)
    chart.setChartSize(true);
    renderer.setSize(chart.chartWidth, chart.chartHeight, animation);
    // handle axes
    each(chart.axes, function(axis) {
        axis.isDirty = true;
        axis.setScale();
    });
    chart.isDirtyLegend = true; // force legend redraw
    chart.isDirtyBox = true; // force redraw of plot and chart border
    chart.layOutTitles(); // #2857
    chart.getMargins();
    if (chart.setResponsive) {
        chart.setResponsive(false);
    }
    chart.redraw(animation);
    chart.oldChartHeight = null;
    fireEvent(chart, 'resize');
    // Fire endResize and set isResizing back. If animation is disabled, fire without delay.
    // NOTE(review): globalAnimation is never assigned here, so animObject()
    // receives undefined and yields its default duration — confirm intended.
    syncTimeout(function() {
        if (chart) {
            fireEvent(chart, 'endResize', null, function() {
                chart.isResizing -= 1;
            });
        }
    }, animObject(globalAnimation).duration);
},
/**
 * Set the public chart properties (plot box, spacing box, clip box).
 * This is done before and after the pre-render to determine margin sizes.
 * @param {Boolean} skipAxes When true, do not update axis sizes/translations
 */
setChartSize: function(skipAxes) {
    var chart = this,
        inverted = chart.inverted,
        renderer = chart.renderer,
        chartWidth = chart.chartWidth,
        chartHeight = chart.chartHeight,
        optionsChart = chart.options.chart,
        spacing = chart.spacing,
        clipOffset = chart.clipOffset,
        clipX,
        clipY,
        plotLeft,
        plotTop,
        plotWidth,
        plotHeight,
        plotBorderWidth;
    // Round to whole pixels to avoid blurry rendering
    chart.plotLeft = plotLeft = Math.round(chart.plotLeft);
    chart.plotTop = plotTop = Math.round(chart.plotTop);
    chart.plotWidth = plotWidth = Math.max(0, Math.round(chartWidth - plotLeft - chart.marginRight));
    chart.plotHeight = plotHeight = Math.max(0, Math.round(chartHeight - plotTop - chart.marginBottom));
    // In inverted charts the logical X size is the pixel height
    chart.plotSizeX = inverted ? plotHeight : plotWidth;
    chart.plotSizeY = inverted ? plotWidth : plotHeight;
    chart.plotBorderWidth = optionsChart.plotBorderWidth || 0;
    // Set boxes used for alignment
    chart.spacingBox = renderer.spacingBox = {
        x: spacing[3],
        y: spacing[0],
        width: chartWidth - spacing[3] - spacing[1],
        height: chartHeight - spacing[0] - spacing[2]
    };
    chart.plotBox = renderer.plotBox = {
        x: plotLeft,
        y: plotTop,
        width: plotWidth,
        height: plotHeight
    };
    // Clip rectangle, shrunk by half the plot border so the border is not clipped
    plotBorderWidth = 2 * Math.floor(chart.plotBorderWidth / 2);
    clipX = Math.ceil(Math.max(plotBorderWidth, clipOffset[3]) / 2);
    clipY = Math.ceil(Math.max(plotBorderWidth, clipOffset[0]) / 2);
    chart.clipBox = {
        x: clipX,
        y: clipY,
        width: Math.floor(chart.plotSizeX - Math.max(plotBorderWidth, clipOffset[1]) / 2 - clipX),
        height: Math.max(0, Math.floor(chart.plotSizeY - Math.max(plotBorderWidth, clipOffset[2]) / 2 - clipY))
    };
    if (!skipAxes) {
        each(chart.axes, function(axis) {
            axis.setAxisSize();
            axis.setAxisTranslation();
        });
    }
},
/**
* Initial margins before auto size margins are applied
*/
resetMargins: function() {
var chart = this,
chartOptions = chart.options.chart;
// Create margin and spacing array
each(['margin', 'spacing'], function splashArrays(target) {
var value = chartOptions[target],
values = isObject(value) ? value : [value, value, value, value];
each(['Top', 'Right', 'Bottom', 'Left'], function(sideName, side) {
chart[target][side] = pick(chartOptions[target + sideName], values[side]);
});
});
// Set margin names like chart.plotTop, chart.plotLeft, chart.marginRight, chart.marginBottom.
each(marginNames, function(m, side) {
chart[m] = pick(chart.margin[side], chart.spacing[side]);
});
chart.axisOffset = [0, 0, 0, 0]; // top, right, bottom, left
chart.clipOffset = [0, 0, 0, 0];
},
/**
 * Draw the borders and backgrounds for chart and plot area
 */
drawChartBox: function() {
    var chart = this,
        optionsChart = chart.options.chart,
        renderer = chart.renderer,
        chartWidth = chart.chartWidth,
        chartHeight = chart.chartHeight,
        chartBackground = chart.chartBackground,
        plotBackground = chart.plotBackground,
        plotBorder = chart.plotBorder,
        chartBorderWidth,
        mgn,
        bgAttr,
        plotLeft = chart.plotLeft,
        plotTop = chart.plotTop,
        plotWidth = chart.plotWidth,
        plotHeight = chart.plotHeight,
        plotBox = chart.plotBox,
        clipRect = chart.clipRect,
        clipBox = chart.clipBox,
        // Elements that already exist are animated; newly created ones get
        // their attributes set directly ('attr')
        verb = 'animate';
    // Chart area
    if (!chartBackground) {
        chart.chartBackground = chartBackground = renderer.rect()
            .addClass('highcharts-background')
            .add();
        verb = 'attr';
    }
    // Inset by half the stroke width so the stroke stays inside the chart
    chartBorderWidth = mgn = chartBackground.strokeWidth();
    chartBackground[verb]({
        x: mgn / 2,
        y: mgn / 2,
        width: chartWidth - mgn - chartBorderWidth % 2,
        height: chartHeight - mgn - chartBorderWidth % 2,
        r: optionsChart.borderRadius
    });
    // Plot background
    verb = 'animate';
    if (!plotBackground) {
        verb = 'attr';
        chart.plotBackground = plotBackground = renderer.rect()
            .addClass('highcharts-plot-background')
            .add();
    }
    plotBackground[verb](plotBox);
    // Plot clip
    if (!clipRect) {
        chart.clipRect = renderer.clipRect(clipBox);
    } else {
        clipRect.animate({
            width: clipBox.width,
            height: clipBox.height
        });
    }
    // Plot area border
    verb = 'animate';
    if (!plotBorder) {
        verb = 'attr';
        chart.plotBorder = plotBorder = renderer.rect()
            .addClass('highcharts-plot-border')
            .attr({
                zIndex: 1 // Above the grid
            })
            .add();
    }
    plotBorder[verb](plotBorder.crisp({
        x: plotLeft,
        y: plotTop,
        width: plotWidth,
        height: plotHeight
    }, -plotBorder.strokeWidth())); //#3282 plotBorder should be negative;
    // reset
    chart.isDirtyBox = false;
},
/**
* Detect whether a certain chart property is needed based on inspecting its options
* and series. This mainly applies to the chart.inverted property, and in extensions to
* the chart.angular and chart.polar properties.
*/
propFromSeries: function() {
var chart = this,
optionsChart = chart.options.chart,
klass,
seriesOptions = chart.options.series,
i,
value;
each(['inverted', 'angular', 'polar'], function(key) {
// The default series type's class
klass = seriesTypes[optionsChart.type || optionsChart.defaultSeriesType];
// Get the value from available chart-wide properties
value =
optionsChart[key] || // It is set in the options
(klass && klass.prototype[key]); // The default series class requires it
// 4. Check if any the chart's series require it
i = seriesOptions && seriesOptions.length;
while (!value && i--) {
klass = seriesTypes[seriesOptions[i].type];
if (klass && klass.prototype[key]) {
value = true;
}
}
// Set the chart property
chart[key] = value;
});
},
/**
* Link two or more series together. This is done initially from Chart.render,
* and after Chart.addSeries and Series.remove.
*/
linkSeries: function() {
var chart = this,
chartSeries = chart.series;
// Reset links
each(chartSeries, function(series) {
series.linkedSeries.length = 0;
});
// Apply new links
each(chartSeries, function(series) {
var linkedTo = series.options.linkedTo;
if (isString(linkedTo)) {
if (linkedTo === ':previous') {
linkedTo = chart.series[series.index - 1];
} else {
linkedTo = chart.get(linkedTo);
}
if (linkedTo && linkedTo.linkedParent !== series) { // #3341 avoid mutual linking
linkedTo.linkedSeries.push(series);
series.linkedParent = linkedTo;
series.visible = pick(series.options.visible, linkedTo.options.visible, series.visible); // #3879
}
}
});
},
/**
* Render series for the chart
*/
renderSeries: function() {
each(this.series, function(serie) {
serie.translate();
serie.render();
});
},
/**
 * Render free-floating labels for the chart from the labels.items option.
 */
renderLabels: function() {
    var chart = this,
        labels = chart.options.labels;
    if (labels.items) {
        each(labels.items, function(label) {
            // NOTE(review): extend mutates its first argument, so the shared
            // labels.style object is modified here and the deletes below
            // remove left/top from it for subsequent items — confirm intended
            var style = extend(labels.style, label.style),
                x = pInt(style.left) + chart.plotLeft,
                y = pInt(style.top) + chart.plotTop + 12;
            // delete to prevent rewriting in IE
            delete style.left;
            delete style.top;
            chart.renderer.text(
                    label.html,
                    x,
                    y
                )
                .attr({
                    zIndex: 2
                })
                .css(style)
                .add();
        });
    }
},
/**
 * Render all graphics for the chart: titles, legend, axes, series,
 * labels and credits. Margins are estimated in a first pass and refined
 * in a second pass when the plot area changed significantly.
 */
render: function() {
    var chart = this,
        axes = chart.axes,
        renderer = chart.renderer,
        options = chart.options,
        tempWidth,
        tempHeight,
        redoHorizontal,
        redoVertical;
    // Title
    chart.setTitle();
    // Legend
    chart.legend = new Legend(chart, options.legend);
    // Get stacks
    if (chart.getStacks) {
        chart.getStacks();
    }
    // Get chart margins
    chart.getMargins(true);
    chart.setChartSize();
    // Record preliminary dimensions for later comparison
    tempWidth = chart.plotWidth;
    tempHeight = chart.plotHeight = chart.plotHeight - 21; // 21 is the most common correction for X axis labels
    // Get margins by pre-rendering axes
    each(axes, function(axis) {
        axis.setScale();
    });
    chart.getAxisMargins();
    // If the plot area size has changed significantly, calculate tick positions again
    redoHorizontal = tempWidth / chart.plotWidth > 1.1;
    redoVertical = tempHeight / chart.plotHeight > 1.05; // Height is more sensitive
    if (redoHorizontal || redoVertical) {
        each(axes, function(axis) {
            if ((axis.horiz && redoHorizontal) || (!axis.horiz && redoVertical)) {
                axis.setTickInterval(true); // update to reflect the new margins
            }
        });
        chart.getMargins(); // second pass to check for new labels
    }
    // Draw the borders and backgrounds
    chart.drawChartBox();
    // Axes
    if (chart.hasCartesianSeries) {
        each(axes, function(axis) {
            if (axis.visible) {
                axis.render();
            }
        });
    }
    // The series
    if (!chart.seriesGroup) {
        chart.seriesGroup = renderer.g('series-group')
            .attr({
                zIndex: 3
            })
            .add();
    }
    chart.renderSeries();
    // Labels
    chart.renderLabels();
    // Credits
    chart.addCredits();
    // Handle responsiveness
    if (chart.setResponsive) {
        chart.setResponsive();
    }
    // Set flag
    chart.hasRendered = true;
},
        /**
         * Show chart credits based on config options. The credits text is
         * rendered once and given an `update` method that destroys and
         * re-creates it with new options.
         * @param {Object} [credits] Override options, merged over the chart's
         *        configured `options.credits` before rendering.
         */
        addCredits: function(credits) {
            var chart = this;
            credits = merge(true, this.options.credits, credits);
            if (credits.enabled && !this.credits) {
                this.credits = this.renderer.text(
                        credits.text + (this.mapCredits || ''),
                        0,
                        0
                    )
                    .addClass('highcharts-credits')
                    .on('click', function() {
                        if (credits.href) {
                            win.location.href = credits.href;
                        }
                    })
                    .attr({
                        align: credits.position.align,
                        zIndex: 8
                    })
                    .add()
                    .align(credits.position);
                // Dynamically update
                this.credits.update = function(options) {
                    chart.credits = chart.credits.destroy();
                    chart.addCredits(options);
                };
            }
        },
        /**
         * Clean up memory usage: destroy all axes, series and chart-level SVG
         * objects, remove event listeners and empty the container so the
         * chart can be garbage collected.
         */
        destroy: function() {
            var chart = this,
                axes = chart.axes,
                series = chart.series,
                container = chart.container,
                i,
                parentNode = container && container.parentNode;
            // fire the chart.destroy event
            fireEvent(chart, 'destroy');
            // Delete the chart from charts lookup array
            charts[chart.index] = undefined;
            H.chartCount--;
            chart.renderTo.removeAttribute('data-highcharts-chart');
            // remove events
            removeEvent(chart);
            // ==== Destroy collections:
            // Destroy axes
            i = axes.length;
            while (i--) {
                axes[i] = axes[i].destroy();
            }
            // Destroy scroller & scroller series before destroying base series
            if (this.scroller && this.scroller.destroy) {
                this.scroller.destroy();
            }
            // Destroy each series
            i = series.length;
            while (i--) {
                series[i] = series[i].destroy();
            }
            // ==== Destroy chart properties:
            each(['title', 'subtitle', 'chartBackground', 'plotBackground', 'plotBGImage',
                'plotBorder', 'seriesGroup', 'clipRect', 'credits', 'pointer',
                'rangeSelector', 'legend', 'resetZoomButton', 'tooltip', 'renderer'
            ], function(name) {
                var prop = chart[name];
                if (prop && prop.destroy) {
                    chart[name] = prop.destroy();
                }
            });
            // remove container and all SVG
            if (container) { // can break in IE when destroyed before finished loading
                container.innerHTML = '';
                removeEvent(container);
                if (parentNode) {
                    discardElement(container);
                }
            }
            // clean it all up: null every own property of the chart instance
            for (i in chart) {
                delete chart[i];
            }
        },
        /**
         * VML namespaces can't be added until after complete. Listening
         * for Perini's doScroll hack is not enough. In legacy IE (no SVG),
         * defers the first render until the document is complete and
         * returns false; otherwise returns true immediately.
         */
        isReadyToRender: function() {
            var chart = this;
            // Note: win == win.top is required
            if ((!svg && (win == win.top && doc.readyState !== 'complete'))) { // eslint-disable-line eqeqeq
                doc.attachEvent('onreadystatechange', function() {
                    doc.detachEvent('onreadystatechange', chart.firstRender);
                    if (doc.readyState === 'complete') {
                        chart.firstRender();
                    }
                });
                return false;
            }
            return true;
        },
        /**
         * Prepare for first rendering after all data are loaded: create the
         * container, axes and series, fire the lifecycle events ('init',
         * 'beforeRender'), render, and fire 'load' when no external images
         * are pending.
         */
        firstRender: function() {
            var chart = this,
                options = chart.options;
            // Check whether the chart is ready to render
            if (!chart.isReadyToRender()) {
                return;
            }
            // Create the container
            chart.getContainer();
            // Run an early event after the container and renderer are established
            fireEvent(chart, 'init');
            chart.resetMargins();
            chart.setChartSize();
            // Set the common chart properties (mainly invert) from the given series
            chart.propFromSeries();
            // get axes
            chart.getAxes();
            // Initialize the series
            each(options.series || [], function(serieOptions) {
                chart.initSeries(serieOptions);
            });
            chart.linkSeries();
            // Run an event after axes and series are initialized, but before render. At this stage,
            // the series data is indexed and cached in the xData and yData arrays, so we can access
            // those before rendering. Used in Highstock.
            fireEvent(chart, 'beforeRender');
            // depends on inverted and on margins being set
            if (Pointer) {
                chart.pointer = new Pointer(chart, options);
            }
            chart.render();
            // add canvas
            chart.renderer.draw();
            // Fire the load event if there are no external images
            if (!chart.renderer.imgCount && chart.onload) {
                chart.onload();
            }
            // If the chart was rendered outside the top container, put it back in (#3679)
            chart.cloneRenderTo(true);
        },
        /**
         * On chart load: run user callbacks, fire the 'load' event, set up
         * auto-reflow, then null itself out so it only ever runs once.
         */
        onload: function() {
            // Run callbacks
            each([this.callback].concat(this.callbacks), function(fn) {
                if (fn && this.index !== undefined) { // Chart destroyed in its own callback (#3600)
                    fn.apply(this, [this]);
                }
            }, this);
            fireEvent(this, 'load');
            // Set up auto resize
            if (this.options.chart.reflow !== false) {
                this.initReflow();
            }
            // Don't run again
            this.onload = null;
        },
}; // end Chart
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Point,
each = H.each,
extend = H.extend,
erase = H.erase,
fireEvent = H.fireEvent,
format = H.format,
isArray = H.isArray,
isNumber = H.isNumber,
pick = H.pick,
removeEvent = H.removeEvent;
/**
* The Point object and prototype. Inheritable and used as base for PiePoint
*/
Point = H.Point = function() {};
Point.prototype = {
/**
* Initialize the point
* @param {Object} series The series object containing this point
* @param {Object} options The data in either number, array or object format
*/
init: function(series, options, x) {
var point = this,
colors,
colorCount = series.chart.options.chart.colorCount,
colorIndex;
point.series = series;
point.applyOptions(options, x);
if (series.options.colorByPoint) {
colorIndex = series.colorCounter;
series.colorCounter++;
// loop back to zero
if (series.colorCounter === colorCount) {
series.colorCounter = 0;
}
} else {
colorIndex = series.colorIndex;
}
point.colorIndex = pick(point.colorIndex, colorIndex);
series.chart.pointCount++;
return point;
},
        /**
         * Apply the options containing the x and y data and possible some extra properties.
         * This is called on point init or from point.update.
         *
         * @param {Object} options The point options in number, array or object format
         * @param {Number} [x] Optionally, the x value of the point
         * @returns {Object} The point instance
         */
        applyOptions: function(options, x) {
            var point = this,
                series = point.series,
                pointValKey = series.options.pointValKey || series.pointValKey;
            options = Point.prototype.optionsToObject.call(this, options);
            // copy options directly to point
            extend(point, options);
            point.options = point.options ? extend(point.options, options) : options;
            // Since options are copied into the Point instance, some accidental options must be shielded (#5681)
            if (options.group) {
                delete point.group;
            }
            // For higher dimension series types. For instance, for ranges, point.y is mapped to point.low.
            if (pointValKey) {
                point.y = point[pointValKey];
            }
            point.isNull = pick(
                point.isValid && !point.isValid(),
                point.x === null || !isNumber(point.y, true)
            ); // #3571, check for NaN
            // The point is initially selected by options (#5777)
            if (point.selected) {
                point.state = 'select';
            }
            // If no x is set by now, get auto incremented value. All points must have an
            // x value, however the y value can be null to create a gap in the series
            if ('name' in point && x === undefined && series.xAxis && series.xAxis.hasNames) {
                point.x = series.xAxis.nameToX(point);
            }
            if (point.x === undefined && series) {
                if (x === undefined) {
                    point.x = series.autoIncrement(point);
                } else {
                    point.x = x;
                }
            }
            return point;
        },
        /**
         * Transform number or array configs into objects keyed by the series'
         * point array map (by default ['y']), so every point config can be
         * handled uniformly as an object.
         * @param {Mixed} options A number, array or object point config
         * @returns {Object} The normalized point options
         */
        optionsToObject: function(options) {
            var ret = {},
                series = this.series,
                keys = series.options.keys,
                pointArrayMap = keys || series.pointArrayMap || ['y'],
                valueCount = pointArrayMap.length,
                firstItemType,
                i = 0,
                j = 0;
            if (isNumber(options) || options === null) {
                ret[pointArrayMap[0]] = options;
            } else if (isArray(options)) {
                // with leading x value
                if (!keys && options.length > valueCount) {
                    firstItemType = typeof options[0];
                    if (firstItemType === 'string') {
                        ret.name = options[0];
                    } else if (firstItemType === 'number') {
                        ret.x = options[0];
                    }
                    i++;
                }
                while (j < valueCount) {
                    if (!keys || options[i] !== undefined) { // Skip undefined positions for keys
                        ret[pointArrayMap[j]] = options[i];
                    }
                    i++;
                    j++;
                }
            } else if (typeof options === 'object') {
                ret = options;
                // This is the fastest way to detect if there are individual point dataLabels that need
                // to be considered in drawDataLabels. These can only occur in object configs.
                if (options.dataLabels) {
                    series._hasPointLabels = true;
                }
                // Same approach as above for markers
                if (options.marker) {
                    series._hasPointMarkers = true;
                }
            }
            return ret;
        },
/**
* Get the CSS class names for individual points
* @returns {String} The class name
*/
getClassName: function() {
return 'highcharts-point' +
(this.selected ? ' highcharts-point-select' : '') +
(this.negative ? ' highcharts-negative' : '') +
(this.isNull ? ' highcharts-null-point' : '') +
(this.colorIndex !== undefined ? ' highcharts-color-' + this.colorIndex : '') +
(this.options.className ? ' ' + this.options.className : '');
},
/**
* Return the zone that the point belongs to
*/
getZone: function() {
var series = this.series,
zones = series.zones,
zoneAxis = series.zoneAxis || 'y',
i = 0,
zone;
zone = zones[i];
while (this[zoneAxis] >= zone.value) {
zone = zones[++i];
}
if (zone && zone.color && !this.options.color) {
this.color = zone.color;
}
return zone;
},
        /**
         * Destroy a point to clear memory. Its reference still stays in series.data.
         * Removes hover state, events, graphic elements and legend items tied
         * to the point, then nulls all own properties.
         */
        destroy: function() {
            var point = this,
                series = point.series,
                chart = series.chart,
                hoverPoints = chart.hoverPoints,
                prop;
            chart.pointCount--;
            if (hoverPoints) {
                point.setState();
                erase(hoverPoints, point);
                if (!hoverPoints.length) {
                    chart.hoverPoints = null;
                }
            }
            if (point === chart.hoverPoint) {
                point.onMouseOut();
            }
            // remove all events
            if (point.graphic || point.dataLabel) { // removeEvent and destroyElements are performance expensive
                removeEvent(point);
                point.destroyElements();
            }
            if (point.legendItem) { // pies have legend items
                chart.legend.destroyItem(point);
            }
            // null out every own property so the point can be collected
            for (prop in point) {
                point[prop] = null;
            }
        },
/**
* Destroy SVG elements associated with the point
*/
destroyElements: function() {
var point = this,
props = ['graphic', 'dataLabel', 'dataLabelUpper', 'connector', 'shadowGroup'],
prop,
i = 6;
while (i--) {
prop = props[i];
if (point[prop]) {
point[prop] = point[prop].destroy();
}
}
},
/**
* Return the configuration hash needed for the data label and tooltip formatters
*/
getLabelConfig: function() {
return {
x: this.category,
y: this.y,
color: this.color,
key: this.name || this.category,
series: this.series,
point: this,
percentage: this.percentage,
total: this.total || this.stackTotal
};
},
        /**
         * Extendable method for formatting each point's tooltip line.
         * Injects the series' valueDecimals, valuePrefix and valueSuffix
         * tooltip options into the point format template before running it
         * through the string formatter.
         *
         * @param {String} pointFormat The point format template
         * @return {String} A string to be concatenated in to the common tooltip text
         */
        tooltipFormatter: function(pointFormat) {
            // Insert options for valueDecimals, valuePrefix, and valueSuffix
            var series = this.series,
                seriesTooltipOptions = series.tooltipOptions,
                valueDecimals = pick(seriesTooltipOptions.valueDecimals, ''),
                valuePrefix = seriesTooltipOptions.valuePrefix || '',
                valueSuffix = seriesTooltipOptions.valueSuffix || '';
            // Loop over the point array map and replace unformatted values with sprintf formatting markup
            each(series.pointArrayMap || ['y'], function(key) {
                key = '{point.' + key; // without the closing bracket
                if (valuePrefix || valueSuffix) {
                    pointFormat = pointFormat.replace(key + '}', valuePrefix + key + '}' + valueSuffix);
                }
                pointFormat = pointFormat.replace(key + '}', key + ':,.' + valueDecimals + 'f}');
            });
            return format(pointFormat, {
                point: this,
                series: this.series
            });
        },
        /**
         * Fire an event on the Point object.
         * @param {String} eventType The event type
         * @param {Object} eventArgs Additional event arguments
         * @param {Function} defaultFunction Default event handler; replaced by
         *        the point-select handler for click events when
         *        allowPointSelect is enabled
         */
        firePointEvent: function(eventType, eventArgs, defaultFunction) {
            var point = this,
                series = this.series,
                seriesOptions = series.options;
            // load event handlers on demand to save time on mouseover/out
            if (seriesOptions.point.events[eventType] || (point.options && point.options.events && point.options.events[eventType])) {
                this.importEvents();
            }
            // add default handler if in selection mode
            if (eventType === 'click' && seriesOptions.allowPointSelect) {
                defaultFunction = function(event) {
                    // Control key is for Windows, meta (= Cmd key) for Mac, Shift for Opera
                    if (point.select) { // Could be destroyed by prior event handlers (#2911)
                        point.select(null, event.ctrlKey || event.metaKey || event.shiftKey);
                    }
                };
            }
            fireEvent(this, eventType, eventArgs, defaultFunction);
        },
visible: true
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
animObject = H.animObject,
arrayMax = H.arrayMax,
arrayMin = H.arrayMin,
correctFloat = H.correctFloat,
Date = H.Date,
defaultOptions = H.defaultOptions,
defaultPlotOptions = H.defaultPlotOptions,
defined = H.defined,
each = H.each,
erase = H.erase,
error = H.error,
extend = H.extend,
fireEvent = H.fireEvent,
grep = H.grep,
isArray = H.isArray,
isNumber = H.isNumber,
isString = H.isString,
LegendSymbolMixin = H.LegendSymbolMixin, // @todo add as a requirement
merge = H.merge,
pick = H.pick,
Point = H.Point, // @todo add as a requirement
removeEvent = H.removeEvent,
splat = H.splat,
stableSort = H.stableSort,
SVGElement = H.SVGElement,
syncTimeout = H.syncTimeout,
win = H.win;
    /**
     * @classDescription The base function which all other series types inherit from. The data in the series is stored
     * in various arrays.
     *
     * - First, series.options.data contains all the original config options for
     * each point whether added by options or methods like series.addPoint.
     * - Next, series.data contains those values converted to points, but in case the series data length
     * exceeds the cropThreshold, or if the data is grouped, series.data doesn't contain all the points. It
     * only contains the points that have been created on demand.
     * - Then there's series.points that contains all currently visible point objects. In case of cropping,
     * the cropped-away points are not part of this array. The series.points array starts at series.cropStart
     * compared to series.data and series.options.data. If however the series data is grouped, these can't
     * be correlated one to one.
     * - series.xData and series.processedXData contain clean x values, equivalent to series.data and series.points.
     * - series.yData and series.processedYData contain clean y values, equivalent to series.data and series.points.
     *
     * @param {Object} chart
     * @param {Object} options
     */
    H.Series = H.seriesType('line', null, { // base series options
        allowPointSelect: false,
        showCheckbox: false,
        animation: {
            duration: 1000
        },
        //clip: true,
        //connectNulls: false,
        //enableMouseTracking: true,
        events: {},
        //legendIndex: 0,
        // stacking: null,
        marker: {
            //enabled: true,
            //symbol: null,
            radius: 4,
            states: { // states for a single point
                hover: {
                    animation: {
                        duration: 50
                    },
                    enabled: true,
                    radiusPlus: 2
                }
            }
        },
        point: {
            events: {}
        },
        dataLabels: {
            align: 'center',
            // defer: true,
            // enabled: false,
            formatter: function() {
                return this.y === null ? '' : H.numberFormat(this.y, -1);
            },
            /*style: {
                color: 'contrast',
                textShadow: '0 0 6px contrast, 0 0 3px contrast'
            },*/
            verticalAlign: 'bottom', // above singular point
            x: 0,
            y: 0,
            // borderRadius: undefined,
            padding: 5
        },
        cropThreshold: 300, // draw points outside the plot area when the number of points is less than this
        pointRange: 0,
        //pointStart: 0,
        //pointInterval: 1,
        //showInLegend: null, // auto: true for standalone series, false for linked series
        softThreshold: true,
        states: { // states for the entire series
            hover: {
                //enabled: false,
                lineWidthPlus: 1,
                marker: {
                    // lineWidth: base + 1,
                    // radius: base + 1
                },
                halo: {
                    size: 10
                }
            },
            select: {
                marker: {}
            }
        },
        stickyTracking: true,
        //tooltip: {
        //pointFormat: '<span style="color:{point.color}">\u25CF</span> {series.name}: <b>{point.y}</b>'
        //valueDecimals: null,
        //xDateFormat: '%A, %b %e, %Y',
        //valuePrefix: '',
        //ySuffix: ''
        //}
        turboThreshold: 1000 // above this length, use the fast array-based setData path
        // zIndex: null
        // Prototype properties
    }, {
        isCartesian: true, // placed on X and Y axes
        pointClass: Point, // class used to instantiate each point
        sorted: true, // requires the data to be sorted
        requireSorting: true,
        directTouch: false,
        axisTypes: ['xAxis', 'yAxis'],
        colorCounter: 0,
        parallelArrays: ['x', 'y'], // each point's x and y values are stored in this.xData and this.yData
        coll: 'series',
        /**
         * Initialize the series: merge options, bind axes, register event
         * listeners, set the data, and insert the series into the chart's
         * index-sorted series collection.
         * @param {Object} chart The chart instance
         * @param {Object} options The series options
         */
        init: function(chart, options) {
            var series = this,
                eventType,
                events,
                chartSeries = chart.series,
                // comparator for sorting series by their index option
                sortByIndex = function(a, b) {
                    return pick(a.options.index, a._i) - pick(b.options.index, b._i);
                };
            series.chart = chart;
            series.options = options = series.setOptions(options); // merge with plotOptions
            series.linkedSeries = [];
            // bind the axes
            series.bindAxes();
            // set some variables
            extend(series, {
                name: options.name,
                state: '',
                visible: options.visible !== false, // true by default
                selected: options.selected === true // false by default
            });
            // register event listeners
            events = options.events;
            for (eventType in events) {
                addEvent(series, eventType, events[eventType]);
            }
            if (
                (events && events.click) ||
                (options.point && options.point.events && options.point.events.click) ||
                options.allowPointSelect
            ) {
                chart.runTrackerClick = true;
            }
            series.getColor();
            series.getSymbol();
            // Set the data
            each(series.parallelArrays, function(key) {
                series[key + 'Data'] = [];
            });
            series.setData(options.data, false);
            // Mark cartesian
            if (series.isCartesian) {
                chart.hasCartesianSeries = true;
            }
            // Register it in the chart
            chartSeries.push(series);
            series._i = chartSeries.length - 1;
            // Sort series according to index option (#248, #1123, #2456)
            stableSort(chartSeries, sortByIndex);
            if (this.yAxis) {
                stableSort(this.yAxis.series, sortByIndex);
            }
            each(chartSeries, function(series, i) {
                series.index = i;
                series.name = series.name || 'Series ' + (i + 1);
            });
        },
        /**
         * Set the xAxis and yAxis properties of cartesian series, and register the series
         * in the axis.series array. Raises error 18 when a required axis
         * cannot be found.
         */
        bindAxes: function() {
            var series = this,
                seriesOptions = series.options,
                chart = series.chart,
                axisOptions;
            each(series.axisTypes || [], function(AXIS) { // repeat for xAxis and yAxis
                each(chart[AXIS], function(axis) { // loop through the chart's axis objects
                    axisOptions = axis.options;
                    // apply if the series xAxis or yAxis option matches the number of the
                    // axis, or if undefined, use the first axis
                    if ((seriesOptions[AXIS] === axisOptions.index) ||
                        (seriesOptions[AXIS] !== undefined && seriesOptions[AXIS] === axisOptions.id) ||
                        (seriesOptions[AXIS] === undefined && axisOptions.index === 0)) {
                        // register this series in the axis.series lookup
                        axis.series.push(series);
                        // set this series.xAxis or series.yAxis reference
                        series[AXIS] = axis;
                        // mark dirty for redraw
                        axis.isDirty = true;
                    }
                });
                // The series needs an X and an Y axis
                if (!series[AXIS] && series.optionalAxis !== AXIS) {
                    error(18, true);
                }
            });
        },
        /**
         * For simple series types like line and column, the data values are held in arrays like
         * xData and yData for quick lookup to find extremes and more. For multidimensional series
         * like bubble and map, this can be extended with arrays like zData and valueData by
         * adding to the series.parallelArrays array.
         * @param {Object} point The point whose values to store
         * @param {Number|String} i Either a numeric index to write at, or the
         *        name of an Array.prototype method (e.g. 'push', 'splice')
         *        applied to each parallel array with the remaining arguments.
         */
        updateParallelArrays: function(point, i) {
            var series = point.series,
                args = arguments,
                fn = isNumber(i) ?
                // Insert the value in the given position
                function(key) {
                    var val = key === 'y' && series.toYData ? series.toYData(point) : point[key];
                    series[key + 'Data'][i] = val;
                } :
                // Apply the method specified in i with the following arguments as arguments
                function(key) {
                    Array.prototype[i].apply(series[key + 'Data'], Array.prototype.slice.call(args, 2));
                };
            each(series.parallelArrays, fn);
        },
        /**
         * Return an auto incremented x value based on the pointStart and pointInterval options.
         * This is only used if an x value is not given for the point that calls autoIncrement.
         * Supports calendar-aware intervals ('day', 'month', 'year') via the
         * pointIntervalUnit option.
         * @returns {Number} The next x value
         */
        autoIncrement: function() {
            var options = this.options,
                xIncrement = this.xIncrement,
                date,
                pointInterval,
                pointIntervalUnit = options.pointIntervalUnit;
            xIncrement = pick(xIncrement, options.pointStart, 0);
            this.pointInterval = pointInterval = pick(this.pointInterval, options.pointInterval, 1);
            // Added code for pointInterval strings
            if (pointIntervalUnit) {
                date = new Date(xIncrement);
                if (pointIntervalUnit === 'day') {
                    date = +date[Date.hcSetDate](date[Date.hcGetDate]() + pointInterval);
                } else if (pointIntervalUnit === 'month') {
                    date = +date[Date.hcSetMonth](date[Date.hcGetMonth]() + pointInterval);
                } else if (pointIntervalUnit === 'year') {
                    date = +date[Date.hcSetFullYear](date[Date.hcGetFullYear]() + pointInterval);
                }
                // express the calendar step as a numeric interval
                pointInterval = date - xIncrement;
            }
            this.xIncrement = xIncrement + pointInterval;
            return xIncrement;
        },
        /**
         * Set the series options by merging from the options tree.
         * Handles precedence between type defaults, plotOptions.series and
         * item options, merges tooltip options separately, and normalizes
         * color zones (including the negativeColor shorthand).
         * @param {Object} itemOptions The series item options
         * @returns {Object} The merged series options
         */
        setOptions: function(itemOptions) {
            var chart = this.chart,
                chartOptions = chart.options,
                plotOptions = chartOptions.plotOptions,
                userOptions = chart.userOptions || {},
                userPlotOptions = userOptions.plotOptions || {},
                typeOptions = plotOptions[this.type],
                options,
                zones;
            this.userOptions = itemOptions;
            // General series options take precedence over type options because otherwise, default
            // type options like column.animation would be overwritten by the general option.
            // But issues have been raised here (#3881), and the solution may be to distinguish
            // between default option and userOptions like in the tooltip below.
            options = merge(
                typeOptions,
                plotOptions.series,
                itemOptions
            );
            // The tooltip options are merged between global and series specific options
            this.tooltipOptions = merge(
                defaultOptions.tooltip,
                defaultOptions.plotOptions[this.type].tooltip,
                userOptions.tooltip,
                userPlotOptions.series && userPlotOptions.series.tooltip,
                userPlotOptions[this.type] && userPlotOptions[this.type].tooltip,
                itemOptions.tooltip
            );
            // Delete marker object if not allowed (#1125)
            if (typeOptions.marker === null) {
                delete options.marker;
            }
            // Handle color zones
            this.zoneAxis = options.zoneAxis;
            zones = this.zones = (options.zones || []).slice();
            if ((options.negativeColor || options.negativeFillColor) && !options.zones) {
                zones.push({
                    value: options[this.zoneAxis + 'Threshold'] || options.threshold || 0,
                    className: 'highcharts-negative'
                });
            }
            if (zones.length) { // Push one extra zone for the rest
                if (defined(zones[zones.length - 1].value)) {
                    zones.push({
                    });
                }
            }
            return options;
        },
        /**
         * Pick a value for a cyclic property such as 'color' or 'symbol':
         * either an explicit value, a previously stored index (after
         * Series.update), or by cycling the chart-level counter through the
         * defaults list.
         * @param {String} prop The property name, e.g. 'color'
         * @param {Mixed} [value] An explicit value, used as-is when given
         * @param {Array} [defaults] The list of default values to cycle through
         */
        getCyclic: function(prop, value, defaults) {
            var i,
                userOptions = this.userOptions,
                indexName = prop + 'Index',
                counterName = prop + 'Counter',
                len = defaults ? defaults.length : pick(this.chart.options.chart[prop + 'Count'], this.chart[prop + 'Count']),
                setting;
            if (!value) {
                // Pick up either the colorIndex option, or the _colorIndex after Series.update()
                setting = pick(userOptions[indexName], userOptions['_' + indexName]);
                if (defined(setting)) { // after Series.update()
                    i = setting;
                } else {
                    userOptions['_' + indexName] = i = this.chart[counterName] % len;
                    this.chart[counterName] += 1;
                }
                if (defaults) {
                    value = defaults[i];
                }
            }
            // Set the colorIndex
            if (i !== undefined) {
                this[indexName] = i;
            }
            this[prop] = value;
        },
        /**
         * Get the series' color by cycling the chart's color index.
         */
        getColor: function() {
            this.getCyclic('color');
        },
        /**
         * Get the series' symbol, preferring the explicit marker.symbol option
         * over the cycled chart symbol list.
         */
        getSymbol: function() {
            var seriesMarkerOption = this.options.marker;
            this.getCyclic('symbol', seriesMarkerOption.symbol, this.chart.options.symbols);
        },
        // Use the shared line-and-marker legend symbol drawer
        drawLegendSymbol: LegendSymbolMixin.drawLineMarker,
        /**
         * Replace the series data with a new set of data.
         * When the new data has the same length as the old, existing points
         * are updated in place (cheaper and animatable); otherwise the
         * parallel data arrays are rebuilt, with a fast array path above the
         * turboThreshold.
         * @param {Object} data The new data array
         * @param {Object} redraw Whether to redraw the chart (default true)
         * @param {Mixed} [animation] Animation options passed to the redraw
         * @param {Boolean} [updatePoints] When false, skip the per-point update path
         */
        setData: function(data, redraw, animation, updatePoints) {
            var series = this,
                oldData = series.points,
                oldDataLength = (oldData && oldData.length) || 0,
                dataLength,
                options = series.options,
                chart = series.chart,
                firstPoint = null,
                xAxis = series.xAxis,
                i,
                turboThreshold = options.turboThreshold,
                pt,
                xData = this.xData,
                yData = this.yData,
                pointArrayMap = series.pointArrayMap,
                valueCount = pointArrayMap && pointArrayMap.length;
            data = data || [];
            dataLength = data.length;
            redraw = pick(redraw, true);
            // If the point count is the same as is was, just run Point.update which is
            // cheaper, allows animation, and keeps references to points.
            if (updatePoints !== false && dataLength && oldDataLength === dataLength && !series.cropped && !series.hasGroupedData && series.visible) {
                each(data, function(point, i) {
                    // .update doesn't exist on a linked, hidden series (#3709)
                    if (oldData[i].update && point !== options.data[i]) {
                        oldData[i].update(point, false, null, false);
                    }
                });
            } else {
                // Reset properties
                series.xIncrement = null;
                series.colorCounter = 0; // for series with colorByPoint (#1547)
                // Update parallel arrays
                each(this.parallelArrays, function(key) {
                    series[key + 'Data'].length = 0;
                });
                // In turbo mode, only one- or twodimensional arrays of numbers are allowed. The
                // first value is tested, and we assume that all the rest are defined the same
                // way. Although the 'for' loops are similar, they are repeated inside each
                // if-else conditional for max performance.
                if (turboThreshold && dataLength > turboThreshold) {
                    // find the first non-null point
                    i = 0;
                    while (firstPoint === null && i < dataLength) {
                        firstPoint = data[i];
                        i++;
                    }
                    if (isNumber(firstPoint)) { // assume all points are numbers
                        for (i = 0; i < dataLength; i++) {
                            xData[i] = this.autoIncrement();
                            yData[i] = data[i];
                        }
                    } else if (isArray(firstPoint)) { // assume all points are arrays
                        if (valueCount) { // [x, low, high] or [x, o, h, l, c]
                            for (i = 0; i < dataLength; i++) {
                                pt = data[i];
                                xData[i] = pt[0];
                                yData[i] = pt.slice(1, valueCount + 1);
                            }
                        } else { // [x, y]
                            for (i = 0; i < dataLength; i++) {
                                pt = data[i];
                                xData[i] = pt[0];
                                yData[i] = pt[1];
                            }
                        }
                    } else {
                        error(12); // Highcharts expects configs to be numbers or arrays in turbo mode
                    }
                } else {
                    for (i = 0; i < dataLength; i++) {
                        if (data[i] !== undefined) { // stray commas in oldIE
                            pt = {
                                series: series
                            };
                            series.pointClass.prototype.applyOptions.apply(pt, [data[i]]);
                            series.updateParallelArrays(pt, i);
                        }
                    }
                }
                // Forgetting to cast strings to numbers is a common caveat when handling CSV or JSON
                if (isString(yData[0])) {
                    error(14, true);
                }
                series.data = [];
                series.options.data = series.userOptions.data = data;
                // destroy old points
                i = oldDataLength;
                while (i--) {
                    if (oldData[i] && oldData[i].destroy) {
                        oldData[i].destroy();
                    }
                }
                // reset minRange (#878)
                if (xAxis) {
                    xAxis.minRange = xAxis.userMinRange;
                }
                // redraw
                series.isDirty = chart.isDirtyBox = true;
                series.isDirtyData = !!oldData;
                animation = false;
            }
            // Typically for pie series, points need to be processed and generated
            // prior to rendering the legend
            if (options.legendType === 'point') {
                this.processData();
                this.generatePoints();
            }
            if (redraw) {
                chart.redraw(animation);
            }
        },
        /**
         * Process the data by cropping away unused data points if the series is longer
         * than the crop threshold. This saves computing time for large series.
         * Also computes the closest distance between points and validates sort
         * order (error 15 for unsorted data).
         * @param {Boolean} [force] Process even when data and axes are not dirty
         * @returns {Boolean|undefined} False when nothing needed processing
         */
        processData: function(force) {
            var series = this,
                processedXData = series.xData, // copied during slice operation below
                processedYData = series.yData,
                dataLength = processedXData.length,
                croppedData,
                cropStart = 0,
                cropped,
                distance,
                closestPointRange,
                xAxis = series.xAxis,
                i, // loop variable
                options = series.options,
                cropThreshold = options.cropThreshold,
                getExtremesFromAll = series.getExtremesFromAll || options.getExtremesFromAll, // #4599
                isCartesian = series.isCartesian,
                xExtremes,
                val2lin = xAxis && xAxis.val2lin,
                isLog = xAxis && xAxis.isLog,
                min,
                max;
            // If the series data or axes haven't changed, don't go through this. Return false to pass
            // the message on to override methods like in data grouping.
            if (isCartesian && !series.isDirty && !xAxis.isDirty && !series.yAxis.isDirty && !force) {
                return false;
            }
            if (xAxis) {
                xExtremes = xAxis.getExtremes(); // corrected for log axis (#3053)
                min = xExtremes.min;
                max = xExtremes.max;
            }
            // optionally filter out points outside the plot area
            if (isCartesian && series.sorted && !getExtremesFromAll && (!cropThreshold || dataLength > cropThreshold || series.forceCrop)) {
                // it's outside current extremes
                if (processedXData[dataLength - 1] < min || processedXData[0] > max) {
                    processedXData = [];
                    processedYData = [];
                    // only crop if it's actually spilling out
                } else if (processedXData[0] < min || processedXData[dataLength - 1] > max) {
                    croppedData = this.cropData(series.xData, series.yData, min, max);
                    processedXData = croppedData.xData;
                    processedYData = croppedData.yData;
                    cropStart = croppedData.start;
                    cropped = true;
                }
            }
            // Find the closest distance between processed points
            i = processedXData.length || 1;
            while (--i) {
                distance = isLog ?
                    val2lin(processedXData[i]) - val2lin(processedXData[i - 1]) :
                    processedXData[i] - processedXData[i - 1];
                if (distance > 0 && (closestPointRange === undefined || distance < closestPointRange)) {
                    closestPointRange = distance;
                    // Unsorted data is not supported by the line tooltip, as well as data grouping and
                    // navigation in Stock charts (#725) and width calculation of columns (#1900)
                } else if (distance < 0 && series.requireSorting) {
                    error(15);
                }
            }
            // Record the properties
            series.cropped = cropped; // undefined or true
            series.cropStart = cropStart;
            series.processedXData = processedXData;
            series.processedYData = processedYData;
            series.closestPointRange = closestPointRange;
        },
/**
* Iterate over xData and crop values between min and max. Returns object containing crop start/end
* cropped xData with corresponding part of yData, dataMin and dataMax within the cropped range
*/
cropData: function(xData, yData, min, max) {
var dataLength = xData.length,
cropStart = 0,
cropEnd = dataLength,
cropShoulder = pick(this.cropShoulder, 1), // line-type series need one point outside
i,
j;
// iterate up to find slice start
for (i = 0; i < dataLength; i++) {
if (xData[i] >= min) {
cropStart = Math.max(0, i - cropShoulder);
break;
}
}
// proceed to find slice end
for (j = i; j < dataLength; j++) {
if (xData[j] > max) {
cropEnd = j + cropShoulder;
break;
}
}
return {
xData: xData.slice(cropStart, cropEnd),
yData: yData.slice(cropStart, cropEnd),
start: cropStart,
end: cropEnd
};
},
        /**
         * Generate the data point after the data has been processed by cropping away
         * unused points and optionally grouped in Highcharts Stock. Reuses
         * existing Point instances where possible and clears graphics on
         * cropped-away points.
         */
        generatePoints: function() {
            var series = this,
                options = series.options,
                dataOptions = options.data,
                data = series.data,
                dataLength,
                processedXData = series.processedXData,
                processedYData = series.processedYData,
                PointClass = series.pointClass,
                processedDataLength = processedXData.length,
                cropStart = series.cropStart || 0,
                cursor,
                hasGroupedData = series.hasGroupedData,
                point,
                points = [],
                i;
            if (!data && !hasGroupedData) {
                // pre-size a sparse array matching the raw data length
                var arr = [];
                arr.length = dataOptions.length;
                data = series.data = arr;
            }
            for (i = 0; i < processedDataLength; i++) {
                cursor = cropStart + i;
                if (!hasGroupedData) {
                    if (data[cursor]) {
                        point = data[cursor];
                    } else if (dataOptions[cursor] !== undefined) { // #970
                        data[cursor] = point = (new PointClass()).init(series, dataOptions[cursor], processedXData[i]);
                    }
                    points[i] = point;
                } else {
                    // splat the y data in case of ohlc data array
                    points[i] = (new PointClass()).init(series, [processedXData[i]].concat(splat(processedYData[i])));
                    points[i].dataGroup = series.groupMap[i];
                }
                points[i].index = cursor; // For faster access in Point.update
            }
            // Hide cropped-away points - this only runs when the number of points is above cropThreshold, or when
            // switching view from non-grouped data to grouped data (#637)
            if (data && (processedDataLength !== (dataLength = data.length) || hasGroupedData)) {
                for (i = 0; i < dataLength; i++) {
                    if (i === cropStart && !hasGroupedData) { // when has grouped data, clear all points
                        i += processedDataLength;
                    }
                    if (data[i]) {
                        data[i].destroyElements();
                        data[i].plotX = undefined; // #1003
                    }
                }
            }
            series.data = data;
            series.points = points;
        },
        /**
         * Calculate Y extremes for visible data, storing the result on
         * series.dataMin and series.dataMax.
         * @param {Array} [yData] Optional y data to measure instead of the
         *        series' own stacked/processed data
         */
        getExtremes: function(yData) {
            var xAxis = this.xAxis,
                yAxis = this.yAxis,
                xData = this.processedXData,
                yDataLength,
                activeYData = [],
                activeCounter = 0,
                xExtremes = xAxis.getExtremes(), // #2117, need to compensate for log X axis
                xMin = xExtremes.min,
                xMax = xExtremes.max,
                validValue,
                withinRange,
                x,
                y,
                i,
                j;
            yData = yData || this.stackedYData || this.processedYData || [];
            yDataLength = yData.length;
            for (i = 0; i < yDataLength; i++) {
                x = xData[i];
                y = yData[i];
                // For points within the visible range, including the first point outside the
                // visible range, consider y extremes
                validValue = (isNumber(y, true) || isArray(y)) && (!yAxis.isLog || (y.length || y > 0));
                withinRange = this.getExtremesFromAll || this.options.getExtremesFromAll || this.cropped ||
                    ((xData[i + 1] || x) >= xMin && (xData[i - 1] || x) <= xMax);
                if (validValue && withinRange) {
                    j = y.length;
                    if (j) { // array, like ohlc or range data
                        while (j--) {
                            if (y[j] !== null) {
                                activeYData[activeCounter++] = y[j];
                            }
                        }
                    } else {
                        activeYData[activeCounter++] = y;
                    }
                }
            }
            this.dataMin = arrayMin(activeYData);
            this.dataMax = arrayMax(activeYData);
        },
        /**
         * Translate data points from raw data values to chart specific positioning data
         * needed later in drawPoints, drawGraph and drawTracker.
         *
         * Sets plotX/plotY, yBottom, clientX, negative, category, isInside and
         * stacking-related properties on each point, and records
         * `series.closestPointRangePx` (the minimum pixel distance between
         * consecutive non-null points, used e.g. for auto-enabling markers).
         */
        translate: function() {
            if (!this.processedXData) { // hidden series
                this.processData();
            }
            this.generatePoints();
            var series = this,
                options = series.options,
                stacking = options.stacking,
                xAxis = series.xAxis,
                categories = xAxis.categories,
                yAxis = series.yAxis,
                points = series.points,
                dataLength = points.length,
                hasModifyValue = !!series.modifyValue,
                i,
                pointPlacement = options.pointPlacement,
                dynamicallyPlaced = pointPlacement === 'between' || isNumber(pointPlacement),
                threshold = options.threshold,
                stackThreshold = options.startFromThreshold ? threshold : 0,
                plotX,
                plotY,
                lastPlotX,
                stackIndicator,
                closestPointRangePx = Number.MAX_VALUE;
            // Translate each point
            for (i = 0; i < dataLength; i++) {
                var point = points[i],
                    xValue = point.x,
                    yValue = point.y,
                    yBottom = point.low,
                    stack = stacking && yAxis.stacks[(series.negStacks && yValue < (stackThreshold ? 0 : threshold) ? '-' : '') + series.stackKey],
                    pointStack,
                    stackValues;
                // Discard disallowed y values for log axes (#3434)
                if (yAxis.isLog && yValue !== null && yValue <= 0) {
                    point.isNull = true;
                }
                // Get the plotX translation, clamped to +/-1e5 px
                point.plotX = plotX = correctFloat( // #5236
                    Math.min(Math.max(-1e5, xAxis.translate(xValue, 0, 0, 0, 1, pointPlacement, this.type === 'flags')), 1e5) // #3923
                );
                // Calculate the bottom y value for stacked series
                if (stacking && series.visible && !point.isNull && stack && stack[xValue]) {
                    stackIndicator = series.getStackIndicator(stackIndicator, xValue, series.index);
                    pointStack = stack[xValue];
                    stackValues = pointStack.points[stackIndicator.key];
                    yBottom = stackValues[0];
                    yValue = stackValues[1];
                    if (yBottom === stackThreshold && stackIndicator.key === stack[xValue].base) {
                        yBottom = pick(threshold, yAxis.min);
                    }
                    if (yAxis.isLog && yBottom <= 0) { // #1200, #1232
                        yBottom = null;
                    }
                    point.total = point.stackTotal = pointStack.total;
                    point.percentage = pointStack.total && (point.y / pointStack.total * 100);
                    point.stackY = yValue;
                    // Place the stack label
                    pointStack.setOffset(series.pointXOffset || 0, series.barW || 0);
                }
                // Set translated yBottom or remove it
                point.yBottom = defined(yBottom) ?
                    yAxis.translate(yBottom, 0, 1, 0, 1) :
                    null;
                // general hook, used for Highstock compare mode
                if (hasModifyValue) {
                    yValue = series.modifyValue(yValue, point);
                }
                // Set the plotY value, reset it for redraws
                point.plotY = plotY = (typeof yValue === 'number' && yValue !== Infinity) ?
                    Math.min(Math.max(-1e5, yAxis.translate(yValue, 0, 1, 0, 1)), 1e5) : // #3201
                    undefined;
                point.isInside = plotY !== undefined && plotY >= 0 && plotY <= yAxis.len && // #3519
                    plotX >= 0 && plotX <= xAxis.len;
                // Set client related positions for mouse tracking
                point.clientX = dynamicallyPlaced ? correctFloat(xAxis.translate(xValue, 0, 0, 0, 1, pointPlacement)) : plotX; // #1514, #5383, #5518
                point.negative = point.y < (threshold || 0);
                // some API data
                point.category = categories && categories[point.x] !== undefined ?
                    categories[point.x] : point.x;
                // Determine auto enabling of markers (#3635, #5099)
                if (!point.isNull) {
                    if (lastPlotX !== undefined) {
                        closestPointRangePx = Math.min(closestPointRangePx, Math.abs(plotX - lastPlotX));
                    }
                    lastPlotX = plotX;
                }
            }
            series.closestPointRangePx = closestPointRangePx;
        },
/**
* Return the series points with null points filtered out
*/
getValidPoints: function(points, insideOnly) {
var chart = this.chart;
return grep(points || this.points || [], function isValidPoint(point) { // #3916, #5029
if (insideOnly && !chart.isInsidePlot(point.plotX, point.plotY, chart.inverted)) { // #5085
return false;
}
return !point.isNull;
});
},
        /**
         * Set the clipping for the series. For animated series it is called twice, first to initiate
         * animating the clip then the second time without the animation to set the final clip.
         *
         * Clip rectangles with identical geometry are shared between series via
         * `sharedClipKey` (#4526). A counter on the clip rect tracks which series
         * still use the animated clip, so it is destroyed once all series have
         * landed on their final clipping.
         */
        setClip: function(animation) {
            var chart = this.chart,
                options = this.options,
                renderer = chart.renderer,
                inverted = chart.inverted,
                seriesClipBox = this.clipBox,
                clipBox = seriesClipBox || chart.clipBox,
                sharedClipKey = this.sharedClipKey || ['_sharedClip', animation && animation.duration, animation && animation.easing, clipBox.height, options.xAxis, options.yAxis].join(','), // #4526
                clipRect = chart[sharedClipKey],
                markerClipRect = chart[sharedClipKey + 'm'];
            // If a clipping rectangle with the same properties is currently present in the chart, use that.
            if (!clipRect) {
                // When animation is set, prepare the initial positions
                if (animation) {
                    clipBox.width = 0;
                    chart[sharedClipKey + 'm'] = markerClipRect = renderer.clipRect(-99, // include the width of the first marker
                        inverted ? -chart.plotLeft : -chart.plotTop,
                        99,
                        inverted ? chart.chartWidth : chart.chartHeight
                    );
                }
                chart[sharedClipKey] = clipRect = renderer.clipRect(clipBox);
                // Create hashmap for series indexes
                clipRect.count = {
                    length: 0
                };
            }
            if (animation) {
                // Register this series on the shared animated clip
                if (!clipRect.count[this.index]) {
                    clipRect.count[this.index] = true;
                    clipRect.count.length += 1;
                }
            }
            if (options.clip !== false) {
                this.group.clip(animation || seriesClipBox ? clipRect : chart.clipRect);
                this.markerGroup.clip(markerClipRect);
                this.sharedClipKey = sharedClipKey;
            }
            // Remove the shared clipping rectangle when all series are shown
            if (!animation) {
                if (clipRect.count[this.index]) {
                    delete clipRect.count[this.index];
                    clipRect.count.length -= 1;
                }
                if (clipRect.count.length === 0 && sharedClipKey && chart[sharedClipKey]) {
                    if (!seriesClipBox) {
                        chart[sharedClipKey] = chart[sharedClipKey].destroy();
                    }
                    if (chart[sharedClipKey + 'm']) {
                        chart[sharedClipKey + 'm'] = chart[sharedClipKey + 'm'].destroy();
                    }
                }
            }
        },
        /**
         * Animate in the series.
         *
         * @param {Boolean} init When true, set up the initial (zero-width shared)
         * clipping rectangle. When falsy, animate the clip rects out to the full
         * plot width, then disable this function so it only runs once.
         */
        animate: function(init) {
            var series = this,
                chart = series.chart,
                clipRect,
                animation = animObject(series.options.animation),
                sharedClipKey;
            // Initialize the animation. Set up the clipping rectangle.
            if (init) {
                series.setClip(animation);
                // Run the animation
            } else {
                sharedClipKey = this.sharedClipKey;
                clipRect = chart[sharedClipKey];
                if (clipRect) {
                    clipRect.animate({
                        width: chart.plotSizeX
                    }, animation);
                }
                if (chart[sharedClipKey + 'm']) {
                    // The marker clip is 99px wider on each side (see setClip)
                    chart[sharedClipKey + 'm'].animate({
                        width: chart.plotSizeX + 99
                    }, animation);
                }
                // Delete this function to allow it only once
                series.animate = null;
            }
        },
/**
* This runs after animation to land on the final plot clipping
*/
afterAnimate: function() {
this.setClip();
fireEvent(this, 'afterAnimate');
},
        /**
         * Draw the point markers.
         *
         * Markers are drawn when enabled per series or per point; by default
         * they are auto-enabled only when points are spaced more than twice the
         * marker radius apart (see `globallyEnabled`). Existing graphics are
         * animated to their new attributes; graphics of points that are no
         * longer shown are destroyed (#1269).
         */
        drawPoints: function() {
            var series = this,
                points = series.points,
                chart = series.chart,
                plotY,
                i,
                point,
                symbol,
                graphic,
                options = series.options,
                seriesMarkerOptions = options.marker,
                pointMarkerOptions,
                hasPointMarker,
                enabled,
                isInside,
                markerGroup = series.markerGroup,
                xAxis = series.xAxis,
                markerAttribs,
                globallyEnabled = pick(
                    seriesMarkerOptions.enabled,
                    xAxis.isRadial ? true : null,
                    series.closestPointRangePx > 2 * seriesMarkerOptions.radius
                );
            if (seriesMarkerOptions.enabled !== false || series._hasPointMarkers) {
                i = points.length;
                while (i--) {
                    point = points[i];
                    plotY = point.plotY;
                    graphic = point.graphic;
                    pointMarkerOptions = point.marker || {};
                    hasPointMarker = !!point.marker;
                    enabled = (globallyEnabled && pointMarkerOptions.enabled === undefined) || pointMarkerOptions.enabled;
                    isInside = point.isInside;
                    // only draw the point if y is defined
                    if (enabled && isNumber(plotY) && point.y !== null) {
                        // Shortcuts
                        symbol = pick(pointMarkerOptions.symbol, series.symbol);
                        point.hasImage = symbol.indexOf('url') === 0;
                        markerAttribs = series.markerAttribs(
                            point,
                            point.selected && 'select'
                        );
                        if (graphic) { // update
                            graphic[isInside ? 'show' : 'hide'](true) // Since the marker group isn't clipped, each individual marker must be toggled
                                .animate(markerAttribs);
                        } else if (isInside && (markerAttribs.width > 0 || point.hasImage)) {
                            point.graphic = graphic = chart.renderer.symbol(
                                symbol,
                                markerAttribs.x,
                                markerAttribs.y,
                                markerAttribs.width,
                                markerAttribs.height,
                                hasPointMarker ? pointMarkerOptions : seriesMarkerOptions
                            )
                            .add(markerGroup);
                        }
                        if (graphic) {
                            graphic.addClass(point.getClassName(), true);
                        }
                    } else if (graphic) {
                        point.graphic = graphic.destroy(); // #1269
                    }
                }
            }
        },
/**
* Get non-presentational attributes for the point.
*/
markerAttribs: function(point, state) {
var seriesMarkerOptions = this.options.marker,
seriesStateOptions,
pointOptions = point && point.options,
pointMarkerOptions = (pointOptions && pointOptions.marker) || {},
pointStateOptions,
radius = pick(
pointMarkerOptions.radius,
seriesMarkerOptions.radius
),
attribs;
// Handle hover and select states
if (state) {
seriesStateOptions = seriesMarkerOptions.states[state];
pointStateOptions = pointMarkerOptions.states &&
pointMarkerOptions.states[state];
radius = pick(
pointStateOptions && pointStateOptions.radius,
seriesStateOptions && seriesStateOptions.radius,
radius + (seriesStateOptions && seriesStateOptions.radiusPlus || 0)
);
}
if (point.hasImage) {
radius = 0; // and subsequently width and height is not set
}
attribs = {
x: Math.floor(point.plotX) - radius, // Math.floor for #1843
y: point.plotY - radius
};
if (radius) {
attribs.width = attribs.height = 2 * radius;
}
return attribs;
},
        /**
         * Clear DOM objects and free up memory.
         *
         * Fires the 'destroy' event, removes all event handlers, detaches the
         * series from its axes and the legend, destroys all points and all
         * SVGElements owned by the series, and finally deletes its members.
         */
        destroy: function() {
            var series = this,
                chart = series.chart,
                issue134 = /AppleWebKit\/533/.test(win.navigator.userAgent),
                destroy,
                i,
                data = series.data || [],
                point,
                prop,
                axis;
            // add event hook
            fireEvent(series, 'destroy');
            // remove all events
            removeEvent(series);
            // erase from axes
            each(series.axisTypes || [], function(AXIS) {
                axis = series[AXIS];
                if (axis && axis.series) {
                    erase(axis.series, series);
                    axis.isDirty = axis.forceRedraw = true;
                }
            });
            // remove legend items
            if (series.legendItem) {
                series.chart.legend.destroyItem(series);
            }
            // destroy all points with their elements
            i = data.length;
            while (i--) {
                point = data[i];
                if (point && point.destroy) {
                    point.destroy();
                }
            }
            series.points = null;
            // Clear the animation timeout if we are destroying the series during initial animation
            clearTimeout(series.animationTimeout);
            // Destroy all SVGElements associated to the series
            for (prop in series) {
                if (series[prop] instanceof SVGElement && !series[prop].survive) { // Survive provides a hook for not destroying
                    // issue 134 workaround: hide the group instead of destroying it
                    destroy = issue134 && prop === 'group' ?
                        'hide' :
                        'destroy';
                    series[prop][destroy]();
                }
            }
            // remove from hoverSeries
            if (chart.hoverSeries === series) {
                chart.hoverSeries = null;
            }
            erase(chart.series, series);
            // clear all members
            for (prop in series) {
                delete series[prop];
            }
        },
        /**
         * Get the graph path.
         *
         * @param {Array} points Points to build the path from; defaults to
         * series.points.
         * @param {Boolean} nullsAsZeroes Area-series mode: when set, null points
         * do not break the line.
         * @param {Boolean} connectCliffs Connect across stack "cliffs" instead
         * of starting a gap.
         * @return {Array} SVG path array, also stored as series.graphPath. An
         * xMap of point x values is attached for animation (two entries per x
         * when stepping, since stepping produces two path nodes per x value).
         */
        getGraphPath: function(points, nullsAsZeroes, connectCliffs) {
            var series = this,
                options = series.options,
                step = options.step,
                reversed,
                graphPath = [],
                xMap = [],
                gap;
            points = points || series.points;
            // Bottom of a stack is reversed
            reversed = points.reversed;
            if (reversed) {
                points.reverse();
            }
            // Reverse the steps (#5004). Map the step option to a numeric code:
            // 1 = right, 2 = center, 3 = left.
            step = {
                right: 1,
                center: 2
            }[step] || (step && 3);
            if (step && reversed) {
                step = 4 - step;
            }
            // Remove invalid points, especially in spline (#5015)
            if (options.connectNulls && !nullsAsZeroes && !connectCliffs) {
                points = this.getValidPoints(points);
            }
            // Build the line
            each(points, function(point, i) {
                var plotX = point.plotX,
                    plotY = point.plotY,
                    lastPoint = points[i - 1],
                    pathToPoint; // the path to this point from the previous
                if ((point.leftCliff || (lastPoint && lastPoint.rightCliff)) && !connectCliffs) {
                    gap = true; // ... and continue
                }
                // Line series, nullsAsZeroes is not handled
                if (point.isNull && !defined(nullsAsZeroes) && i > 0) {
                    gap = !options.connectNulls;
                    // Area series, nullsAsZeroes is set
                } else if (point.isNull && !nullsAsZeroes) {
                    gap = true;
                } else {
                    if (i === 0 || gap) {
                        pathToPoint = ['M', point.plotX, point.plotY];
                    } else if (series.getPointSpline) { // generate the spline as defined in the SplineSeries object
                        pathToPoint = series.getPointSpline(points, point, i);
                    } else if (step) {
                        if (step === 1) { // right
                            pathToPoint = [
                                'L',
                                lastPoint.plotX,
                                plotY
                            ];
                        } else if (step === 2) { // center
                            pathToPoint = [
                                'L',
                                (lastPoint.plotX + plotX) / 2,
                                lastPoint.plotY,
                                'L',
                                (lastPoint.plotX + plotX) / 2,
                                plotY
                            ];
                        } else {
                            pathToPoint = [
                                'L',
                                plotX,
                                lastPoint.plotY
                            ];
                        }
                        pathToPoint.push('L', plotX, plotY);
                    } else {
                        // normal line to next point
                        pathToPoint = [
                            'L',
                            plotX,
                            plotY
                        ];
                    }
                    // Prepare for animation. When step is enabled, there are two path nodes for each x value.
                    xMap.push(point.x);
                    if (step) {
                        xMap.push(point.x);
                    }
                    graphPath.push.apply(graphPath, pathToPoint);
                    gap = false;
                }
            });
            graphPath.xMap = xMap;
            series.graphPath = graphPath;
            return graphPath;
        },
        /**
         * Draw the actual graph.
         *
         * Creates or animates the main graph path plus one path per zone
         * ('zone-graph-N'), each carrying CSS classes for styling. The zone
         * paths are later clipped into their zone ranges by applyZones.
         */
        drawGraph: function() {
            var series = this,
                options = this.options,
                graphPath = (this.gappedPath || this.getGraphPath).call(this),
                props = [
                    [
                        'graph',
                        'highcharts-graph'
                    ]
                ];
            // Add the zone properties if any
            each(this.zones, function(zone, i) {
                props.push([
                    'zone-graph-' + i,
                    'highcharts-graph highcharts-zone-graph-' + i + ' ' + (zone.className || '')
                ]);
            });
            // Draw the graph
            each(props, function(prop, i) {
                var graphKey = prop[0],
                    graph = series[graphKey],
                    attribs;
                if (graph) {
                    // Existing path: record animation target and animate to it
                    graph.endX = graphPath.xMap;
                    graph.animate({
                        d: graphPath
                    });
                } else if (graphPath.length) { // #1487
                    series[graphKey] = series.chart.renderer.path(graphPath)
                        .addClass(prop[1])
                        .attr({
                            zIndex: 1
                        }) // #1069
                        .add(series.group);
                }
                // Helpers for animation
                if (graph) {
                    graph.startX = graphPath.xMap;
                    //graph.shiftUnit = options.step ? 2 : 1;
                    graph.isArea = graphPath.isArea; // For arearange animation
                }
            });
        },
        /**
         * Clip the graphs into the positive and negative coloured graphs.
         *
         * Hides the original graph/area and shows the per-zone paths instead,
         * each clipped to its zone's pixel range along the zone axis. Note that
         * `translatedTo` carries over between zone iterations: each zone starts
         * where the previous one ended.
         */
        applyZones: function() {
            var series = this,
                chart = this.chart,
                renderer = chart.renderer,
                zones = this.zones,
                translatedFrom,
                translatedTo,
                clips = this.clips || [],
                clipAttr,
                graph = this.graph,
                area = this.area,
                chartSizeMax = Math.max(chart.chartWidth, chart.chartHeight),
                axis = this[(this.zoneAxis || 'y') + 'Axis'],
                extremes,
                reversed,
                inverted = chart.inverted,
                horiz,
                pxRange,
                pxPosMin,
                pxPosMax,
                ignoreZones = false;
            if (zones.length && (graph || area) && axis && axis.min !== undefined) {
                reversed = axis.reversed;
                horiz = axis.horiz;
                // The use of the Color Threshold assumes there are no gaps
                // so it is safe to hide the original graph and area
                if (graph) {
                    graph.hide();
                }
                if (area) {
                    area.hide();
                }
                // Create the clips
                extremes = axis.getExtremes();
                each(zones, function(threshold, i) {
                    translatedFrom = reversed ?
                        (horiz ? chart.plotWidth : 0) :
                        (horiz ? 0 : axis.toPixels(extremes.min));
                    // Continue from where the previous zone ended
                    translatedFrom = Math.min(Math.max(pick(translatedTo, translatedFrom), 0), chartSizeMax);
                    translatedTo = Math.min(Math.max(Math.round(axis.toPixels(pick(threshold.value, extremes.max), true)), 0), chartSizeMax);
                    if (ignoreZones) {
                        // A previous zone already covered the axis: collapse this one
                        translatedFrom = translatedTo = axis.toPixels(extremes.max);
                    }
                    pxRange = Math.abs(translatedFrom - translatedTo);
                    pxPosMin = Math.min(translatedFrom, translatedTo);
                    pxPosMax = Math.max(translatedFrom, translatedTo);
                    if (axis.isXAxis) {
                        clipAttr = {
                            x: inverted ? pxPosMax : pxPosMin,
                            y: 0,
                            width: pxRange,
                            height: chartSizeMax
                        };
                        if (!horiz) {
                            clipAttr.x = chart.plotHeight - clipAttr.x;
                        }
                    } else {
                        clipAttr = {
                            x: 0,
                            y: inverted ? pxPosMax : pxPosMin,
                            width: chartSizeMax,
                            height: pxRange
                        };
                        if (horiz) {
                            clipAttr.y = chart.plotWidth - clipAttr.y;
                        }
                    }
                    if (clips[i]) {
                        clips[i].animate(clipAttr);
                    } else {
                        clips[i] = renderer.clipRect(clipAttr);
                        if (graph) {
                            series['zone-graph-' + i].clip(clips[i]);
                        }
                        if (area) {
                            series['zone-area-' + i].clip(clips[i]);
                        }
                    }
                    // if this zone extends out of the axis, ignore the others
                    ignoreZones = threshold.value > extremes.max;
                });
                this.clips = clips;
            }
        },
        /**
         * Initialize and perform group inversion on series.group and series.markerGroup.
         *
         * Registers a chart 'resize' handler so the inversion is re-applied with
         * the new axis lengths, and replaces itself with the bare setInvert on
         * subsequent render/redraw calls.
         */
        invertGroups: function(inverted) {
            var series = this,
                chart = series.chart;
            // Pie, go away (#1736)
            if (!series.xAxis) {
                return;
            }
            // A fixed size is needed for inversion to work
            function setInvert() {
                var size = {
                    width: series.yAxis.len,
                    height: series.xAxis.len
                };
                each(['group', 'markerGroup'], function(groupName) {
                    if (series[groupName]) {
                        series[groupName].attr(size).invert(inverted);
                    }
                });
            }
            addEvent(chart, 'resize', setInvert); // do it on resize
            addEvent(series, 'destroy', function() {
                removeEvent(chart, 'resize', setInvert);
            });
            // Do it now. Note that setInvert declares no parameters: the
            // argument below is ignored and `inverted` is read from the closure.
            setInvert(inverted); // do it now
            // On subsequent render and redraw, just do setInvert without setting up events again
            series.invertGroups = setInvert;
        },
/**
* General abstraction for creating plot groups like series.group, series.dataLabelsGroup and
* series.markerGroup. On subsequent calls, the group will only be adjusted to the updated plot size.
*/
plotGroup: function(prop, name, visibility, zIndex, parent) {
var group = this[prop],
isNew = !group;
// Generate it on first call
if (isNew) {
this[prop] = group = this.chart.renderer.g(name)
.attr({
zIndex: zIndex || 0.1 // IE8 and pointer logic use this
})
.add(parent);
group.addClass('highcharts-series-' + this.index + ' highcharts-' + this.type + '-series highcharts-color-' + this.colorIndex +
' ' + (this.options.className || ''));
}
// Place it on first and subsequent (redraw) calls
group.attr({
visibility: visibility
})[isNew ? 'attr' : 'animate'](this.getPlotBox());
return group;
},
/**
* Get the translation and scale for the plot area of this series
*/
getPlotBox: function() {
var chart = this.chart,
xAxis = this.xAxis,
yAxis = this.yAxis;
// Swap axes for inverted (#2339)
if (chart.inverted) {
xAxis = yAxis;
yAxis = this.xAxis;
}
return {
translateX: xAxis ? xAxis.left : chart.plotLeft,
translateY: yAxis ? yAxis.top : chart.plotTop,
scaleX: 1, // #1623
scaleY: 1
};
},
        /**
         * Render the graph and markers.
         *
         * Creates or updates the series and marker groups, draws the graph,
         * zones, data labels, points and mouse tracker, applies inversion and
         * initial clipping, and runs the intro animation on first render.
         */
        render: function() {
            var series = this,
                chart = series.chart,
                group,
                options = series.options,
                // Animation doesn't work in IE8 quirks when the group div is hidden,
                // and looks bad in other oldIE
                animDuration = !!series.animate && chart.renderer.isSVG && animObject(options.animation).duration,
                visibility = series.visible ? 'inherit' : 'hidden', // #2597
                zIndex = options.zIndex,
                hasRendered = series.hasRendered,
                chartSeriesGroup = chart.seriesGroup,
                inverted = chart.inverted;
            // the group
            group = series.plotGroup(
                'group',
                'series',
                visibility,
                zIndex,
                chartSeriesGroup
            );
            series.markerGroup = series.plotGroup(
                'markerGroup',
                'markers',
                visibility,
                zIndex,
                chartSeriesGroup
            );
            // initiate the animation
            if (animDuration) {
                series.animate(true);
            }
            // SVGRenderer needs to know this before drawing elements (#1089, #1795)
            group.inverted = series.isCartesian ? inverted : false;
            // draw the graph if any
            if (series.drawGraph) {
                series.drawGraph();
                series.applyZones();
            }
            /* each(series.points, function (point) {
                if (point.redraw) {
                    point.redraw();
                }
            });*/
            // draw the data labels (in pies they go before the points)
            if (series.drawDataLabels) {
                series.drawDataLabels();
            }
            // draw the points
            if (series.visible) {
                series.drawPoints();
            }
            // draw the mouse tracking area
            if (series.drawTracker && series.options.enableMouseTracking !== false) {
                series.drawTracker();
            }
            // Handle inverted series and tracker groups
            series.invertGroups(inverted);
            // Initial clipping, must be defined after inverting groups for VML. Applies to columns etc. (#3839).
            if (options.clip !== false && !series.sharedClipKey && !hasRendered) {
                group.clip(chart.clipRect);
            }
            // Run the animation
            if (animDuration) {
                series.animate();
            }
            // Call the afterAnimate function on animation complete (but don't overwrite the animation.complete option
            // which should be available to the user).
            if (!hasRendered) {
                series.animationTimeout = syncTimeout(function() {
                    series.afterAnimate();
                }, animDuration);
            }
            series.isDirty = series.isDirtyData = false; // means data is in accordance with what you see
            // (See #322)
            series.hasRendered = true;
        },
        /**
         * Redraw the series after an update in the axes.
         *
         * Repositions the series group, re-translates and re-renders, and drops
         * the cached kd tree when the data or positions changed (#3868, #3945).
         */
        redraw: function() {
            var series = this,
                chart = series.chart,
                wasDirty = series.isDirty || series.isDirtyData, // cache it here as it is set to false in render, but used after
                group = series.group,
                xAxis = series.xAxis,
                yAxis = series.yAxis;
            // reposition on resize
            if (group) {
                if (chart.inverted) {
                    group.attr({
                        width: chart.plotWidth,
                        height: chart.plotHeight
                    });
                }
                group.animate({
                    translateX: pick(xAxis && xAxis.left, chart.plotLeft),
                    translateY: pick(yAxis && yAxis.top, chart.plotTop)
                });
            }
            series.translate();
            series.render();
            if (wasDirty) { // #3868, #3945
                delete this.kdTree;
            }
        },
        /**
         * KD Tree && PointSearching Implementation
         */
        kdDimensions: 1, // number of dimensions searched (1 = clientX only)
        kdAxisArray: ['clientX', 'plotY'], // point properties used as kd tree axes
        /**
         * Find the point nearest a mouse event, translating the event's chart
         * coordinates into axis-relative clientX/plotY (swapped when the chart
         * is inverted) before searching the kd tree.
         */
        searchPoint: function(e, compareX) {
            var series = this,
                xAxis = series.xAxis,
                yAxis = series.yAxis,
                inverted = series.chart.inverted;
            return this.searchKDTree({
                clientX: inverted ? xAxis.len - e.chartY + xAxis.pos : e.chartX - xAxis.pos,
                plotY: inverted ? yAxis.len - e.chartX + yAxis.pos : e.chartY - yAxis.pos
            }, compareX);
        },
        /**
         * Build the kd tree used for fast nearest-point lookups. The build is
         * deferred with a timeout so it doesn't block interaction; set
         * options.kdNow to build synchronously (used for testing tooltips).
         */
        buildKDTree: function() {
            var series = this,
                dimensions = series.kdDimensions;
            // Internal function
            function _kdtree(points, depth, dimensions) {
                var axis,
                    median,
                    length = points && points.length;
                if (length) {
                    // alternate between the axis
                    axis = series.kdAxisArray[depth % dimensions];
                    // sort point array
                    points.sort(function(a, b) {
                        return a[axis] - b[axis];
                    });
                    median = Math.floor(length / 2);
                    // build and return node
                    return {
                        point: points[median],
                        left: _kdtree(points.slice(0, median), depth + 1, dimensions),
                        right: _kdtree(points.slice(median + 1), depth + 1, dimensions)
                    };
                }
            }
            // Start the recursive build process with a clone of the points array and null points filtered out (#3873)
            function startRecursive() {
                series.kdTree = _kdtree(
                    series.getValidPoints(
                        null, !series.directTouch // For line-type series restrict to plot area, but column-type series not (#3916, #4511)
                    ),
                    dimensions,
                    dimensions
                );
            }
            delete series.kdTree;
            // For testing tooltips, don't build async
            syncTimeout(startRecursive, series.options.kdNow ? 0 : 1);
        },
        /**
         * Search the kd tree for the point nearest to `point`. When compareX is
         * true, compare by one-dimensional x distance (distX); otherwise by the
         * full two-dimensional distance (dist). Builds the tree on demand.
         */
        searchKDTree: function(point, compareX) {
            var series = this,
                kdX = this.kdAxisArray[0],
                kdY = this.kdAxisArray[1],
                kdComparer = compareX ? 'distX' : 'dist';
            // Set the one and two dimensional distance on the point object
            function setDistance(p1, p2) {
                var x = (defined(p1[kdX]) && defined(p2[kdX])) ? Math.pow(p1[kdX] - p2[kdX], 2) : null,
                    y = (defined(p1[kdY]) && defined(p2[kdY])) ? Math.pow(p1[kdY] - p2[kdY], 2) : null,
                    r = (x || 0) + (y || 0);
                p2.dist = defined(r) ? Math.sqrt(r) : Number.MAX_VALUE;
                p2.distX = defined(x) ? Math.sqrt(x) : Number.MAX_VALUE;
            }
            // Recursive nearest-neighbour search
            function _search(search, tree, depth, dimensions) {
                var point = tree.point,
                    axis = series.kdAxisArray[depth % dimensions],
                    tdist,
                    sideA,
                    sideB,
                    ret = point,
                    nPoint1,
                    nPoint2;
                setDistance(search, point);
                // Pick side based on distance to splitting point
                tdist = search[axis] - point[axis];
                sideA = tdist < 0 ? 'left' : 'right';
                sideB = tdist < 0 ? 'right' : 'left';
                // End of tree
                if (tree[sideA]) {
                    nPoint1 = _search(search, tree[sideA], depth + 1, dimensions);
                    ret = (nPoint1[kdComparer] < ret[kdComparer] ? nPoint1 : point);
                }
                if (tree[sideB]) {
                    // compare distance to current best to splitting point to decide whether to check side B or not
                    if (Math.sqrt(tdist * tdist) < ret[kdComparer]) {
                        nPoint2 = _search(search, tree[sideB], depth + 1, dimensions);
                        ret = (nPoint2[kdComparer] < ret[kdComparer] ? nPoint2 : ret);
                    }
                }
                return ret;
            }
            if (!this.kdTree) {
                this.buildKDTree();
            }
            if (this.kdTree) {
                return _search(point,
                    this.kdTree, this.kdDimensions, this.kdDimensions);
            }
        }
}); // end Series prototype
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
animate = H.animate,
Axis = H.Axis,
Chart = H.Chart,
createElement = H.createElement,
css = H.css,
defined = H.defined,
each = H.each,
erase = H.erase,
extend = H.extend,
fireEvent = H.fireEvent,
inArray = H.inArray,
isNumber = H.isNumber,
isObject = H.isObject,
merge = H.merge,
pick = H.pick,
Point = H.Point,
Series = H.Series,
seriesTypes = H.seriesTypes,
setAnimation = H.setAnimation,
splat = H.splat;
// Extend the Chart prototype for dynamic methods
extend(Chart.prototype, {
        /**
         * Add a series dynamically after time
         *
         * @param {Object} options The config options
         * @param {Boolean} redraw Whether to redraw the chart after adding. Defaults to true.
         * @param {Boolean|Object} animation Whether to apply animation, and optionally animation
         * configuration
         *
         * @return {Object} series The newly created series object, or undefined
         * if no options were given or the 'addSeries' event was prevented.
         */
        addSeries: function(options, redraw, animation) {
            var series,
                chart = this;
            if (options) {
                redraw = pick(redraw, true); // defaults to true
                // Fire the event with a default handler of adding the series
                fireEvent(chart, 'addSeries', {
                    options: options
                }, function() {
                    series = chart.initSeries(options);
                    chart.isDirtyLegend = true; // the series array is out of sync with the display
                    chart.linkSeries();
                    if (redraw) {
                        chart.redraw(animation);
                    }
                });
            }
            return series;
        },
        /**
         * Add an axis to the chart
         * @param {Object} options The axis options
         * @param {Boolean} isX Whether it is an X axis or a value axis
         * @param {Boolean} redraw Whether to redraw the chart after adding. Defaults to true.
         * @param {Boolean|Object} animation Whether to apply animation to the redraw, and
         * optionally animation configuration
         */
        addAxis: function(options, isX, redraw, animation) {
            var key = isX ? 'xAxis' : 'yAxis',
                chartOptions = this.options,
                userOptions = merge(options, {
                    index: this[key].length,
                    isX: isX
                });
            new Axis(this, userOptions); // eslint-disable-line no-new
            // Push the new axis options to the chart options
            chartOptions[key] = splat(chartOptions[key] || {});
            chartOptions[key].push(userOptions);
            if (pick(redraw, true)) {
                this.redraw(animation);
            }
        },
        /**
         * Dim the chart and show a loading text or symbol
         * @param {String} str An optional text to show in the loading label instead of the default one
         */
        showLoading: function(str) {
            var chart = this,
                options = chart.options,
                loadingDiv = chart.loadingDiv,
                loadingOptions = options.loading,
                // Size the loading layer to cover the plot area
                setLoadingSize = function() {
                    if (loadingDiv) {
                        css(loadingDiv, {
                            left: chart.plotLeft + 'px',
                            top: chart.plotTop + 'px',
                            width: chart.plotWidth + 'px',
                            height: chart.plotHeight + 'px'
                        });
                    }
                };
            // create the layer at the first call
            if (!loadingDiv) {
                chart.loadingDiv = loadingDiv = createElement('div', {
                    className: 'highcharts-loading highcharts-loading-hidden'
                }, null, chart.container);
                chart.loadingSpan = createElement(
                    'span', {
                        className: 'highcharts-loading-inner'
                    },
                    null,
                    loadingDiv
                );
                // Keep the layer sized to the plot area on redraw (#1080)
                addEvent(chart, 'redraw', setLoadingSize); // #1080
            }
            loadingDiv.className = 'highcharts-loading';
            // Update text
            chart.loadingSpan.innerHTML = str || options.lang.loading;
            chart.loadingShown = true;
            setLoadingSize();
        },
/**
* Hide the loading layer
*/
hideLoading: function() {
var options = this.options,
loadingDiv = this.loadingDiv;
if (loadingDiv) {
loadingDiv.className = 'highcharts-loading highcharts-loading-hidden';
}
this.loadingShown = false;
},
        /**
         * These properties cause isDirtyBox to be set to true when updating
         * (see Chart.update below). Can be extended from plugins.
         */
        propsRequireDirtyBox: ['backgroundColor', 'borderColor', 'borderWidth', 'margin', 'marginTop', 'marginRight',
            'marginBottom', 'marginLeft', 'spacing', 'spacingTop', 'spacingRight', 'spacingBottom', 'spacingLeft',
            'borderRadius', 'plotBackgroundColor', 'plotBackgroundImage', 'plotBorderColor', 'plotBorderWidth',
            'plotShadow', 'shadow'
        ],
        /**
         * These properties cause all series to be updated when updating
         * (see Chart.update below). Can be extended from plugins.
         */
        propsRequireUpdateSeries: ['chart.polar', 'chart.ignoreHiddenSeries', 'chart.type', 'colors', 'plotOptions'],
        /**
         * Chart.update function that takes the whole options structure.
         *
         * Merges top-level chart options, delegates sub-structures to the
         * corresponding objects' own update methods (or adder functions),
         * updates axes and series by id, and finally resizes or redraws.
         *
         * @param {Object} options The options structure to merge in
         * @param {Boolean} redraw Whether to redraw after updating. Defaults to true.
         */
        update: function(options, redraw) {
            var key,
                adders = {
                    credits: 'addCredits',
                    title: 'setTitle',
                    subtitle: 'setSubtitle'
                },
                optionsChart = options.chart,
                updateAllAxes,
                updateAllSeries,
                newWidth,
                newHeight;
            // If the top-level chart option is present, some special updates are required
            if (optionsChart) {
                merge(true, this.options.chart, optionsChart);
                // Setter function
                if ('className' in optionsChart) {
                    this.setClassName(optionsChart.className);
                }
                if ('inverted' in optionsChart || 'polar' in optionsChart) {
                    this.propFromSeries(); // Parses options.chart.inverted and options.chart.polar together with the available series
                    updateAllAxes = true;
                }
                for (key in optionsChart) {
                    if (optionsChart.hasOwnProperty(key)) {
                        if (inArray('chart.' + key, this.propsRequireUpdateSeries) !== -1) {
                            updateAllSeries = true;
                        }
                        // Only dirty box
                        if (inArray(key, this.propsRequireDirtyBox) !== -1) {
                            this.isDirtyBox = true;
                        }
                    }
                }
            }
            // Some option structures correspond one-to-one to chart objects that have
            // update methods, for example
            // options.credits => chart.credits
            // options.legend => chart.legend
            // options.title => chart.title
            // options.tooltip => chart.tooltip
            // options.subtitle => chart.subtitle
            // options.navigator => chart.navigator
            // options.scrollbar => chart.scrollbar
            for (key in options) {
                if (this[key] && typeof this[key].update === 'function') {
                    this[key].update(options[key], false);
                    // If a one-to-one object does not exist, look for an adder function
                } else if (typeof this[adders[key]] === 'function') {
                    this[adders[key]](options[key]);
                }
                if (key !== 'chart' && inArray(key, this.propsRequireUpdateSeries) !== -1) {
                    updateAllSeries = true;
                }
            }
            if (options.plotOptions) {
                merge(true, this.options.plotOptions, options.plotOptions);
            }
            // Setters for collections. For axes and series, each item is referred by an id. If the
            // id is not found, it defaults to the first item in the collection, so setting series
            // without an id, will update the first series in the chart.
            each(['xAxis', 'yAxis', 'series'], function(coll) {
                if (options[coll]) {
                    each(splat(options[coll]), function(newOptions) {
                        var item = (defined(newOptions.id) && this.get(newOptions.id)) || this[coll][0];
                        if (item && item.coll === coll) {
                            item.update(newOptions, false);
                        }
                    }, this);
                }
            }, this);
            if (updateAllAxes) {
                each(this.axes, function(axis) {
                    axis.update({}, false);
                });
            }
            // Certain options require the whole series structure to be thrown away
            // and rebuilt
            if (updateAllSeries) {
                each(this.series, function(series) {
                    series.update({}, false);
                });
            }
            // For loading, just update the options, do not redraw
            if (options.loading) {
                merge(true, this.options.loading, options.loading);
            }
            // Update size. Redraw is forced.
            newWidth = optionsChart && optionsChart.width;
            newHeight = optionsChart && optionsChart.height;
            if ((isNumber(newWidth) && newWidth !== this.chartWidth) ||
                (isNumber(newHeight) && newHeight !== this.chartHeight)) {
                this.setSize(newWidth, newHeight);
            } else if (pick(redraw, true)) {
                this.redraw();
            }
        },
        /**
         * Setter function to allow use from chart.update. Delegates to
         * setTitle, leaving the main title untouched.
         *
         * @param {Object} options The subtitle options
         */
        setSubtitle: function(options) {
            this.setTitle(undefined, options);
        }
});
// extend the Point prototype for dynamic methods
extend(Point.prototype, {
        /**
         * Point.update with new options (typically x/y data) and optionally redraw the series.
         *
         * @param {Object} options Point options as defined in the series.data array
         * @param {Boolean} redraw Whether to redraw the chart or wait for an explicit call
         * @param {Boolean|Object} animation Whether to apply animation, and optionally animation
         * configuration
         * @param {Boolean} runEvent Internal: when false (used when called from
         * setData), skip firing the point's 'update' event
         */
        update: function(options, redraw, animation, runEvent) {
            var point = this,
                series = point.series,
                graphic = point.graphic,
                i,
                chart = series.chart,
                seriesOptions = series.options;
            redraw = pick(redraw, true);
            function update() {
                point.applyOptions(options);
                // Update visuals
                if (point.y === null && graphic) { // #4146
                    point.graphic = graphic.destroy();
                }
                if (isObject(options, true)) {
                    // Destroy so we can get new elements
                    if (graphic && graphic.element) {
                        if (options && options.marker && options.marker.symbol) {
                            point.graphic = graphic.destroy();
                        }
                    }
                    if (options && options.dataLabels && point.dataLabel) { // #2468
                        point.dataLabel = point.dataLabel.destroy();
                    }
                }
                // record changes in the parallel arrays
                i = point.index;
                series.updateParallelArrays(point, i);
                // Record the options to options.data. If there is an object from before,
                // use point options, otherwise use raw options. (#4701)
                seriesOptions.data[i] = isObject(seriesOptions.data[i], true) ? point.options : options;
                // redraw
                series.isDirty = series.isDirtyData = true;
                if (!series.fixedBox && series.hasCartesianSeries) { // #1906, #2320
                    chart.isDirtyBox = true;
                }
                if (seriesOptions.legendType === 'point') { // #1831, #1885
                    chart.isDirtyLegend = true;
                }
                if (redraw) {
                    chart.redraw(animation);
                }
            }
            // Fire the event with a default handler of doing the update
            if (runEvent === false) { // When called from setData
                update();
            } else {
                point.firePointEvent('update', {
                    options: options
                }, update);
            }
        },
/**
* Remove a point and optionally redraw the series and if necessary the axes
* @param {Boolean} redraw Whether to redraw the chart or wait for an explicit call
* @param {Boolean|Object} animation Whether to apply animation, and optionally animation
* configuration
*/
remove: function(redraw, animation) {
this.series.removePoint(inArray(this, this.series.data), redraw, animation);
}
});
// Extend the series prototype for dynamic methods
extend(Series.prototype, {
/**
* Add a point dynamically after chart load time
* @param {Object} options Point options as given in series.data
* @param {Boolean} redraw Whether to redraw the chart or wait for an explicit call
* @param {Boolean} shift If shift is true, a point is shifted off the start
* of the series as one is appended to the end.
* @param {Boolean|Object} animation Whether to apply animation, and optionally animation
* configuration
*/
addPoint: function(options, redraw, shift, animation) {
    var series = this,
        seriesOptions = series.options,
        data = series.data,
        chart = series.chart,
        names = series.xAxis && series.xAxis.names,
        dataOptions = seriesOptions.data,
        point,
        isInTheMiddle,
        xData = series.xData,
        i,
        x;
    // Optional redraw, defaults to true
    redraw = pick(redraw, true);
    // Get options and push the point to xData, yData and series.options. In series.generatePoints
    // the Point instance will be created on demand and pushed to the series.data array.
    point = {
        series: series
    };
    series.pointClass.prototype.applyOptions.apply(point, [options]);
    x = point.x;
    // Get the insertion point
    i = xData.length;
    if (series.requireSorting && x < xData[i - 1]) {
        // The new x is lower than the current last x, so the point must be
        // spliced into the middle to keep xData sorted.
        isInTheMiddle = true;
        while (i && xData[i - 1] > x) {
            i--;
        }
    }
    // First reserve a slot in all parallel arrays (xData, yData, ...),
    // then write the new point's values into that slot.
    series.updateParallelArrays(point, 'splice', i, 0, 0); // insert undefined item
    series.updateParallelArrays(point, i); // update it
    if (names && point.name) {
        names[x] = point.name;
    }
    dataOptions.splice(i, 0, options);
    if (isInTheMiddle) {
        series.data.splice(i, 0, null);
        series.processData();
    }
    // Generate points to be added to the legend (#1329)
    if (seriesOptions.legendType === 'point') {
        series.generatePoints();
    }
    // Shift the first point off the parallel arrays
    if (shift) {
        if (data[0] && data[0].remove) {
            data[0].remove(false);
        } else {
            // No Point instance exists for the first slot yet; drop the
            // raw entries directly.
            data.shift();
            series.updateParallelArrays(point, 'shift');
            dataOptions.shift();
        }
    }
    // redraw
    series.isDirty = true;
    series.isDirtyData = true;
    if (redraw) {
        chart.redraw(animation); // Animation is set anyway on redraw, #5665
    }
},
/**
* Remove a point (rendered or not), by index
*/
removePoint: function(i, redraw, animation) {
    var series = this,
        data = series.data,
        point = data[i],
        points = series.points,
        chart = series.chart,
        // The actual removal, used as the default handler of the point's
        // 'remove' event, or run directly when no Point instance exists
        // for this index (e.g. cropped data).
        remove = function() {
            if (points && points.length === data.length) { // #4935
                points.splice(i, 1);
            }
            data.splice(i, 1);
            series.options.data.splice(i, 1);
            series.updateParallelArrays(point || {
                series: series
            }, 'splice', i, 1);
            if (point) {
                point.destroy();
            }
            // redraw
            series.isDirty = true;
            series.isDirtyData = true;
            if (redraw) {
                chart.redraw();
            }
        };
    setAnimation(animation, chart);
    redraw = pick(redraw, true);
    // Fire the event with a default handler of removing the point
    if (point) {
        point.firePointEvent('remove', null, remove);
    } else {
        remove();
    }
},
/**
* Remove a series and optionally redraw the chart
*
* @param {Boolean} redraw Whether to redraw the chart or wait for an explicit call
* @param {Boolean|Object} animation Whether to apply animation, and optionally animation
* configuration
*/
remove: function(redraw, animation, withEvent) {
var series = this,
chart = series.chart;
function remove() {
// Destroy elements
series.destroy();
// Redraw
chart.isDirtyLegend = chart.isDirtyBox = true;
chart.linkSeries();
if (pick(redraw, true)) {
chart.redraw(animation);
}
}
// Fire the event with a default handler of removing the point
if (withEvent !== false) {
fireEvent(series, 'remove', null, remove);
} else {
remove();
}
},
/**
* Series.update with a new set of options
*/
update: function(newOptions, redraw) {
    var series = this,
        chart = this.chart,
        // must use user options when changing type because this.options is merged
        // in with type specific plotOptions
        oldOptions = this.userOptions,
        oldType = this.type,
        newType = newOptions.type || oldOptions.type || chart.options.chart.type,
        proto = seriesTypes[oldType].prototype,
        preserve = ['group', 'markerGroup', 'dataLabelsGroup'],
        n;
    // If we're changing type or zIndex, create new groups (#3380, #3404)
    if ((newType && newType !== oldType) || newOptions.zIndex !== undefined) {
        preserve.length = 0;
    }
    // Make sure groups are not destroyed (#3094): stash the SVG groups
    // on the preserve array and detach them from the series before remove.
    each(preserve, function(prop) {
        preserve[prop] = series[prop];
        delete series[prop];
    });
    // Do the merge, with some forced options
    newOptions = merge(oldOptions, {
        animation: false,
        index: this.index,
        pointStart: this.xData[0] // when updating after addPoint
    }, {
        data: this.options.data
    }, newOptions);
    // Destroy the series and delete all properties. Reinsert all methods
    // and properties from the new type prototype (#2270, #3719)
    this.remove(false, null, false);
    for (n in proto) {
        this[n] = undefined;
    }
    extend(this, seriesTypes[newType || oldType].prototype);
    // Re-register groups (#3094)
    each(preserve, function(prop) {
        series[prop] = preserve[prop];
    });
    this.init(chart, newOptions);
    chart.linkSeries(); // Links are lost in this.remove (#3028)
    if (pick(redraw, true)) {
        chart.redraw(false);
    }
}
});
// Extend the Axis.prototype for dynamic methods
extend(Axis.prototype, {
/**
* Axis.update with a new options structure
*/
update: function(newOptions, redraw) {
var chart = this.chart;
newOptions = chart.options[this.coll][this.options.index] = merge(this.userOptions, newOptions);
this.destroy(true);
this.init(chart, extend(newOptions, {
events: undefined
}));
chart.isDirtyBox = true;
if (pick(redraw, true)) {
chart.redraw();
}
},
/**
* Remove the axis from the chart
*/
remove: function(redraw) {
var chart = this.chart,
key = this.coll, // xAxis or yAxis
axisSeries = this.series,
i = axisSeries.length;
// Remove associated series (#2687)
while (i--) {
if (axisSeries[i]) {
axisSeries[i].remove(false);
}
}
// Remove the axis
erase(chart.axes, this);
erase(chart[key], this);
chart.options[key].splice(this.options.index, 1);
each(chart[key], function(axis, i) { // Re-index, #1706
axis.options.index = i;
});
this.destroy();
chart.isDirtyBox = true;
if (pick(redraw, true)) {
chart.redraw();
}
},
/**
* Update the axis title by options
*/
setTitle: function(newTitleOptions, redraw) {
this.update({
title: newTitleOptions
}, redraw);
},
/**
* Set new axis categories and optionally redraw
* @param {Array} categories
* @param {Boolean} redraw
*/
setCategories: function(categories, redraw) {
this.update({
categories: categories
}, redraw);
}
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var animObject = H.animObject,
color = H.color,
each = H.each,
extend = H.extend,
isNumber = H.isNumber,
LegendSymbolMixin = H.LegendSymbolMixin,
merge = H.merge,
noop = H.noop,
pick = H.pick,
Series = H.Series,
seriesType = H.seriesType,
stop = H.stop,
svg = H.svg;
/**
* The column series type
*/
seriesType('column', 'line', {
borderRadius: 0,
//colorByPoint: undefined,
groupPadding: 0.2,
//grouping: true,
marker: null, // point options are specified in the base options
pointPadding: 0.1,
//pointWidth: null,
minPointLength: 0,
cropThreshold: 50, // when there are more points, they will not animate out of the chart on xAxis.setExtremes
pointRange: null, // null means auto, meaning 1 in a categorized axis and least distance between points if not categories
states: {
hover: {
halo: false
}
},
dataLabels: {
align: null, // auto
verticalAlign: null, // auto
y: null
},
softThreshold: false,
startFromThreshold: true, // false doesn't work well: http://jsfiddle.net/highcharts/hz8fopan/14/
stickyTracking: false,
tooltip: {
distance: 6
},
threshold: 0,
// Prototype members
}, {
cropShoulder: 0,
directTouch: true, // When tooltip is not shared, this series (and derivatives) requires direct touch/hover. KD-tree does not apply.
trackerGroups: ['group', 'dataLabelsGroup'],
negStacks: true, // use separate negative stacks, unlike area stacks where a negative
// point is subtracted from previous (#1910)
/**
* Initialize the series
*/
init: function() {
Series.prototype.init.apply(this, arguments);
var series = this,
chart = series.chart;
// if the series is added dynamically, force redraw of other
// series affected by a new column
if (chart.hasRendered) {
each(chart.series, function(otherSeries) {
if (otherSeries.type === series.type) {
otherSeries.isDirty = true;
}
});
}
},
/**
* Return the width and x offset of the columns adjusted for grouping, groupPadding, pointPadding,
* pointWidth etc.
*/
getColumnMetrics: function() {
    var series = this,
        options = series.options,
        xAxis = series.xAxis,
        yAxis = series.yAxis,
        reversedXAxis = xAxis.reversed,
        stackKey,
        stackGroups = {},
        columnCount = 0;
    // Get the total number of column type series.
    // This is called on every series. Consider moving this logic to a
    // chart.orderStacks() function and call it on init, addSeries and removeSeries
    if (options.grouping === false) {
        columnCount = 1;
    } else {
        each(series.chart.series, function(otherSeries) {
            var otherOptions = otherSeries.options,
                otherYAxis = otherSeries.yAxis,
                columnIndex;
            if (otherSeries.type === series.type && otherSeries.visible &&
                yAxis.len === otherYAxis.len && yAxis.pos === otherYAxis.pos) { // #642, #2086
                if (otherOptions.stacking) {
                    // Stacked series share one column slot per stack key
                    stackKey = otherSeries.stackKey;
                    if (stackGroups[stackKey] === undefined) {
                        stackGroups[stackKey] = columnCount++;
                    }
                    columnIndex = stackGroups[stackKey];
                } else if (otherOptions.grouping !== false) { // #1162
                    // Each grouped (non-stacked) series gets its own slot
                    columnIndex = columnCount++;
                }
                otherSeries.columnIndex = columnIndex;
            }
        });
    }
    // Translate the category width into pixels and divide it between the
    // participating column series, honoring group and point padding.
    var categoryWidth = Math.min(
            Math.abs(xAxis.transA) * (xAxis.ordinalSlope || options.pointRange || xAxis.closestPointRange || xAxis.tickInterval || 1), // #2610
            xAxis.len // #1535
        ),
        groupPadding = categoryWidth * options.groupPadding,
        groupWidth = categoryWidth - 2 * groupPadding,
        pointOffsetWidth = groupWidth / columnCount,
        pointWidth = Math.min(
            options.maxPointWidth || xAxis.len,
            pick(options.pointWidth, pointOffsetWidth * (1 - 2 * options.pointPadding))
        ),
        pointPadding = (pointOffsetWidth - pointWidth) / 2,
        colIndex = (series.columnIndex || 0) + (reversedXAxis ? 1 : 0), // #1251, #3737
        pointXOffset = pointPadding + (groupPadding + colIndex *
            pointOffsetWidth - (categoryWidth / 2)) *
        (reversedXAxis ? -1 : 1);
    // Save it for reading in linked series (Error bars particularly)
    series.columnMetrics = {
        width: pointWidth,
        offset: pointXOffset
    };
    return series.columnMetrics;
},
/**
* Make the columns crisp. The edges are rounded to the nearest full pixel.
*/
crispCol: function(x, y, w, h) {
var chart = this.chart,
borderWidth = this.borderWidth,
xCrisp = -(borderWidth % 2 ? 0.5 : 0),
yCrisp = borderWidth % 2 ? 0.5 : 1,
right,
bottom,
fromTop;
if (chart.inverted && chart.renderer.isVML) {
yCrisp += 1;
}
// Horizontal. We need to first compute the exact right edge, then round it
// and compute the width from there.
right = Math.round(x + w) + xCrisp;
x = Math.round(x) + xCrisp;
w = right - x;
// Vertical
bottom = Math.round(y + h) + yCrisp;
fromTop = Math.abs(y) <= 0.5 && bottom > 0.5; // #4504, #4656
y = Math.round(y) + yCrisp;
h = bottom - y;
// Top edges are exceptions
if (fromTop && h) { // #5146
y -= 1;
h += 1;
}
return {
x: x,
y: y,
width: w,
height: h
};
},
/**
* Translate each point to the plot area coordinate system and find shape positions
*/
translate: function() {
    var series = this,
        chart = series.chart,
        options = series.options,
        dense = series.dense = series.closestPointRange * series.xAxis.transA < 2,
        borderWidth = series.borderWidth = pick(
            options.borderWidth,
            dense ? 0 : 1 // #3635
        ),
        yAxis = series.yAxis,
        threshold = options.threshold,
        translatedThreshold = series.translatedThreshold = yAxis.getThreshold(threshold),
        minPointLength = pick(options.minPointLength, 5),
        metrics = series.getColumnMetrics(),
        pointWidth = metrics.width,
        seriesBarW = series.barW = Math.max(pointWidth, 1 + 2 * borderWidth), // postprocessed for border width
        pointXOffset = series.pointXOffset = metrics.offset;
    if (chart.inverted) {
        translatedThreshold -= 0.5; // #3355
    }
    // When the pointPadding is 0, we want the columns to be packed tightly, so we allow individual
    // columns to have individual sizes. When pointPadding is greater, we strive for equal-width
    // columns (#2694).
    if (options.pointPadding) {
        seriesBarW = Math.ceil(seriesBarW);
    }
    Series.prototype.translate.apply(series);
    // Record the new values
    each(series.points, function(point) {
        var yBottom = pick(point.yBottom, translatedThreshold),
            safeDistance = 999 + Math.abs(yBottom),
            plotY = Math.min(Math.max(-safeDistance, point.plotY), yAxis.len + safeDistance), // Don't draw too far outside plot area (#1303, #2241, #4264)
            barX = point.plotX + pointXOffset,
            barW = seriesBarW,
            barY = Math.min(plotY, yBottom),
            up,
            barH = Math.max(plotY, yBottom) - barY;
        // Handle options.minPointLength: stretch very short columns so
        // they stay visible.
        if (Math.abs(barH) < minPointLength) {
            if (minPointLength) {
                barH = minPointLength;
                up = (!yAxis.reversed && !point.negative) || (yAxis.reversed && point.negative);
                barY = Math.abs(barY - translatedThreshold) > minPointLength ? // stacked
                    yBottom - minPointLength : // keep position
                    translatedThreshold - (up ? minPointLength : 0); // #1485, #4051
            }
        }
        // Cache for access in polar
        point.barX = barX;
        point.pointWidth = pointWidth;
        // Fix the tooltip on center of grouped columns (#1216, #424, #3648)
        point.tooltipPos = chart.inverted ? [yAxis.len + yAxis.pos - chart.plotLeft - plotY, series.xAxis.len - barX - barW / 2, barH] : [barX + barW / 2, plotY + yAxis.pos - chart.plotTop, barH];
        // Register shape type and arguments to be used in drawPoints
        point.shapeType = 'rect';
        point.shapeArgs = series.crispCol.apply(
            series,
            point.isNull ? [point.plotX, yAxis.len / 2, 0, 0] : // #3169, drilldown from null must have a position to work from
            [barX, barY, barW, barH]
        );
    });
},
getSymbol: noop,
/**
* Use a solid rectangle like the area series types
*/
drawLegendSymbol: LegendSymbolMixin.drawRectangle,
/**
* Columns have no graph
*/
drawGraph: function() {
this.group[this.dense ? 'addClass' : 'removeClass']('highcharts-dense-data');
},
/**
* Draw the columns. For bars, the series.group is rotated, so the same coordinates
* apply for columns and bars. This method is inherited by scatter series.
*
*/
drawPoints: function() {
var series = this,
chart = this.chart,
options = series.options,
renderer = chart.renderer,
animationLimit = options.animationLimit || 250,
shapeArgs;
// draw the columns
each(series.points, function(point) {
var plotY = point.plotY,
graphic = point.graphic;
if (isNumber(plotY) && point.y !== null) {
shapeArgs = point.shapeArgs;
if (graphic) { // update
stop(graphic);
graphic[chart.pointCount < animationLimit ? 'animate' : 'attr'](
merge(shapeArgs)
);
} else {
point.graphic = graphic = renderer[point.shapeType](shapeArgs)
.attr({
'class': point.getClassName()
})
.add(point.group || series.group);
}
} else if (graphic) {
point.graphic = graphic.destroy(); // #1269
}
});
},
/**
* Animate the column heights one by one from zero
* @param {Boolean} init Whether to initialize the animation or run it
*/
animate: function(init) {
    var series = this,
        yAxis = this.yAxis,
        options = series.options,
        inverted = this.chart.inverted,
        attr = {},
        translatedThreshold;
    if (svg) { // VML is too slow anyway
        if (init) {
            // Phase 1: collapse the column group onto the threshold line
            // by scaling Y to (almost) zero.
            attr.scaleY = 0.001;
            translatedThreshold = Math.min(yAxis.pos + yAxis.len, Math.max(yAxis.pos, yAxis.toPixels(options.threshold)));
            if (inverted) {
                attr.translateX = translatedThreshold - yAxis.len;
            } else {
                attr.translateY = translatedThreshold;
            }
            series.group.attr(attr);
        } else { // run the animation
            // Phase 2: translate back to the axis position while scaling
            // the columns up to full height.
            attr[inverted ? 'translateX' : 'translateY'] = yAxis.pos;
            series.group.animate(attr, extend(animObject(series.options.animation), {
                // Do the scale synchronously to ensure smooth updating (#5030)
                step: function(val, fx) {
                    series.group.attr({
                        scaleY: Math.max(0.001, fx.pos) // #5250
                    });
                }
            }));
            // delete this function to allow it only once
            series.animate = null;
        }
    }
},
/**
* Remove this series from the chart
*/
remove: function() {
var series = this,
chart = series.chart;
// column and bar series affects other series of the same type
// as they are either stacked or grouped
if (chart.hasRendered) {
each(chart.series, function(otherSeries) {
if (otherSeries.type === series.type) {
otherSeries.isDirty = true;
}
});
}
Series.prototype.remove.apply(series, arguments);
}
});
}(Highcharts));
(function(H) {
    /**
     * (c) 2010-2016 Torstein Honsi
     *
     * License: www.highcharts.com/license
     */
    'use strict';
    var Series = H.Series,
        seriesType = H.seriesType;
    /**
     * The scatter series type. Inherits from the line series, with markers
     * enabled and the connecting line suppressed by default.
     */
    seriesType('scatter', 'line', {
        lineWidth: 0,
        marker: {
            enabled: true // Overrides auto-enabling in line series (#3647)
        },
        tooltip: {
            headerFormat: '<span style="color:{point.color}">\u25CF</span> <span style="font-size: 0.85em"> {series.name}</span><br/>',
            pointFormat: 'x: <b>{point.x}</b><br/>y: <b>{point.y}</b><br/>'
        }
        // Prototype members
    }, {
        sorted: false,
        requireSorting: false,
        noSharedTooltip: true,
        trackerGroups: ['group', 'markerGroup', 'dataLabelsGroup'],
        takeOrdinalPosition: false, // #2342
        kdDimensions: 2,
        // Only draw the connecting graph when a lineWidth is set explicitly
        drawGraph: function() {
            if (this.options.lineWidth) {
                Series.prototype.drawGraph.call(this);
            }
        }
    });
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
arrayMax = H.arrayMax,
defined = H.defined,
each = H.each,
extend = H.extend,
format = H.format,
map = H.map,
merge = H.merge,
noop = H.noop,
pick = H.pick,
relativeLength = H.relativeLength,
Series = H.Series,
seriesTypes = H.seriesTypes,
stableSort = H.stableSort,
stop = H.stop;
/**
* General distribution algorithm for distributing labels of differing size along a
* confined length in two dimensions. The algorithm takes an array of objects containing
* a size, a target and a rank. It will place the labels as close as possible to their
* targets, skipping the lowest ranked labels if necessary.
*/
H.distribute = function(boxes, len) {
    var i,
        overlapping = true,
        origBoxes = boxes, // Original array will be altered with added .pos
        restBoxes = [], // The outranked overshoot
        box,
        target,
        total = 0;
    function sortByTarget(a, b) {
        return a.target - b.target;
    }
    // If the total size exceeds the len, remove those boxes with the lowest rank
    i = boxes.length;
    while (i--) {
        total += boxes[i].size;
    }
    // Sort by rank, then slice away overshoot
    if (total > len) {
        stableSort(boxes, function(a, b) {
            return (b.rank || 0) - (a.rank || 0);
        });
        i = 0;
        total = 0;
        while (total <= len) {
            total += boxes[i].size;
            i++;
        }
        restBoxes = boxes.splice(i - 1, boxes.length);
    }
    // Order by target
    stableSort(boxes, sortByTarget);
    // So far we have been mutating the original array. Now
    // create a copy with target arrays
    boxes = map(boxes, function(box) {
        return {
            size: box.size,
            targets: [box.target]
        };
    });
    // Iteratively merge overlapping boxes into composite boxes until no
    // neighbouring pair overlaps.
    while (overlapping) {
        // Initial positions: target centered in box
        i = boxes.length;
        while (i--) {
            box = boxes[i];
            // Composite box, average of targets
            target = (Math.min.apply(0, box.targets) + Math.max.apply(0, box.targets)) / 2;
            // Clamp the box inside [0, len]
            box.pos = Math.min(Math.max(0, target - box.size / 2), len - box.size);
        }
        // Detect overlap and join boxes
        i = boxes.length;
        overlapping = false;
        while (i--) {
            if (i > 0 && boxes[i - 1].pos + boxes[i - 1].size > boxes[i].pos) { // Overlap
                boxes[i - 1].size += boxes[i].size; // Add this size to the previous box
                boxes[i - 1].targets = boxes[i - 1].targets.concat(boxes[i].targets);
                // Overlapping right, push left
                if (boxes[i - 1].pos + boxes[i - 1].size > len) {
                    boxes[i - 1].pos = len - boxes[i - 1].size;
                }
                boxes.splice(i, 1); // Remove this item
                overlapping = true;
            }
        }
    }
    // Now the composite boxes are placed, we need to put the original boxes within them
    i = 0;
    each(boxes, function(box) {
        var posInCompositeBox = 0;
        each(box.targets, function() {
            origBoxes[i].pos = box.pos + posInCompositeBox;
            posInCompositeBox += origBoxes[i].size;
            i++;
        });
    });
    // Add the rest (hidden) boxes and sort by target
    origBoxes.push.apply(origBoxes, restBoxes);
    stableSort(origBoxes, sortByTarget);
};
/**
* Draw the data labels
*/
Series.prototype.drawDataLabels = function() {
    var series = this,
        seriesOptions = series.options,
        options = seriesOptions.dataLabels,
        points = series.points,
        pointOptions,
        generalOptions,
        hasRendered = series.hasRendered || 0,
        str,
        dataLabelsGroup,
        defer = pick(options.defer, true),
        renderer = series.chart.renderer;
    if (options.enabled || series._hasPointLabels) {
        // Process default alignment of data labels for columns
        if (series.dlProcessOptions) {
            series.dlProcessOptions(options);
        }
        // Create a separate group for the data labels to avoid rotation
        dataLabelsGroup = series.plotGroup(
            'dataLabelsGroup',
            'data-labels',
            defer && !hasRendered ? 'hidden' : 'visible', // #5133
            options.zIndex || 6
        );
        if (defer) {
            dataLabelsGroup.attr({
                opacity: +hasRendered
            }); // #3300
            if (!hasRendered) {
                // Fade the labels in once the initial series animation ends
                addEvent(series, 'afterAnimate', function() {
                    if (series.visible) { // #2597, #3023, #3024
                        dataLabelsGroup.show(true);
                    }
                    dataLabelsGroup[seriesOptions.animation ? 'animate' : 'attr']({
                        opacity: 1
                    }, {
                        duration: 200
                    });
                });
            }
        }
        // Make the labels for each point
        generalOptions = options;
        each(points, function(point) {
            var enabled,
                dataLabel = point.dataLabel,
                labelConfig,
                attr,
                name,
                rotation,
                connector = point.connector,
                isNew = true,
                style,
                moreStyle = {};
            // Determine if each data label is enabled
            pointOptions = point.dlOptions || (point.options && point.options.dataLabels); // dlOptions is used in treemaps
            enabled = pick(pointOptions && pointOptions.enabled, generalOptions.enabled) && point.y !== null; // #2282, #4641
            // If the point is outside the plot area, destroy it. #678, #820
            if (dataLabel && !enabled) {
                point.dataLabel = dataLabel.destroy();
                // Individual labels are disabled if they are explicitly disabled
                // in the point options, or if they fall outside the plot area.
            } else if (enabled) {
                // Create individual options structure that can be extended without
                // affecting others
                options = merge(generalOptions, pointOptions);
                style = options.style;
                rotation = options.rotation;
                // Get the string
                labelConfig = point.getLabelConfig();
                str = options.format ?
                    format(options.format, labelConfig) :
                    options.formatter.call(labelConfig, options);
                // update existing label
                if (dataLabel) {
                    if (defined(str)) {
                        dataLabel
                            .attr({
                                text: str
                            });
                        isNew = false;
                    } else { // #1437 - the label is shown conditionally
                        point.dataLabel = dataLabel = dataLabel.destroy();
                        if (connector) {
                            point.connector = connector.destroy();
                        }
                    }
                    // create new label
                } else if (defined(str)) {
                    attr = {
                        //align: align,
                        r: options.borderRadius || 0,
                        rotation: rotation,
                        padding: options.padding,
                        zIndex: 1
                    };
                    // Remove unused attributes (#947)
                    for (name in attr) {
                        if (attr[name] === undefined) {
                            delete attr[name];
                        }
                    }
                    dataLabel = point.dataLabel = renderer[rotation ? 'text' : 'label']( // labels don't support rotation
                        str,
                        0, -9999,
                        options.shape,
                        null,
                        null,
                        options.useHTML,
                        null,
                        'data-label'
                    )
                        .attr(attr);
                    dataLabel.addClass('highcharts-data-label-color-' + point.colorIndex + ' ' + (options.className || ''));
                    dataLabel.add(dataLabelsGroup);
                }
                if (dataLabel) {
                    // Now the data label is created and placed at 0,0, so we need to align it
                    series.alignDataLabel(point, dataLabel, options, null, isNew);
                }
            }
        });
    }
};
/**
* Align each individual data label
*/
Series.prototype.alignDataLabel = function(point, dataLabel, options, alignTo, isNew) {
    var chart = this.chart,
        inverted = chart.inverted,
        plotX = pick(point.plotX, -9999),
        plotY = pick(point.plotY, -9999),
        bBox = dataLabel.getBBox(),
        fontSize,
        baseline,
        rotation = options.rotation,
        normRotation,
        negRotation,
        align = options.align,
        rotCorr, // rotation correction
        // Math.round for rounding errors (#2683), alignTo to allow column labels (#2700)
        visible = this.visible && (point.series.forceDL || chart.isInsidePlot(plotX, Math.round(plotY), inverted) ||
            (alignTo && chart.isInsidePlot(plotX, inverted ? alignTo.x + 1 : alignTo.y + alignTo.height - 1, inverted))),
        alignAttr, // the final position
        justify = pick(options.overflow, 'justify') === 'justify';
    if (visible) {
        baseline = chart.renderer.fontMetrics(fontSize, dataLabel).b;
        // The alignment box is a singular point
        alignTo = extend({
            x: inverted ? chart.plotWidth - plotY : plotX,
            y: Math.round(inverted ? chart.plotHeight - plotX : plotY),
            width: 0,
            height: 0
        }, alignTo);
        // Add the text size for alignment calculation
        extend(options, {
            width: bBox.width,
            height: bBox.height
        });
        // Allow a hook for changing alignment in the last moment, then do the alignment
        if (rotation) {
            justify = false; // Not supported for rotated text
            rotCorr = chart.renderer.rotCorr(baseline, rotation); // #3723
            alignAttr = {
                x: alignTo.x + options.x + alignTo.width / 2 + rotCorr.x,
                y: alignTo.y + options.y + {
                    top: 0,
                    middle: 0.5,
                    bottom: 1
                }[options.verticalAlign] * alignTo.height
            };
            dataLabel[isNew ? 'attr' : 'animate'](alignAttr)
                .attr({ // #3003
                    align: align
                });
            // Compensate for the rotated label sticking out on the sides
            normRotation = (rotation + 720) % 360;
            negRotation = normRotation > 180 && normRotation < 360;
            if (align === 'left') {
                alignAttr.y -= negRotation ? bBox.height : 0;
            } else if (align === 'center') {
                alignAttr.x -= bBox.width / 2;
                alignAttr.y -= bBox.height / 2;
            } else if (align === 'right') {
                alignAttr.x -= bBox.width;
                alignAttr.y -= negRotation ? 0 : bBox.height;
            }
        } else {
            // Non-rotated labels use the renderer's generic align
            dataLabel.align(options, null, alignTo);
            alignAttr = dataLabel.alignAttr;
        }
        // Handle justify or crop
        if (justify) {
            this.justifyDataLabel(dataLabel, options, alignAttr, bBox, alignTo, isNew);
            // Now check that the data label is within the plot area
        } else if (pick(options.crop, true)) {
            visible = chart.isInsidePlot(alignAttr.x, alignAttr.y) && chart.isInsidePlot(alignAttr.x + bBox.width, alignAttr.y + bBox.height);
        }
        // When we're using a shape, make it possible with a connector or an arrow pointing to this point
        if (options.shape && !rotation) {
            dataLabel.attr({
                anchorX: point.plotX,
                anchorY: point.plotY
            });
        }
    }
    // Show or hide based on the final aligned position
    if (!visible) {
        stop(dataLabel);
        dataLabel.attr({
            y: -9999
        });
        dataLabel.placed = false; // don't animate back in
    }
};
/**
* If data labels fall partly outside the plot area, align them back in, in a way that
* doesn't hide the point.
*/
Series.prototype.justifyDataLabel = function(dataLabel, options, alignAttr, bBox, alignTo, isNew) {
    var chart = this.chart,
        align = options.align,
        verticalAlign = options.verticalAlign,
        edge,
        adjusted,
        padding = dataLabel.box ? 0 : (dataLabel.padding || 0);
    // Spilling over the left plot edge: flip right-aligned labels to
    // left alignment, otherwise nudge the label back inside.
    edge = alignAttr.x + padding;
    if (edge < 0) {
        if (align === 'right') {
            options.align = 'left';
        } else {
            options.x = -edge;
        }
        adjusted = true;
    }
    // Spilling over the right plot edge
    edge = alignAttr.x + bBox.width - padding;
    if (edge > chart.plotWidth) {
        if (align === 'left') {
            options.align = 'right';
        } else {
            options.x = chart.plotWidth - edge;
        }
        adjusted = true;
    }
    // Spilling over the top
    edge = alignAttr.y + padding;
    if (edge < 0) {
        if (verticalAlign === 'bottom') {
            options.verticalAlign = 'top';
        } else {
            options.y = -edge;
        }
        adjusted = true;
    }
    // Spilling over the bottom
    edge = alignAttr.y + bBox.height - padding;
    if (edge > chart.plotHeight) {
        if (verticalAlign === 'top') {
            options.verticalAlign = 'bottom';
        } else {
            options.y = chart.plotHeight - edge;
        }
        adjusted = true;
    }
    // Re-run the alignment with the corrected options
    if (adjusted) {
        dataLabel.placed = !isNew;
        dataLabel.align(options, null, alignTo);
    }
};
/**
* Override the base drawDataLabels method by pie specific functionality
*/
if (seriesTypes.pie) {
seriesTypes.pie.prototype.drawDataLabels = function() {
    var series = this,
        data = series.data,
        point,
        chart = series.chart,
        options = series.options.dataLabels,
        connectorPadding = pick(options.connectorPadding, 10),
        connectorWidth = pick(options.connectorWidth, 1),
        plotWidth = chart.plotWidth,
        plotHeight = chart.plotHeight,
        connector,
        distanceOption = options.distance,
        seriesCenter = series.center,
        radius = seriesCenter[2] / 2,
        centerY = seriesCenter[1],
        outside = distanceOption > 0,
        dataLabel,
        dataLabelWidth,
        labelPos,
        labelHeight,
        halves = [ // divide the points into right and left halves for anti collision
            [], // right
            [] // left
        ],
        x,
        y,
        visibility,
        j,
        overflow = [0, 0, 0, 0]; // top, right, bottom, left
    // get out if not enabled
    if (!series.visible || (!options.enabled && !series._hasPointLabels)) {
        return;
    }
    // run parent method
    Series.prototype.drawDataLabels.apply(series);
    each(data, function(point) {
        if (point.dataLabel && point.visible) { // #407, #2510
            // Arrange points for detection collision
            halves[point.half].push(point);
            // Reset positions (#4905)
            point.dataLabel._pos = null;
        }
    });
    /* Loop over the points in each half, starting from the top and bottom
     * of the pie to detect overlapping labels.
     */
    each(halves, function(points, i) {
        var top,
            bottom,
            length = points.length,
            positions,
            naturalY,
            size;
        if (!length) {
            return;
        }
        // Sort by angle
        series.sortByAngle(points, i - 0.5);
        // Only do anti-collision when we are outside the pie and have connectors (#856)
        if (distanceOption > 0) {
            top = Math.max(0, centerY - radius - distanceOption);
            bottom = Math.min(centerY + radius + distanceOption, chart.plotHeight);
            positions = map(points, function(point) {
                if (point.dataLabel) {
                    size = point.dataLabel.getBBox().height || 21;
                    return {
                        target: point.labelPos[1] - top + size / 2,
                        size: size,
                        rank: point.y
                    };
                }
            });
            H.distribute(positions, bottom + size - top);
        }
        // now the used slots are sorted, fill them up sequentially
        for (j = 0; j < length; j++) {
            point = points[j];
            labelPos = point.labelPos;
            dataLabel = point.dataLabel;
            visibility = point.visible === false ? 'hidden' : 'inherit';
            naturalY = labelPos[1];
            if (positions) {
                // Points dropped by the distributor get a hidden label
                if (positions[j].pos === undefined) {
                    visibility = 'hidden';
                } else {
                    labelHeight = positions[j].size;
                    y = top + positions[j].pos;
                }
            } else {
                y = naturalY;
            }
            // get the x - use the natural x position for labels near the top and bottom, to prevent the top
            // and bottom slice connectors from touching each other on either side
            if (options.justify) {
                x = seriesCenter[0] + (i ? -1 : 1) * (radius + distanceOption);
            } else {
                x = series.getX(y < top + 2 || y > bottom - 2 ? naturalY : y, i);
            }
            // Record the placement and visibility
            dataLabel._attr = {
                visibility: visibility,
                align: labelPos[6]
            };
            dataLabel._pos = {
                x: x + options.x +
                    ({
                        left: connectorPadding,
                        right: -connectorPadding
                    }[labelPos[6]] || 0),
                y: y + options.y - 10 // 10 is for the baseline (label vs text)
            };
            labelPos.x = x;
            labelPos.y = y;
            // Detect overflowing data labels
            if (series.options.size === null) {
                dataLabelWidth = dataLabel.width;
                // Overflow left
                if (x - dataLabelWidth < connectorPadding) {
                    overflow[3] = Math.max(Math.round(dataLabelWidth - x + connectorPadding), overflow[3]);
                    // Overflow right
                } else if (x + dataLabelWidth > plotWidth - connectorPadding) {
                    overflow[1] = Math.max(Math.round(x + dataLabelWidth - plotWidth + connectorPadding), overflow[1]);
                }
                // Overflow top
                if (y - labelHeight / 2 < 0) {
                    overflow[0] = Math.max(Math.round(-y + labelHeight / 2), overflow[0]);
                    // Overflow bottom
                } else if (y + labelHeight / 2 > plotHeight) {
                    overflow[2] = Math.max(Math.round(y + labelHeight / 2 - plotHeight), overflow[2]);
                }
            }
        } // for each point
    }); // for each half
    // Do not apply the final placement and draw the connectors until we have verified
    // that labels are not spilling over.
    if (arrayMax(overflow) === 0 || this.verifyDataLabelOverflow(overflow)) {
        // Place the labels in the final position
        this.placeDataLabels();
        // Draw the connectors
        if (outside && connectorWidth) {
            each(this.points, function(point) {
                var isNew;
                connector = point.connector;
                dataLabel = point.dataLabel;
                if (dataLabel && dataLabel._pos && point.visible) {
                    visibility = dataLabel._attr.visibility;
                    isNew = !connector;
                    if (isNew) {
                        point.connector = connector = chart.renderer.path()
                            .addClass('highcharts-data-label-connector highcharts-color-' + point.colorIndex)
                            .add(series.dataLabelsGroup);
                    }
                    connector[isNew ? 'attr' : 'animate']({
                        d: series.connectorPath(point.labelPos)
                    });
                    connector.attr('visibility', visibility);
                } else if (connector) {
                    point.connector = connector.destroy();
                }
            });
        }
    }
};
/**
 * Extendable method for getting the path of the connector between the data
 * label and the pie slice.
 *
 * @param {Array} labelPos - Connector anchor points computed by translate;
 *        also carries .x/.y for the final label position and [6] = alignment.
 * @returns {Array} SVG path array for the connector line.
 */
seriesTypes.pie.prototype.connectorPath = function(labelPos) {
    var labelX = labelPos.x,
        labelY = labelPos.y,
        // Start 5px in from the label edge, towards the slice, depending on
        // which side of the pie the label sits
        startX = labelX + (labelPos[6] === 'left' ? 5 : -5),
        breakX = labelPos[2],
        breakY = labelPos[3],
        baseX = labelPos[4],
        baseY = labelPos[5];
    if (pick(this.options.softConnector, true)) {
        // Soft connector: cubic bezier from the label to the break point,
        // then a straight segment to the slice base
        return [
            'M', startX, labelY,
            'C', labelX, labelY,
            2 * breakX - baseX, 2 * breakY - baseY,
            breakX, breakY,
            'L', baseX, baseY
        ];
    }
    // Hard connector: two straight line segments
    return [
        'M', startX, labelY,
        'L', breakX, breakY,
        'L', baseX, baseY
    ];
};
/**
 * Perform the final placement of the data labels after we have verified that
 * they fall within the plot area. Labels with a computed position are moved
 * there; labels without one are parked off-screen.
 */
seriesTypes.pie.prototype.placeDataLabels = function() {
    each(this.points, function(point) {
        var label = point.dataLabel,
            finalPos;
        if (!label || !point.visible) {
            return;
        }
        finalPos = label._pos;
        if (finalPos) {
            label.attr(label._attr);
            // Animate on subsequent placements, set directly the first time
            label[label.moved ? 'animate' : 'attr'](finalPos);
            label.moved = true;
        } else {
            // No position was computed - move the label out of view
            label.attr({
                y: -9999
            });
        }
    });
};
// Pie labels are positioned by drawDataLabels/placeDataLabels above, so the
// generic per-point alignment from Series is disabled entirely.
seriesTypes.pie.prototype.alignDataLabel = noop;
/**
 * Verify whether the data labels are allowed to draw, or we should run more translation and data
 * label positioning to keep them inside the plot area. Returns true when data labels are ready
 * to draw.
 *
 * @param {Array} overflow - Pixel overshoot per edge: [top, right, bottom, left].
 * @returns {Boolean|undefined} true when the pie and labels fit; undefined when
 *          the pie was shrunk and translate/drawDataLabels were re-run.
 */
seriesTypes.pie.prototype.verifyDataLabelOverflow = function(overflow) {
    var center = this.center, // [x, y, diameter, innerDiameter]
        options = this.options,
        centerOption = options.center,
        minSize = options.minSize || 80, // never shrink below this diameter
        newSize = minSize,
        ret;
    // Handle horizontal size and center
    if (centerOption[0] !== null) { // Fixed center
        newSize = Math.max(center[2] - Math.max(overflow[1], overflow[3]), minSize);
    } else { // Auto center
        newSize = Math.max(
            center[2] - overflow[1] - overflow[3], // horizontal overflow
            minSize
        );
        center[0] += (overflow[3] - overflow[1]) / 2; // horizontal center
    }
    // Handle vertical size and center
    if (centerOption[1] !== null) { // Fixed center
        newSize = Math.max(Math.min(newSize, center[2] - Math.max(overflow[0], overflow[2])), minSize);
    } else { // Auto center
        newSize = Math.max(
            Math.min(
                newSize,
                center[2] - overflow[0] - overflow[2] // vertical overflow
            ),
            minSize
        );
        center[1] += (overflow[0] - overflow[2]) / 2; // vertical center
    }
    // If the size must be decreased, we need to run translate and drawDataLabels again
    if (newSize < center[2]) {
        center[2] = newSize;
        center[3] = Math.min(relativeLength(options.innerSize || 0, newSize), newSize); // #3632
        this.translate(center);
        if (this.drawDataLabels) {
            // Re-entrant call: label positions are recomputed for the
            // smaller pie and this method may run again
            this.drawDataLabels();
        }
        // Else, return true to indicate that the pie and its labels is within the plot area
    } else {
        ret = true;
    }
    return ret;
};
}
if (seriesTypes.column) {
    /**
     * Override the basic data label alignment by adjusting for the position of the column.
     * Interface matches Series.prototype.alignDataLabel, which is called at the end.
     */
    seriesTypes.column.prototype.alignDataLabel = function(point, dataLabel, options, alignTo, isNew) {
        var inverted = this.chart.inverted,
            series = point.series,
            dlBox = point.dlBox || point.shapeArgs, // data label box for alignment
            below = pick(point.below, point.plotY > pick(this.translatedThreshold, series.yAxis.len)), // point.below is used in range series
            inside = pick(options.inside, !!this.options.stacking), // draw it inside the box?
            overshoot;
        // Align to the column itself, or the top of it
        if (dlBox) { // Area range uses this method but not alignTo
            alignTo = merge(dlBox);
            // Clamp the alignment box to the plot area at both ends of the
            // value axis so labels don't align against clipped geometry
            if (alignTo.y < 0) {
                alignTo.height += alignTo.y;
                alignTo.y = 0;
            }
            overshoot = alignTo.y + alignTo.height - series.yAxis.len;
            if (overshoot > 0) {
                alignTo.height -= overshoot;
            }
            // Swap x/y and width/height for inverted (bar) charts
            if (inverted) {
                alignTo = {
                    x: series.yAxis.len - alignTo.y - alignTo.height,
                    y: series.xAxis.len - alignTo.x - alignTo.width,
                    width: alignTo.height,
                    height: alignTo.width
                };
            }
            // Compute the alignment box
            if (!inside) {
                // Collapse the box to a zero-size edge at the outer end of
                // the column so the label is placed just outside it
                if (inverted) {
                    alignTo.x += below ? 0 : alignTo.width;
                    alignTo.width = 0;
                } else {
                    alignTo.y += below ? alignTo.height : 0;
                    alignTo.height = 0;
                }
            }
        }
        // When alignment is undefined (typically columns and bars), display the individual
        // point below or above the point depending on the threshold
        options.align = pick(
            options.align, !inverted || inside ? 'center' : below ? 'right' : 'left'
        );
        options.verticalAlign = pick(
            options.verticalAlign,
            inverted || inside ? 'middle' : below ? 'top' : 'bottom'
        );
        // Call the parent method
        Series.prototype.alignDataLabel.call(this, point, dataLabel, options, alignTo, isNew);
    };
}
}(Highcharts));
(function(H) {
/**
* (c) 2009-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
/**
* Highcharts module to hide overlapping data labels. This module is included in Highcharts.
*/
var Chart = H.Chart,
each = H.each,
pick = H.pick,
addEvent = H.addEvent;
// Collect potential overlapping data labels. Stack labels probably don't need to be
// considered because they are usually accompanied by data labels that lie inside the columns.
Chart.prototype.callbacks.push(function(chart) {
    // Gather every visible data label on the chart and run the
    // overlap-hiding pass over them
    function collectAndHide() {
        var labels = [];
        each(chart.series, function(series) {
            var dlOptions = series.options.dataLabels,
                collections = series.dataLabelCollections || ['dataLabel']; // Range series have two collections
            if ((dlOptions.enabled || series._hasPointLabels) && !dlOptions.allowOverlap && series.visible) { // #3866
                each(collections, function(coll) {
                    each(series.points, function(point) {
                        if (point[coll]) {
                            // Rank decides which of two overlapping labels
                            // survives; fall back to the shape height
                            point[coll].labelrank = pick(point.labelrank, point.shapeArgs && point.shapeArgs.height); // #4118
                            labels.push(point[coll]);
                        }
                    });
                });
            }
        });
        chart.hideOverlappingLabels(labels);
    }
    // Do it now ...
    collectAndHide();
    // ... and after each chart redraw
    addEvent(chart, 'redraw', collectAndHide);
});
/**
 * Hide overlapping labels. Labels are moved and faded in and out on zoom to provide a smooth
 * visual impression.
 *
 * @param {Array} labels - SVG label elements collected from all series.
 */
Chart.prototype.hideOverlappingLabels = function(labels) {
    var len = labels.length,
        label,
        i,
        j,
        label1,
        label2,
        isIntersecting,
        pos1,
        pos2,
        parent1,
        parent2,
        padding,
        // Axis-aligned bounding-box overlap test
        intersectRect = function(x1, y1, w1, h1, x2, y2, w2, h2) {
            return !(
                x2 > x1 + w1 ||
                x2 + w2 < x1 ||
                y2 > y1 + h1 ||
                y2 + h2 < y1
            );
        };
    // Mark with initial opacity
    for (i = 0; i < len; i++) {
        label = labels[i];
        if (label) {
            label.oldOpacity = label.opacity;
            label.newOpacity = 1;
        }
    }
    // Prevent a situation in a gradually rising slope, that each label
    // will hide the previous one because the previous one always has
    // lower rank.
    labels.sort(function(a, b) {
        return (b.labelrank || 0) - (a.labelrank || 0);
    });
    // Detect overlapping labels: O(n^2) pairwise test over placed,
    // still-visible labels
    for (i = 0; i < len; i++) {
        label1 = labels[i];
        for (j = i + 1; j < len; ++j) {
            label2 = labels[j];
            if (label1 && label2 && label1.placed && label2.placed && label1.newOpacity !== 0 && label2.newOpacity !== 0) {
                pos1 = label1.alignAttr;
                pos2 = label2.alignAttr;
                parent1 = label1.parentGroup; // Different panes have different positions
                parent2 = label2.parentGroup;
                padding = 2 * (label1.box ? 0 : label1.padding); // Subtract the padding if no background or border (#4333)
                isIntersecting = intersectRect(
                    pos1.x + parent1.translateX,
                    pos1.y + parent1.translateY,
                    label1.width - padding,
                    label1.height - padding,
                    pos2.x + parent2.translateX,
                    pos2.y + parent2.translateY,
                    label2.width - padding,
                    label2.height - padding
                );
                if (isIntersecting) {
                    // The lower-ranked label of the overlapping pair loses
                    (label1.labelrank < label2.labelrank ? label1 : label2).newOpacity = 0;
                }
            }
        }
    }
    // Hide or show
    each(labels, function(label) {
        var complete,
            newOpacity;
        if (label) {
            newOpacity = label.newOpacity;
            if (label.oldOpacity !== newOpacity && label.placed) {
                // Make sure the label is completely hidden to avoid catching clicks (#4362)
                if (newOpacity) {
                    label.show(true);
                } else {
                    // Hide only after the fade-out animation completes
                    complete = function() {
                        label.hide();
                    };
                }
                // Animate or set the opacity
                label.alignAttr.opacity = newOpacity;
                label[label.isOld ? 'animate' : 'attr'](label.alignAttr, null, complete);
            }
            label.isOld = true;
        }
    });
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Axis = H.Axis,
each = H.each,
pick = H.pick,
wrap = H.wrap;
/**
 * Override to use the extreme coordinates from the SVG shape, not the
 * data values.
 *
 * Map-geometry series have their xData hidden from the base implementation,
 * then their cached path bounds (minX/maxX) are folded into the axis extremes.
 */
wrap(Axis.prototype, 'getSeriesExtremes', function(proceed) {
    var isXAxis = this.isXAxis,
        dataMin,
        dataMax,
        xData = [],
        useMapGeometry;
    // Remove the xData array and cache it locally so that the proceed method doesn't use it
    if (isXAxis) {
        each(this.series, function(series, i) {
            if (series.useMapGeometry) {
                xData[i] = series.xData;
                series.xData = [];
            }
        });
    }
    // Call base to reach normal cartesian series (like mappoint)
    proceed.call(this);
    // Run extremes logic for map and mapline
    if (isXAxis) {
        dataMin = pick(this.dataMin, Number.MAX_VALUE);
        dataMax = pick(this.dataMax, -Number.MAX_VALUE);
        each(this.series, function(series, i) {
            if (series.useMapGeometry) {
                // When a series has no cached geometry bound, the fallback
                // must be a no-op: dataMin for the min, dataMax for the max.
                // (The max line previously fell back to dataMin, which could
                // raise dataMax to dataMin when no series provided maxX.)
                dataMin = Math.min(dataMin, pick(series.minX, dataMin));
                dataMax = Math.max(dataMax, pick(series.maxX, dataMax));
                series.xData = xData[i]; // Reset xData array
                useMapGeometry = true;
            }
        });
        if (useMapGeometry) {
            this.dataMin = dataMin;
            this.dataMax = dataMax;
        }
    }
});
/**
 * Override axis translation to make sure the aspect ratio is always kept.
 * Runs on the Y axis after the X axis has been set up; the smaller of the
 * two translation slopes wins and the other axis is padded to center the map.
 */
wrap(Axis.prototype, 'setAxisTranslation', function(proceed) {
    var chart = this.chart,
        mapRatio,
        plotRatio = chart.plotWidth / chart.plotHeight,
        adjustedAxisLength,
        xAxis = chart.xAxis[0],
        padAxis,
        fixTo,
        fixDiff,
        preserveAspectRatio;
    // Run the parent method
    proceed.call(this);
    // Check for map-like series
    if (this.coll === 'yAxis' && xAxis.transA !== undefined) {
        each(this.series, function(series) {
            if (series.preserveAspectRatio) {
                preserveAspectRatio = true;
            }
        });
    }
    // On Y axis, handle both
    if (preserveAspectRatio) {
        // Use the same translation for both axes
        this.transA = xAxis.transA = Math.min(this.transA, xAxis.transA);
        mapRatio = plotRatio / ((xAxis.max - xAxis.min) / (this.max - this.min));
        // What axis to pad to put the map in the middle
        padAxis = mapRatio < 1 ? this : xAxis;
        // Pad it
        adjustedAxisLength = (padAxis.max - padAxis.min) * padAxis.transA;
        padAxis.pixelPadding = padAxis.len - adjustedAxisLength;
        padAxis.minPixelPadding = padAxis.pixelPadding / 2;
        // fixTo anchors a data point to a pixel position (e.g. while
        // panning); shift the padding so that anchor stays put
        fixTo = padAxis.fixTo;
        if (fixTo) {
            fixDiff = fixTo[1] - padAxis.toValue(fixTo[0], true);
            fixDiff *= padAxis.transA;
            if (Math.abs(fixDiff) > padAxis.minPixelPadding || (padAxis.min === padAxis.dataMin && padAxis.max === padAxis.dataMax)) { // zooming out again, keep within restricted area
                fixDiff = 0;
            }
            padAxis.minPixelPadding -= fixDiff;
        }
    }
});
/**
 * Override Axis.render in order to delete the fixTo prop.
 * The anchor is only valid for one render pass; clearing it here prevents a
 * stale anchor from affecting the next setAxisTranslation.
 */
wrap(Axis.prototype, 'render', function(proceed) {
    proceed.call(this);
    this.fixTo = null;
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Axis = H.Axis,
Chart = H.Chart,
color = H.color,
ColorAxis,
each = H.each,
extend = H.extend,
isNumber = H.isNumber,
Legend = H.Legend,
LegendSymbolMixin = H.LegendSymbolMixin,
noop = H.noop,
merge = H.merge,
pick = H.pick,
wrap = H.wrap;
/**
 * The ColorAxis object for inclusion in gradient legends.
 * Constructed like a regular Axis; all real setup happens in init.
 */
ColorAxis = H.ColorAxis = function() {
    this.init.apply(this, arguments);
};
// Inherit from Axis by copying its prototype members, then override below
extend(ColorAxis.prototype, Axis.prototype);
extend(ColorAxis.prototype, {
// Default options merged with user options in init()
defaultColorAxisOptions: {
    lineWidth: 0,
    minPadding: 0,
    maxPadding: 0,
    gridLineWidth: 1,
    tickPixelInterval: 72,
    startOnTick: true,
    endOnTick: true,
    offset: 0,
    // The marker doubles as the crosshair on the legend gradient
    // (see setOptions below)
    marker: {
        animation: {
            duration: 50
        },
        width: 0.01
    },
    labels: {
        overflow: 'justify'
    },
    minColor: '#e6ebf5',
    maxColor: '#003399',
    tickLength: 5,
    showInLegend: true
},
// Initialize the color axis: orientation follows the legend layout
// (horizontal axis for a horizontal legend and vice versa)
init: function(chart, userOptions) {
    var horiz = chart.options.legend.layout !== 'vertical',
        options;
    this.coll = 'colorAxis';
    // Build the options
    options = merge(this.defaultColorAxisOptions, {
        side: horiz ? 2 : 1,
        reversed: !horiz
    }, userOptions, {
        opposite: !horiz,
        showEmpty: false,
        title: null
    });
    Axis.prototype.init.call(this, chart, options);
    // Base init() pushes it to the xAxis array, now pop it again
    //chart[this.isXAxis ? 'xAxis' : 'yAxis'].pop();
    // Prepare data classes
    if (userOptions.dataClasses) {
        this.initDataClasses(userOptions);
    }
    this.initStops(userOptions);
    // Override original axis properties
    this.horiz = horiz;
    this.zoomEnabled = false;
    // Add default values
    this.defaultLegendLength = 200;
},
/*
 * Return an intermediate color between two colors, according to pos where 0
 * is the from color and 1 is the to color. Both colors are H.color objects
 * carrying an rgba array.
 * NOTE: Changes here should be copied
 * to the same function in drilldown.src.js and solid-gauge-src.js.
 */
tweenColors: function(from, to, pos) {
    // Check for has alpha, because rgba colors perform worse due to lack of
    // support in WebKit.
    var hasAlpha,
        ret;
    // Unsupported color, return to-color (#3920)
    if (!to.rgba.length || !from.rgba.length) {
        ret = to.input || 'none';
        // Interpolate
    } else {
        from = from.rgba;
        to = to.rgba;
        hasAlpha = (to[3] !== 1 || from[3] !== 1);
        // Per channel: to + (from - to) * (1 - pos), i.e. linear
        // interpolation giving "from" at pos 0 and "to" at pos 1
        ret = (hasAlpha ? 'rgba(' : 'rgb(') +
            Math.round(to[0] + (from[0] - to[0]) * (1 - pos)) + ',' +
            Math.round(to[1] + (from[1] - to[1]) * (1 - pos)) + ',' +
            Math.round(to[2] + (from[2] - to[2]) * (1 - pos)) +
            (hasAlpha ? (',' + (to[3] + (from[3] - to[3]) * (1 - pos))) : '') + ')';
    }
    return ret;
},
// Build this.dataClasses from userOptions.dataClasses, assigning each class
// either a palette index ('category' mode) or an interpolated color
initDataClasses: function(userOptions) {
    var axis = this,
        chart = this.chart,
        dataClasses,
        colorCounter = 0,
        colorCount = chart.options.chart.colorCount,
        options = this.options,
        len = userOptions.dataClasses.length;
    this.dataClasses = dataClasses = [];
    this.legendItems = [];
    each(userOptions.dataClasses, function(dataClass, i) {
        var colors;
        dataClass = merge(dataClass);
        dataClasses.push(dataClass);
        if (!dataClass.color) {
            if (options.dataClassColor === 'category') {
                dataClass.colorIndex = colorCounter;
                // increase and loop back to zero
                colorCounter++;
                if (colorCounter === colorCount) {
                    colorCounter = 0;
                }
            } else {
                // Spread the classes evenly along the min-max gradient
                dataClass.color = axis.tweenColors(
                    color(options.minColor),
                    color(options.maxColor),
                    len < 2 ? 0.5 : i / (len - 1) // #3219
                );
            }
        }
    });
},
// Set up the gradient stops: user-provided, or a simple two-stop
// min-to-max gradient. Each stop gets a parsed color object attached.
initStops: function(userOptions) {
    this.stops = userOptions.stops || [
        [0, this.options.minColor],
        [1, this.options.maxColor]
    ];
    each(this.stops, function(stop) {
        stop.color = color(stop[1]);
    });
},
/**
 * Extend the setOptions method to process extreme colors and color
 * stops. The marker options double as the crosshair shown on the
 * legend gradient.
 */
setOptions: function(userOptions) {
    Axis.prototype.setOptions.call(this, userOptions);
    this.options.crosshair = this.options.marker;
},
// Size the axis to match its legend symbol rectangle, since the color
// axis is rendered inside the legend rather than along the plot area
setAxisSize: function() {
    var symbol = this.legendSymbol,
        chart = this.chart,
        legendOptions = chart.options.legend || {},
        x,
        y,
        width,
        height;
    if (symbol) {
        this.left = x = symbol.attr('x');
        this.top = y = symbol.attr('y');
        this.width = width = symbol.attr('width');
        this.height = height = symbol.attr('height');
        this.right = chart.chartWidth - x - width;
        this.bottom = chart.chartHeight - y - height;
        this.len = this.horiz ? width : height;
        this.pos = this.horiz ? x : y;
    } else {
        // Fake length for disabled legend to avoid tick issues and such (#5205)
        this.len = (this.horiz ? legendOptions.symbolWidth : legendOptions.symbolHeight) || this.defaultLegendLength;
    }
},
/**
 * Translate from a value to a color. With data classes, the matching
 * class's color is returned (and the class recorded on the point);
 * otherwise the color is interpolated between the gradient stops.
 */
toColor: function(value, point) {
    var pos,
        stops = this.stops,
        from,
        to,
        color,
        dataClasses = this.dataClasses,
        dataClass,
        i;
    if (dataClasses) {
        i = dataClasses.length;
        while (i--) {
            dataClass = dataClasses[i];
            from = dataClass.from;
            to = dataClass.to;
            // Open-ended classes: an undefined bound always matches
            if ((from === undefined || value >= from) && (to === undefined || value <= to)) {
                color = dataClass.color;
                if (point) {
                    point.dataClass = i;
                    point.colorIndex = dataClass.colorIndex;
                }
                break;
            }
        }
    } else {
        if (this.isLog) {
            value = this.val2lin(value);
        }
        // Normalized position of the value along the axis, 0..1
        pos = 1 - ((this.max - value) / ((this.max - this.min) || 1));
        // Find the pair of stops bracketing pos
        i = stops.length;
        while (i--) {
            if (pos > stops[i][0]) {
                break;
            }
        }
        from = stops[i] || stops[i + 1];
        to = stops[i + 1] || from;
        // The position within the gradient
        pos = 1 - (to[0] - pos) / ((to[0] - from[0]) || 1);
        color = this.tweenColors(
            from.color,
            to.color,
            pos
        );
    }
    return color;
},
/**
 * Override the getOffset method to add the whole axis groups inside the legend.
 */
getOffset: function() {
    var group = this.legendGroup,
        sideOffset = this.chart.axisOffset[this.side];
    if (group) {
        // Hook for the getOffset method to add groups to this parent group
        this.axisParent = group;
        // Call the base
        Axis.prototype.getOffset.call(this);
        // First time only
        if (!this.added) {
            this.added = true;
            this.labelLeft = 0;
            this.labelRight = this.width;
        }
        // Reset it to avoid color axis reserving space
        this.chart.axisOffset[this.side] = sideOffset;
    }
},
/**
 * Create the color gradient used as the legend symbol's fill.
 * The gradient direction follows the axis orientation and reversed flag.
 */
setLegendColor: function() {
    var grad,
        horiz = this.horiz,
        options = this.options,
        reversed = this.reversed,
        one = reversed ? 1 : 0,
        zero = reversed ? 0 : 1;
    grad = horiz ? [one, 0, zero, 0] : [0, zero, 0, one]; // #3190
    this.legendColor = {
        linearGradient: {
            x1: grad[0],
            y1: grad[1],
            x2: grad[2],
            y2: grad[3]
        },
        stops: options.stops || [
            [0, options.minColor],
            [1, options.maxColor]
        ]
    };
},
/**
 * The color axis appears inside the legend and has its own legend symbol:
 * a gradient-filled rectangle whose size reserves room for the axis labels.
 */
drawLegendSymbol: function(legend, item) {
    var padding = legend.padding,
        legendOptions = legend.options,
        horiz = this.horiz,
        width = pick(legendOptions.symbolWidth, horiz ? this.defaultLegendLength : 12),
        height = pick(legendOptions.symbolHeight, horiz ? 12 : this.defaultLegendLength),
        labelPadding = pick(legendOptions.labelPadding, horiz ? 16 : 30),
        itemDistance = pick(legendOptions.itemDistance, 10);
    this.setLegendColor();
    // Create the gradient
    item.legendSymbol = this.chart.renderer.rect(
        0,
        legend.baseline - 11,
        width,
        height
    ).attr({
        zIndex: 1
    }).add(item.legendGroup);
    // Set how much space this legend item takes up
    this.legendItemWidth = width + padding + (horiz ? itemDistance : labelPadding);
    this.legendItemHeight = height + padding + (horiz ? labelPadding : 0);
},
/**
 * Fool the legend: stub out the state/visibility API so the color axis
 * behaves as a passive legend item.
 */
setState: noop,
visible: true,
setVisible: noop,
// The extremes of a color axis come from its series' value range,
// not from x/y data
getSeriesExtremes: function() {
    var series;
    if (this.series.length) {
        series = this.series[0];
        this.dataMin = series.valueMin;
        this.dataMax = series.valueMax;
    }
},
// Draw the crosshair marker on the legend gradient at the hovered point's
// value. Temporarily rebinds point.plotX/plotY for the base implementation,
// then restores them.
drawCrosshair: function(e, point) {
    var plotX = point && point.plotX,
        plotY = point && point.plotY,
        crossPos,
        axisPos = this.pos,
        axisLen = this.len;
    if (point) {
        crossPos = this.toPixels(point[point.series.colorKey]);
        // Clamp to just outside the gradient ends
        if (crossPos < axisPos) {
            crossPos = axisPos - 2;
        } else if (crossPos > axisPos + axisLen) {
            crossPos = axisPos + axisLen + 2;
        }
        point.plotX = crossPos;
        point.plotY = this.len - crossPos;
        Axis.prototype.drawCrosshair.call(this, e, point);
        // Restore the point's real plot coordinates
        point.plotX = plotX;
        point.plotY = plotY;
        if (this.cross) {
            this.cross
                .addClass('highcharts-coloraxis-marker')
                .add(this.legendGroup);
        }
    }
},
// For crosshairs on the legend gradient, return a small triangle marker
// path instead of a full plot line; otherwise defer to the base Axis
getPlotLinePath: function(a, b, c, d, pos) {
    return isNumber(pos) ? // crosshairs only // #3969 pos can be 0 !!
        (this.horiz ? ['M', pos - 4, this.top - 6, 'L', pos + 4, this.top - 6, pos, this.top, 'Z'] : ['M', this.left, pos, 'L', this.left - 6, pos + 6, this.left - 6, pos - 6, 'Z']) :
        Axis.prototype.getPlotLinePath.call(this, a, b, c, d);
},
// Update the color axis options at runtime, refreshing dependent series
// colors and the legend gradient
update: function(newOptions, redraw) {
    var chart = this.chart,
        legend = chart.legend;
    each(this.series, function(series) {
        series.isDirtyData = true; // Needed for Axis.update when choropleth colors change
    });
    // When updating data classes, destroy old items and make sure new ones are created (#3207)
    if (newOptions.dataClasses && legend.allItems) {
        each(legend.allItems, function(item) {
            if (item.isDataClass) {
                item.legendGroup.destroy();
            }
        });
        chart.isDirtyLegend = true;
    }
    // Keep the options structure updated for export. Unlike xAxis and yAxis, the colorAxis is
    // not an array. (#3207)
    chart.options[this.coll] = merge(this.userOptions, newOptions);
    Axis.prototype.update.call(this, newOptions, redraw);
    if (this.legendItem) {
        this.setLegendColor();
        legend.colorizeItem(this, true);
    }
},
/**
 * Get the legend item symbols for data classes. Builds (and caches in
 * this.legendItems) one mock legend item per data class, each with its own
 * toggle behavior that shows/hides the points belonging to that class.
 */
getDataClassLegendSymbols: function() {
    var axis = this,
        chart = this.chart,
        legendItems = this.legendItems,
        legendOptions = chart.options.legend,
        valueDecimals = legendOptions.valueDecimals,
        valueSuffix = legendOptions.valueSuffix || '',
        name;
    if (!legendItems.length) {
        each(this.dataClasses, function(dataClass, i) {
            var vis = true,
                from = dataClass.from,
                to = dataClass.to;
            // Assemble the default name. This can be overridden by legend.options.labelFormatter
            name = '';
            if (from === undefined) {
                name = '< ';
            } else if (to === undefined) {
                name = '> ';
            }
            if (from !== undefined) {
                name += H.numberFormat(from, valueDecimals) + valueSuffix;
            }
            if (from !== undefined && to !== undefined) {
                name += ' - ';
            }
            if (to !== undefined) {
                name += H.numberFormat(to, valueDecimals) + valueSuffix;
            }
            // Add a mock object to the legend items
            legendItems.push(extend({
                chart: chart,
                name: name,
                options: {},
                drawLegendSymbol: LegendSymbolMixin.drawRectangle,
                visible: true,
                setState: noop,
                isDataClass: true,
                // Clicking the legend item toggles every point in this class
                setVisible: function() {
                    vis = this.visible = !vis;
                    each(axis.series, function(series) {
                        each(series.points, function(point) {
                            if (point.dataClass === i) {
                                point.setVisible(vis);
                            }
                        });
                    });
                    chart.legend.colorizeItem(this, vis);
                }
            }, dataClass));
        });
    }
    return legendItems;
},
name: '' // Prevents 'undefined' in legend in IE8
});
/**
 * Handle animation of the color attributes directly: fill and stroke
 * transitions are interpolated with tweenColors instead of numerically.
 */
each(['fill', 'stroke'], function(prop) {
    H.Fx.prototype[prop + 'Setter'] = function() {
        this.elem.attr(prop, ColorAxis.prototype.tweenColors(color(this.start), color(this.end), this.pos));
    };
});
/**
 * Extend the chart getAxes method to also get the color axis.
 * The ColorAxis constructor registers itself with the chart via Axis.init.
 */
wrap(Chart.prototype, 'getAxes', function(proceed) {
    var options = this.options,
        colorAxisOptions = options.colorAxis;
    proceed.call(this);
    this.colorAxis = [];
    if (colorAxisOptions) {
        new ColorAxis(this, colorAxisOptions); // eslint-disable-line no-new
    }
});
/**
 * Wrap the legend getAllItems method to add the color axis. This also removes the
 * axis' own series to prevent them from showing up individually.
 */
wrap(Legend.prototype, 'getAllItems', function(proceed) {
    var allItems = [],
        colorAxis = this.chart.colorAxis[0];
    if (colorAxis && colorAxis.options) {
        if (colorAxis.options.showInLegend) {
            // Data classes
            if (colorAxis.options.dataClasses) {
                allItems = allItems.concat(colorAxis.getDataClassLegendSymbols());
                // Gradient legend
            } else {
                // Add this axis on top
                allItems.push(colorAxis);
            }
        }
        // Don't add the color axis' series
        each(colorAxis.series, function(series) {
            series.options.showInLegend = false;
        });
    }
    return allItems.concat(proceed.call(this));
});
// After the base colorize, re-apply the gradient fill to legend items that
// carry one (the color axis symbol), since the base method sets a flat color
wrap(Legend.prototype, 'colorizeItem', function(proceed, item, visible) {
    proceed.call(this, item, visible);
    if (visible && item.legendColor) {
        item.legendSymbol.attr({
            fill: item.legendColor
        });
    }
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var defined = H.defined,
each = H.each,
noop = H.noop,
seriesTypes = H.seriesTypes;
/**
 * Mixin for maps and heatmaps
 */
H.colorPointMixin = {
    /**
     * Color points have a value option that determines whether or not it is a null point
     */
    isValid: function() {
        return this.value !== null;
    },
    /**
     * Set the visibility of a single point
     */
    setVisible: function(vis) {
        var point = this,
            method = vis ? 'show' : 'hide';
        // Show and hide associated elements
        each(['graphic', 'dataLabel'], function(key) {
            if (point[key]) {
                point[key][method]();
            }
        });
    }
};
// Series-side mixin for color-axis-driven series (maps, heatmaps)
H.colorSeriesMixin = {
    pointArrayMap: ['value'],
    axisTypes: ['xAxis', 'yAxis', 'colorAxis'],
    optionalAxis: 'colorAxis',
    trackerGroups: ['group', 'markerGroup', 'dataLabelsGroup'],
    getSymbol: noop,
    parallelArrays: ['x', 'y', 'value'],
    colorKey: 'value',
    /**
     * In choropleth maps, the color is a result of the value, so this needs translation too
     */
    translateColors: function() {
        var series = this,
            nullColor = this.options.nullColor,
            colorAxis = this.colorAxis,
            colorKey = this.colorKey;
        each(this.data, function(point) {
            var value = point[colorKey],
                color;
            // Priority: explicit point color, null color, color axis
            // translation, then point/series defaults
            color = point.options.color ||
                (point.isNull ? nullColor : (colorAxis && value !== undefined) ? colorAxis.toColor(value, point) : point.color || series.color);
            if (color) {
                point.color = color;
            }
        });
    },
    /**
     * Get the color attributes to apply on the graphic
     */
    colorAttribs: function(point) {
        var ret = {};
        if (defined(point.color)) {
            ret[this.colorProp || 'fill'] = point.color;
        }
        return ret;
    }
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var color = H.color,
ColorAxis = H.ColorAxis,
colorPointMixin = H.colorPointMixin,
colorSeriesMixin = H.colorSeriesMixin,
doc = H.doc,
each = H.each,
extend = H.extend,
isNumber = H.isNumber,
LegendSymbolMixin = H.LegendSymbolMixin,
map = H.map,
merge = H.merge,
noop = H.noop,
pick = H.pick,
isArray = H.isArray,
Point = H.Point,
Series = H.Series,
seriesType = H.seriesType,
seriesTypes = H.seriesTypes,
splat = H.splat;
// The vector-effect attribute is not supported in IE <= 11 (at least), so we need
// diffent logic (#3218)
var supportsVectorEffect = doc.documentElement.style.vectorEffect !== undefined;
/**
* The MapAreaPoint object
*/
/**
* Add the map series type
*/
seriesType('map', 'scatter', {
allAreas: true,
animation: false, // makes the complex shapes slow
nullColor: '#f7f7f7',
borderColor: '#cccccc',
borderWidth: 1,
marker: null,
stickyTracking: false,
joinBy: 'hc-key',
dataLabels: {
formatter: function() { // #2945
return this.point.value;
},
inside: true, // for the color
verticalAlign: 'middle',
crop: false,
overflow: false,
padding: 0
},
turboThreshold: 0,
tooltip: {
followPointer: true,
pointFormat: '{point.name}: {point.value}<br/>'
},
states: {
normal: {
animation: true
},
hover: {
brightness: 0.2,
halo: null
},
select: {
color: '#cccccc'
}
}
// Prototype members
}, merge(colorSeriesMixin, {
type: 'map',
supportsDrilldown: true,
getExtremesFromAll: true,
useMapGeometry: true, // get axis extremes from paths, not values
forceDL: true,
searchPoint: noop,
directTouch: true, // When tooltip is not shared, this series (and derivatives) requires direct touch/hover. KD-tree does not apply.
preserveAspectRatio: true, // X axis and Y axis must have same translation slope
pointArrayMap: ['value'],
/**
 * Get the bounding box of all paths in the map combined.
 * Caches per-point bounds (_minX/_maxX/_minY/_maxY, _midX/_midY) on first
 * use and aggregates them into the series box and axis minRange defaults.
 */
getBox: function(paths) {
    var MAX_VALUE = Number.MAX_VALUE,
        maxX = -MAX_VALUE,
        minX = MAX_VALUE,
        maxY = -MAX_VALUE,
        minY = MAX_VALUE,
        minRange = MAX_VALUE,
        xAxis = this.xAxis,
        yAxis = this.yAxis,
        hasBox;
    // Find the bounding box
    each(paths || [], function(point) {
        if (point.path) {
            if (typeof point.path === 'string') {
                point.path = H.splitPath(point.path);
            }
            var path = point.path || [],
                i = path.length,
                even = false, // while loop reads from the end
                pointMaxX = -MAX_VALUE,
                pointMinX = MAX_VALUE,
                pointMaxY = -MAX_VALUE,
                pointMinY = MAX_VALUE,
                properties = point.properties;
            // The first time a map point is used, analyze its box
            if (!point._foundBox) {
                // Numbers in the path alternate x/y; reading from the end,
                // the flag flips on every numeric entry
                while (i--) {
                    if (isNumber(path[i])) {
                        if (even) { // even = x
                            pointMaxX = Math.max(pointMaxX, path[i]);
                            pointMinX = Math.min(pointMinX, path[i]);
                        } else { // odd = Y
                            pointMaxY = Math.max(pointMaxY, path[i]);
                            pointMinY = Math.min(pointMinY, path[i]);
                        }
                        even = !even;
                    }
                }
                // Cache point bounding box for use to position data labels, bubbles etc
                point._midX = pointMinX + (pointMaxX - pointMinX) *
                    (point.middleX || (properties && properties['hc-middle-x']) || 0.5); // pick is slower and very marginally needed
                point._midY = pointMinY + (pointMaxY - pointMinY) *
                    (point.middleY || (properties && properties['hc-middle-y']) || 0.5);
                point._maxX = pointMaxX;
                point._minX = pointMinX;
                point._maxY = pointMaxY;
                point._minY = pointMinY;
                point.labelrank = pick(point.labelrank, (pointMaxX - pointMinX) * (pointMaxY - pointMinY));
                point._foundBox = true;
            }
            maxX = Math.max(maxX, point._maxX);
            minX = Math.min(minX, point._minX);
            maxY = Math.max(maxY, point._maxY);
            minY = Math.min(minY, point._minY);
            minRange = Math.min(point._maxX - point._minX, point._maxY - point._minY, minRange);
            hasBox = true;
        }
    });
    // Set the box for the whole series
    if (hasBox) {
        this.minY = Math.min(minY, pick(this.minY, MAX_VALUE));
        this.maxY = Math.max(maxY, pick(this.maxY, -MAX_VALUE));
        this.minX = Math.min(minX, pick(this.minX, MAX_VALUE));
        this.maxX = Math.max(maxX, pick(this.maxX, -MAX_VALUE));
        // If no minRange option is set, set the default minimum zooming range to 5 times the
        // size of the smallest element
        if (xAxis && xAxis.options.minRange === undefined) {
            xAxis.minRange = Math.min(5 * minRange, (this.maxX - this.minX) / 5, xAxis.minRange || MAX_VALUE);
        }
        if (yAxis && yAxis.options.minRange === undefined) {
            yAxis.minRange = Math.min(5 * minRange, (this.maxY - this.minY) / 5, yAxis.minRange || MAX_VALUE);
        }
    }
},
// Value extremes drive the color axis; the geometric Y box serves as the
// mock y-axis extremes for this series
getExtremes: function() {
    // Get the actual value extremes for colors
    Series.prototype.getExtremes.call(this, this.valueData);
    // Recalculate box on updated data
    if (this.chart.hasRendered && this.isDirtyData) {
        this.getBox(this.options.data);
    }
    this.valueMin = this.dataMin;
    this.valueMax = this.dataMax;
    // Extremes for the mock Y axis
    this.dataMin = this.minY;
    this.dataMax = this.maxY;
},
/**
 * Translate the path so that it automatically fits into the plot area box.
 * Path numbers alternate x/y (reading from the end, like getBox); non-number
 * entries (SVG commands) are copied through unchanged.
 * @param {Object} path
 */
translatePath: function(path) {
    var series = this,
        even = false, // while loop reads from the end
        xAxis = series.xAxis,
        yAxis = series.yAxis,
        xMin = xAxis.min,
        xTransA = xAxis.transA,
        xMinPixelPadding = xAxis.minPixelPadding,
        yMin = yAxis.min,
        yTransA = yAxis.transA,
        yMinPixelPadding = yAxis.minPixelPadding,
        i,
        ret = []; // Preserve the original
    // Do the translation
    if (path) {
        i = path.length;
        while (i--) {
            if (isNumber(path[i])) {
                ret[i] = even ?
                    (path[i] - xMin) * xTransA + xMinPixelPadding :
                    (path[i] - yMin) * yTransA + yMinPixelPadding;
                even = !even;
            } else {
                ret[i] = path[i];
            }
        }
    }
    return ret;
},
/**
* Extend setData to join in mapData. If the allAreas option is true, all areas
* from the mapData are used, and those that don't correspond to a data value
* are given null values.
*/
setData: function(data, redraw, animation, updatePoints) {
var options = this.options,
chartOptions = this.chart.options.chart,
globalMapData = chartOptions && chartOptions.map,
mapData = options.mapData,
joinBy = options.joinBy,
joinByNull = joinBy === null,
pointArrayMap = options.keys || this.pointArrayMap,
dataUsed = [],
mapMap = {},
mapPoint,
transform,
mapTransforms = this.chart.mapTransforms,
props,
i;
// Collect mapData from chart options if not defined on series
if (!mapData && globalMapData) {
mapData = typeof globalMapData === 'string' ? H.maps[globalMapData] : globalMapData;
}
if (joinByNull) {
joinBy = '_i';
}
joinBy = this.joinBy = splat(joinBy);
if (!joinBy[1]) {
joinBy[1] = joinBy[0];
}
// Pick up numeric values, add index
// Convert Array point definitions to objects using pointArrayMap
if (data) {
each(data, function(val, i) {
var ix = 0;
if (isNumber(val)) {
data[i] = {
value: val
};
} else if (isArray(val)) {
data[i] = {};
// Automatically copy first item to hc-key if there is an extra leading string
if (!options.keys && val.length > pointArrayMap.length && typeof val[0] === 'string') {
data[i]['hc-key'] = val[0];
++ix;
}
// Run through pointArrayMap and what's left of the point data array in parallel, copying over the values
for (var j = 0; j < pointArrayMap.length; ++j, ++ix) {
if (pointArrayMap[j]) {
data[i][pointArrayMap[j]] = val[ix];
}
}
}
if (joinByNull) {
data[i]._i = i;
}
});
}
this.getBox(data);
// Pick up transform definitions for chart
this.chart.mapTransforms = mapTransforms = chartOptions && chartOptions.mapTransforms || mapData && mapData['hc-transform'] || mapTransforms;
// Cache cos/sin of transform rotation angle
if (mapTransforms) {
for (transform in mapTransforms) {
if (mapTransforms.hasOwnProperty(transform) && transform.rotation) {
transform.cosAngle = Math.cos(transform.rotation);
transform.sinAngle = Math.sin(transform.rotation);
}
}
}
if (mapData) {
if (mapData.type === 'FeatureCollection') {
this.mapTitle = mapData.title;
mapData = H.geojson(mapData, this.type, this);
}
this.mapData = mapData;
this.mapMap = {};
for (i = 0; i < mapData.length; i++) {
mapPoint = mapData[i];
props = mapPoint.properties;
mapPoint._i = i;
// Copy the property over to root for faster access
if (joinBy[0] && props && props[joinBy[0]]) {
mapPoint[joinBy[0]] = props[joinBy[0]];
}
mapMap[mapPoint[joinBy[0]]] = mapPoint;
}
this.mapMap = mapMap;
// Registered the point codes that actually hold data
if (data && joinBy[1]) {
each(data, function(point) {
if (mapMap[point[joinBy[1]]]) {
dataUsed.push(mapMap[point[joinBy[1]]]);
}
});
}
if (options.allAreas) {
this.getBox(mapData);
data = data || [];
// Registered the point codes that actually hold data
if (joinBy[1]) {
each(data, function(point) {
dataUsed.push(point[joinBy[1]]);
});
}
// Add those map points that don't correspond to data, which will be drawn as null points
dataUsed = '|' + map(dataUsed, function(point) {
return point && point[joinBy[0]];
}).join('|') + '|'; // String search is faster than array.indexOf
each(mapData, function(mapPoint) {
if (!joinBy[0] || dataUsed.indexOf('|' + mapPoint[joinBy[0]] + '|') === -1) {
data.push(merge(mapPoint, {
value: null
}));
updatePoints = false; // #5050 - adding all areas causes the update optimization of setData to kick in, even though the point order has changed
}
});
} else {
this.getBox(dataUsed); // Issue #4784
}
}
Series.prototype.setData.call(this, data, redraw, animation, updatePoints);
},
            /**
             * No graph for the map series
             */
            drawGraph: noop,
            /**
             * We need the points' bounding boxes in order to draw the data labels, so
             * we skip it now and call it from drawPoints instead
             * (via drawMapDataLabels).
             */
            drawDataLabels: noop,
/**
* Allow a quick redraw by just translating the area group. Used for zooming and panning
* in capable browsers.
*/
doFullTranslate: function() {
return this.isDirtyData || this.chart.isResizing || this.chart.renderer.isVML || !this.baseTrans;
},
/**
* Add the path option for data points. Find the max value for color calculation.
*/
translate: function() {
var series = this,
xAxis = series.xAxis,
yAxis = series.yAxis,
doFullTranslate = series.doFullTranslate();
series.generatePoints();
each(series.data, function(point) {
// Record the middle point (loosely based on centroid), determined
// by the middleX and middleY options.
point.plotX = xAxis.toPixels(point._midX, true);
point.plotY = yAxis.toPixels(point._midY, true);
if (doFullTranslate) {
point.shapeType = 'path';
point.shapeArgs = {
d: series.translatePath(point.path)
};
}
});
series.translateColors();
},
/**
* Get presentational attributes
*/
pointAttribs: function(point, state) {
var attr = seriesTypes.column.prototype.pointAttribs.call(this, point, state);
// Prevent flickering whan called from setState
if (point.isFading) {
delete attr.fill;
}
// If vector-effect is not supported, we set the stroke-width on the group element
// and let all point graphics inherit. That way we don't have to iterate over all
// points to update the stroke-width on zooming. TODO: Check unstyled
if (supportsVectorEffect) {
attr['vector-effect'] = 'non-scaling-stroke';
} else {
attr['stroke-width'] = 'inherit';
}
return attr;
},
            /**
             * Use the drawPoints method of column, that is able to handle simple shapeArgs.
             * Extend it by assigning the tooltip position.
             */
            drawPoints: function() {
                var series = this,
                    xAxis = series.xAxis,
                    yAxis = series.yAxis,
                    group = series.group,
                    chart = series.chart,
                    renderer = chart.renderer,
                    scaleX,
                    scaleY,
                    translateX,
                    translateY,
                    baseTrans = this.baseTrans;
                // Set a group that handles transform during zooming and panning in order to preserve clipping
                // on series.group
                if (!series.transformGroup) {
                    series.transformGroup = renderer.g()
                        .attr({
                            scaleX: 1,
                            scaleY: 1
                        })
                        .add(group);
                    series.transformGroup.survive = true;
                }
                // Draw the shapes again
                if (series.doFullTranslate()) {
                    // Individual point actions. TODO: Check unstyled.
                    // Draw them in transformGroup (temporarily swapped in as series.group)
                    series.group = series.transformGroup;
                    seriesTypes.column.prototype.drawPoints.apply(series);
                    series.group = group; // Reset
                    // Add class names
                    each(series.points, function(point) {
                        if (point.graphic) {
                            if (point.name) {
                                point.graphic.addClass('highcharts-name-' + point.name.replace(/ /g, '-').toLowerCase());
                            }
                            if (point.properties && point.properties['hc-key']) {
                                point.graphic.addClass('highcharts-key-' + point.properties['hc-key'].toLowerCase());
                            }
                        }
                    });
                    // Set the base for later scale-zooming. The originX and originY properties are the
                    // axis values in the plot area's upper left corner.
                    this.baseTrans = {
                        originX: xAxis.min - xAxis.minPixelPadding / xAxis.transA,
                        originY: yAxis.min - yAxis.minPixelPadding / yAxis.transA + (yAxis.reversed ? 0 : yAxis.len / yAxis.transA),
                        transAX: xAxis.transA,
                        transAY: yAxis.transA
                    };
                    // Reset transformation in case we're doing a full translate (#3789)
                    this.transformGroup.animate({
                        translateX: 0,
                        translateY: 0,
                        scaleX: 1,
                        scaleY: 1
                    });
                    // Just update the scale and transform for better performance
                } else {
                    // Cheap path: scale and translate the whole group relative
                    // to the base recorded on the last full translate
                    scaleX = xAxis.transA / baseTrans.transAX;
                    scaleY = yAxis.transA / baseTrans.transAY;
                    translateX = xAxis.toPixels(baseTrans.originX, true);
                    translateY = yAxis.toPixels(baseTrans.originY, true);
                    // Handle rounding errors in normal view (#3789)
                    if (scaleX > 0.99 && scaleX < 1.01 && scaleY > 0.99 && scaleY < 1.01) {
                        scaleX = 1;
                        scaleY = 1;
                        translateX = Math.round(translateX);
                        translateY = Math.round(translateY);
                    }
                    this.transformGroup.animate({
                        translateX: translateX,
                        translateY: translateY,
                        scaleX: scaleX,
                        scaleY: scaleY
                    });
                }
                // Set the stroke-width directly on the group element so the children inherit it. We need to use
                // setAttribute directly, because the stroke-widthSetter method expects a stroke color also to be
                // set.
                if (!supportsVectorEffect) {
                    series.group.element.setAttribute(
                        'stroke-width',
                        series.options[
                            (series.pointAttrToOptions && series.pointAttrToOptions['stroke-width']) || 'borderWidth'
                        ] / (scaleX || 1)
                    );
                }
                this.drawMapDataLabels();
            },
/**
* Draw the data labels. Special for maps is the time that the data labels are drawn (after points),
* and the clipping of the dataLabelsGroup.
*/
drawMapDataLabels: function() {
Series.prototype.drawDataLabels.call(this);
if (this.dataLabelsGroup) {
this.dataLabelsGroup.clip(this.chart.clipRect);
}
},
/**
* Override render to throw in an async call in IE8. Otherwise it chokes on the US counties demo.
*/
render: function() {
var series = this,
render = Series.prototype.render;
// Give IE8 some time to breathe.
if (series.chart.renderer.isVML && series.data.length > 3000) {
setTimeout(function() {
render.call(series);
});
} else {
render.call(series);
}
},
/**
* The initial animation for the map series. By default, animation is disabled.
* Animation of map shapes is not at all supported in VML browsers.
*/
animate: function(init) {
var chart = this.chart,
animation = this.options.animation,
group = this.group,
xAxis = this.xAxis,
yAxis = this.yAxis,
left = xAxis.pos,
top = yAxis.pos;
if (chart.renderer.isSVG) {
if (animation === true) {
animation = {
duration: 1000
};
}
// Initialize the animation
if (init) {
// Scale down the group and place it in the center
group.attr({
translateX: left + xAxis.len / 2,
translateY: top + yAxis.len / 2,
scaleX: 0.001, // #1499
scaleY: 0.001
});
// Run the animation
} else {
group.animate({
translateX: left,
translateY: top,
scaleX: 1,
scaleY: 1
}, animation);
// Delete this function to allow it only once
this.animate = null;
}
}
},
            /**
             * Animate in the new series from the clicked point in the old series.
             * Depends on the drilldown.js module
             */
            animateDrilldown: function(init) {
                var toBox = this.chart.plotBox,
                    level = this.chart.drilldownLevels[this.chart.drilldownLevels.length - 1],
                    fromBox = level.bBox,
                    animationOptions = this.chart.options.drilldown.animation,
                    scale;
                if (!init) {
                    // Uniform scale so the clicked area's box maps onto the plot box
                    scale = Math.min(fromBox.width / toBox.width, fromBox.height / toBox.height);
                    level.shapeArgs = {
                        scaleX: scale,
                        scaleY: scale,
                        translateX: fromBox.x,
                        translateY: fromBox.y
                    };
                    // Start each point at the clicked area's transform, animate
                    // it back to the identity transform
                    each(this.points, function(point) {
                        if (point.graphic) {
                            point.graphic
                                .attr(level.shapeArgs)
                                .animate({
                                    scaleX: 1,
                                    scaleY: 1,
                                    translateX: 0,
                                    translateY: 0
                                }, animationOptions);
                        }
                    });
                    // Delete this function to allow it only once
                    this.animate = null;
                }
            },
            // Use the standard rectangle legend symbol from LegendSymbolMixin
            drawLegendSymbol: LegendSymbolMixin.drawRectangle,
/**
* When drilling up, pull out the individual point graphics from the lower series
* and animate them into the origin point in the upper series.
*/
animateDrillupFrom: function(level) {
seriesTypes.column.prototype.animateDrillupFrom.call(this, level);
},
/**
* When drilling up, keep the upper series invisible until the lower series has
* moved into place
*/
animateDrillupTo: function(init) {
seriesTypes.column.prototype.animateDrillupTo.call(this, init);
}
// Point class
}), extend({
/**
* Extend the Point object to split paths
*/
applyOptions: function(options, x) {
var point = Point.prototype.applyOptions.call(this, options, x),
series = this.series,
joinBy = series.joinBy,
mapPoint;
if (series.mapData) {
mapPoint = point[joinBy[1]] !== undefined && series.mapMap[point[joinBy[1]]];
if (mapPoint) {
// This applies only to bubbles
if (series.xyFromShape) {
point.x = mapPoint._midX;
point.y = mapPoint._midY;
}
extend(point, mapPoint); // copy over properties
} else {
point.value = point.value || null;
}
}
return point;
},
/**
* Stop the fade-out
*/
onMouseOver: function(e) {
clearTimeout(this.colorInterval);
if (this.value !== null) {
Point.prototype.onMouseOver.call(this, e);
} else { //#3401 Tooltip doesn't hide when hovering over null points
this.series.onMouseOut(e);
}
},
/**
* Zoom the chart to view a specific area point
*/
zoomTo: function() {
var point = this,
series = point.series;
series.xAxis.setExtremes(
point._minX,
point._maxX,
false
);
series.yAxis.setExtremes(
point._minY,
point._maxY,
false
);
series.chart.redraw();
}
}, colorPointMixin));
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var addEvent = H.addEvent,
Chart = H.Chart,
doc = H.doc,
each = H.each,
extend = H.extend,
merge = H.merge,
pick = H.pick,
wrap = H.wrap;
function stopEvent(e) {
if (e) {
if (e.preventDefault) {
e.preventDefault();
}
if (e.stopPropagation) {
e.stopPropagation();
}
e.cancelBubble = true;
}
}
// Add events to the Chart object itself
extend(Chart.prototype, {
renderMapNavigation: function() {
var chart = this,
options = this.options.mapNavigation,
buttons = options.buttons,
n,
button,
buttonOptions,
attr,
states,
hoverStates,
selectStates,
outerHandler = function(e) {
this.handler.call(chart, e);
stopEvent(e); // Stop default click event (#4444)
};
if (pick(options.enableButtons, options.enabled) && !chart.renderer.forExport) {
chart.mapNavButtons = [];
for (n in buttons) {
if (buttons.hasOwnProperty(n)) {
buttonOptions = merge(options.buttonOptions, buttons[n]);
button = chart.renderer.button(
buttonOptions.text,
0,
0,
outerHandler,
attr,
hoverStates,
selectStates,
0,
n === 'zoomIn' ? 'topbutton' : 'bottombutton'
)
.addClass('highcharts-map-navigation')
.attr({
width: buttonOptions.width,
height: buttonOptions.height,
title: chart.options.lang[n],
padding: buttonOptions.padding,
zIndex: 5
})
.add();
button.handler = buttonOptions.onclick;
button.align(extend(buttonOptions, {
width: button.width,
height: 2 * button.height
}), null, buttonOptions.alignTo);
addEvent(button.element, 'dblclick', stopEvent); // Stop double click event (#4444)
chart.mapNavButtons.push(button);
}
}
}
},
/**
* Fit an inner box to an outer. If the inner box overflows left or right, align it to the sides of the
* outer. If it overflows both sides, fit it within the outer. This is a pattern that occurs more places
* in Highcharts, perhaps it should be elevated to a common utility function.
*/
fitToBox: function(inner, outer) {
each([
['x', 'width'],
['y', 'height']
], function(dim) {
var pos = dim[0],
size = dim[1];
if (inner[pos] + inner[size] > outer[pos] + outer[size]) { // right overflow
if (inner[size] > outer[size]) { // the general size is greater, fit fully to outer
inner[size] = outer[size];
inner[pos] = outer[pos];
} else { // align right
inner[pos] = outer[pos] + outer[size] - inner[size];
}
}
if (inner[size] > outer[size]) {
inner[size] = outer[size];
}
if (inner[pos] < outer[pos]) {
inner[pos] = outer[pos];
}
});
return inner;
},
        /**
         * Zoom the map in or out by a certain amount. Less than 1 zooms in, greater than 1 zooms out.
         * @param {Number} howMuch - Zoom factor applied to the current axis ranges;
         *        undefined resets the zoom.
         * @param {Number} centerXArg - X axis value to zoom around (defaults to the current center).
         * @param {Number} centerYArg - Y axis value to zoom around (defaults to the current center).
         * @param {Number} mouseX - Chart pixel X to keep fixed under the cursor (mousewheel zoom).
         * @param {Number} mouseY - Chart pixel Y to keep fixed under the cursor (mousewheel zoom).
         */
        mapZoom: function(howMuch, centerXArg, centerYArg, mouseX, mouseY) {
            /*if (this.isMapZooming) {
                this.mapZoomQueue = arguments;
                return;
            }*/
            var chart = this,
                xAxis = chart.xAxis[0],
                xRange = xAxis.max - xAxis.min,
                centerX = pick(centerXArg, xAxis.min + xRange / 2),
                newXRange = xRange * howMuch,
                yAxis = chart.yAxis[0],
                yRange = yAxis.max - yAxis.min,
                centerY = pick(centerYArg, yAxis.min + yRange / 2),
                newYRange = yRange * howMuch,
                // Fraction of the new range that lies before the fixed point
                fixToX = mouseX ? ((mouseX - xAxis.pos) / xAxis.len) : 0.5,
                fixToY = mouseY ? ((mouseY - yAxis.pos) / yAxis.len) : 0.5,
                newXMin = centerX - newXRange * fixToX,
                newYMin = centerY - newYRange * fixToY,
                // Constrain the proposed view to the data bounds
                newExt = chart.fitToBox({
                    x: newXMin,
                    y: newYMin,
                    width: newXRange,
                    height: newYRange
                }, {
                    x: xAxis.dataMin,
                    y: yAxis.dataMin,
                    width: xAxis.dataMax - xAxis.dataMin,
                    height: yAxis.dataMax - yAxis.dataMin
                }),
                // True when the requested view covers the whole data range
                zoomOut = newExt.x <= xAxis.dataMin &&
                newExt.width >= xAxis.dataMax - xAxis.dataMin &&
                newExt.y <= yAxis.dataMin &&
                newExt.height >= yAxis.dataMax - yAxis.dataMin;
            // When mousewheel zooming, fix the point under the mouse
            if (mouseX) {
                xAxis.fixTo = [mouseX - xAxis.pos, centerXArg];
            }
            if (mouseY) {
                yAxis.fixTo = [mouseY - yAxis.pos, centerYArg];
            }
            // Zoom
            if (howMuch !== undefined && !zoomOut) {
                xAxis.setExtremes(newExt.x, newExt.x + newExt.width, false);
                yAxis.setExtremes(newExt.y, newExt.y + newExt.height, false);
                // Reset zoom
            } else {
                xAxis.setExtremes(undefined, undefined, false);
                yAxis.setExtremes(undefined, undefined, false);
            }
            // Prevent zooming until this one is finished animating
            /*chart.holdMapZoom = true;
            setTimeout(function () {
                chart.holdMapZoom = false;
            }, 200);*/
            /*delay = animation ? animation.duration || 500 : 0;
            if (delay) {
                chart.isMapZooming = true;
                setTimeout(function () {
                    chart.isMapZooming = false;
                    if (chart.mapZoomQueue) {
                        chart.mapZoom.apply(chart, chart.mapZoomQueue);
                    }
                    chart.mapZoomQueue = null;
                }, delay);
            }*/
            chart.redraw();
        }
});
/**
* Extend the Chart.render method to add zooming and panning
*/
wrap(Chart.prototype, 'render', function(proceed) {
var chart = this,
mapNavigation = chart.options.mapNavigation;
// Render the plus and minus buttons. Doing this before the shapes makes getBBox much quicker, at least in Chrome.
chart.renderMapNavigation();
proceed.call(chart);
// Add the double click event
if (pick(mapNavigation.enableDoubleClickZoom, mapNavigation.enabled) || mapNavigation.enableDoubleClickZoomTo) {
addEvent(chart.container, 'dblclick', function(e) {
chart.pointer.onContainerDblClick(e);
});
}
// Add the mousewheel event
if (pick(mapNavigation.enableMouseWheelZoom, mapNavigation.enabled)) {
addEvent(chart.container, doc.onmousewheel === undefined ? 'DOMMouseScroll' : 'mousewheel', function(e) {
chart.pointer.onContainerMouseWheel(e);
stopEvent(e); // Issue #5011, returning false from non-jQuery event does not prevent default
return false;
});
}
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var extend = H.extend,
pick = H.pick,
Pointer = H.Pointer,
wrap = H.wrap;
// Extend the Pointer
extend(Pointer.prototype, {
/**
* The event handler for the doubleclick event
*/
onContainerDblClick: function(e) {
var chart = this.chart;
e = this.normalize(e);
if (chart.options.mapNavigation.enableDoubleClickZoomTo) {
if (chart.pointer.inClass(e.target, 'highcharts-tracker') && chart.hoverPoint) {
chart.hoverPoint.zoomTo();
}
} else if (chart.isInsidePlot(e.chartX - chart.plotLeft, e.chartY - chart.plotTop)) {
chart.mapZoom(
0.5,
chart.xAxis[0].toValue(e.chartX),
chart.yAxis[0].toValue(e.chartY),
e.chartX,
e.chartY
);
}
},
/**
* The event handler for the mouse scroll event
*/
onContainerMouseWheel: function(e) {
var chart = this.chart,
delta;
e = this.normalize(e);
// Firefox uses e.detail, WebKit and IE uses wheelDelta
delta = e.detail || -(e.wheelDelta / 120);
if (chart.isInsidePlot(e.chartX - chart.plotLeft, e.chartY - chart.plotTop)) {
chart.mapZoom(
Math.pow(chart.options.mapNavigation.mouseWheelSensitivity, delta),
chart.xAxis[0].toValue(e.chartX),
chart.yAxis[0].toValue(e.chartY),
e.chartX,
e.chartY
);
}
}
});
// Implement the pinchType option
wrap(Pointer.prototype, 'zoomOption', function(proceed) {
var mapNavigation = this.chart.options.mapNavigation;
proceed.apply(this, [].slice.call(arguments, 1));
// Pinch status
if (pick(mapNavigation.enableTouchZoom, mapNavigation.enabled)) {
this.pinchX = this.pinchHor = this.pinchY = this.pinchVert = this.hasZoom = true;
}
});
// Extend the pinchTranslate method to preserve fixed ratio when zooming
wrap(Pointer.prototype, 'pinchTranslate', function(proceed, pinchDown, touches, transform, selectionMarker, clip, lastValidTouch) {
var xBigger;
proceed.call(this, pinchDown, touches, transform, selectionMarker, clip, lastValidTouch);
// Keep ratio
if (this.chart.options.chart.type === 'map' && this.hasZoom) {
xBigger = transform.scaleX > transform.scaleY;
this.pinchTranslateDirection(!xBigger,
pinchDown,
touches,
transform,
selectionMarker,
clip,
lastValidTouch,
xBigger ? transform.scaleX : transform.scaleY
);
}
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var seriesType = H.seriesType,
seriesTypes = H.seriesTypes;
// The mapline series type
seriesType('mapline', 'map', {
}, {
type: 'mapline',
colorProp: 'stroke',
drawLegendSymbol: seriesTypes.line.prototype.drawLegendSymbol
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var merge = H.merge,
Point = H.Point,
seriesType = H.seriesType;
// The mappoint series type
seriesType('mappoint', 'scatter', {
dataLabels: {
enabled: true,
formatter: function() { // #2945
return this.point.name;
},
crop: false,
defer: false,
overflow: false,
style: {
color: '#000000'
}
}
// Prototype members
}, {
type: 'mappoint',
forceDL: true
// Point class
}, {
applyOptions: function(options, x) {
var mergedOptions = options.lat !== undefined && options.lon !== undefined ? merge(options, this.series.chart.fromLatLonToPoint(options)) : options;
return Point.prototype.applyOptions.call(this, mergedOptions, x);
}
});
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var arrayMax = H.arrayMax,
arrayMin = H.arrayMin,
Axis = H.Axis,
color = H.color,
each = H.each,
isNumber = H.isNumber,
noop = H.noop,
pick = H.pick,
pInt = H.pInt,
Point = H.Point,
Series = H.Series,
seriesType = H.seriesType,
seriesTypes = H.seriesTypes;
/* ****************************************************************************
* Start Bubble series code *
*****************************************************************************/
        seriesType('bubble', 'scatter', {
            dataLabels: {
                formatter: function() { // #2945
                    return this.point.z;
                },
                inside: true,
                verticalAlign: 'middle'
            },
            // displayNegative: true,
            marker: {
                // Avoid offset in Point.setState
                radius: null,
                states: {
                    hover: {
                        radiusPlus: 0
                    }
                }
            },
            minSize: 8, // smallest bubble diameter, in pixels
            maxSize: '20%', // pixels, or a percentage of the smaller plot dimension
            // negativeColor: null,
            // sizeBy: 'area'
            softThreshold: false,
            states: {
                hover: {
                    halo: {
                        size: 5
                    }
                }
            },
            tooltip: {
                pointFormat: '({point.x}, {point.y}), Size: {point.z}'
            },
            turboThreshold: 0,
            zThreshold: 0,
            zoneAxis: 'z' // zones apply to the z (size) dimension
            // Prototype members
        }, {
            // Array point definitions are read as [y, z]
            pointArrayMap: ['y', 'z'],
            parallelArrays: ['x', 'y', 'z'],
            trackerGroups: ['group', 'dataLabelsGroup'],
            bubblePadding: true, // request axis padding for radii (see Axis.beforePadding)
            zoneAxis: 'z',
            /**
             * Get the radius for each point based on the minSize, maxSize and each point's Z value. This
             * must be done prior to Series.translate because the axis needs to add padding in
             * accordance with the point sizes.
             */
            getRadii: function(zMin, zMax, minSize, maxSize) {
                var len,
                    i,
                    pos,
                    zData = this.zData,
                    radii = [],
                    options = this.options,
                    sizeByArea = options.sizeBy !== 'width',
                    zThreshold = options.zThreshold,
                    // NOTE(review): zRange is fixed from the incoming extremes
                    // and is not refreshed by the sizeByAbsoluteValue
                    // adjustment inside the loop — confirm this is intended
                    zRange = zMax - zMin,
                    value,
                    radius;
                // Set the shape type and arguments to be picked up in drawPoints
                for (i = 0, len = zData.length; i < len; i++) {
                    value = zData[i];
                    // When sizing by threshold, the absolute value of z determines the size
                    // of the bubble.
                    if (options.sizeByAbsoluteValue && value !== null) {
                        value = Math.abs(value - zThreshold);
                        zMax = Math.max(zMax - zThreshold, Math.abs(zMin - zThreshold));
                        zMin = 0;
                    }
                    if (value === null) {
                        radius = null;
                        // Issue #4419 - if value is less than zMin, push a radius that's always smaller than the minimum size
                    } else if (value < zMin) {
                        radius = minSize / 2 - 1;
                    } else {
                        // Relative size, a number between 0 and 1
                        pos = zRange > 0 ? (value - zMin) / zRange : 0.5;
                        // Sizing by area: radius scales with the square root
                        if (sizeByArea && pos >= 0) {
                            pos = Math.sqrt(pos);
                        }
                        radius = Math.ceil(minSize + pos * (maxSize - minSize)) / 2;
                    }
                    radii.push(radius);
                }
                this.radii = radii;
            },
/**
* Perform animation on the bubbles
*/
animate: function(init) {
var animation = this.options.animation;
if (!init) { // run the animation
each(this.points, function(point) {
var graphic = point.graphic,
shapeArgs = point.shapeArgs;
if (graphic && shapeArgs) {
// start values
graphic.attr('r', 1);
// animate
graphic.animate({
r: shapeArgs.r
}, animation);
}
});
// delete this function to allow it only once
this.animate = null;
}
},
/**
* Extend the base translate method to handle bubble size
*/
translate: function() {
var i,
data = this.data,
point,
radius,
radii = this.radii;
// Run the parent method
seriesTypes.scatter.prototype.translate.call(this);
// Set the shape type and arguments to be picked up in drawPoints
i = data.length;
while (i--) {
point = data[i];
radius = radii ? radii[i] : 0; // #1737
if (isNumber(radius) && radius >= this.minPxSize / 2) {
// Shape arguments
point.shapeType = 'circle';
point.shapeArgs = {
x: point.plotX,
y: point.plotY,
r: radius
};
// Alignment box for the data label
point.dlBox = {
x: point.plotX - radius,
y: point.plotY - radius,
width: 2 * radius,
height: 2 * radius
};
} else { // below zThreshold
point.shapeArgs = point.plotY = point.dlBox = undefined; // #1691
}
}
},
/**
* Get the series' symbol in the legend
*
* @param {Object} legend The legend object
* @param {Object} item The series (this) or point
*/
drawLegendSymbol: function(legend, item) {
var renderer = this.chart.renderer,
radius = renderer.fontMetrics(legend.itemStyle.fontSize).f / 2;
item.legendSymbol = renderer.circle(
radius,
legend.baseline - radius,
radius
).attr({
zIndex: 3
}).add(item.legendGroup);
item.legendSymbol.isMarker = true;
},
            // Reuse column's rendering and data-label alignment for the circles
            drawPoints: seriesTypes.column.prototype.drawPoints,
            alignDataLabel: seriesTypes.column.prototype.alignDataLabel,
            // KD tree and zone clipping are disabled for bubbles
            buildKDTree: noop,
            applyZones: noop
            // Point class
        }, {
            haloPath: function() {
                // Halo extends the bubble's own radius by the configured halo size
                return Point.prototype.haloPath.call(this, this.shapeArgs.r + this.series.options.states.hover.halo.size);
            },
            ttBelow: false
        });
/**
* Add logic to pad each axis with the amount of pixels
* necessary to avoid the bubbles to overflow.
*/
Axis.prototype.beforePadding = function() {
var axis = this,
axisLength = this.len,
chart = this.chart,
pxMin = 0,
pxMax = axisLength,
isXAxis = this.isXAxis,
dataKey = isXAxis ? 'xData' : 'yData',
min = this.min,
extremes = {},
smallestSize = Math.min(chart.plotWidth, chart.plotHeight),
zMin = Number.MAX_VALUE,
zMax = -Number.MAX_VALUE,
range = this.max - min,
transA = axisLength / range,
activeSeries = [];
// Handle padding on the second pass, or on redraw
each(this.series, function(series) {
var seriesOptions = series.options,
zData;
if (series.bubblePadding && (series.visible || !chart.options.chart.ignoreHiddenSeries)) {
// Correction for #1673
axis.allowZoomOutside = true;
// Cache it
activeSeries.push(series);
if (isXAxis) { // because X axis is evaluated first
// For each series, translate the size extremes to pixel values
each(['minSize', 'maxSize'], function(prop) {
var length = seriesOptions[prop],
isPercent = /%$/.test(length);
length = pInt(length);
extremes[prop] = isPercent ?
smallestSize * length / 100 :
length;
});
series.minPxSize = extremes.minSize;
series.maxPxSize = extremes.maxSize;
// Find the min and max Z
zData = series.zData;
if (zData.length) { // #1735
zMin = pick(seriesOptions.zMin, Math.min(
zMin,
Math.max(
arrayMin(zData),
seriesOptions.displayNegative === false ? seriesOptions.zThreshold : -Number.MAX_VALUE
)
));
zMax = pick(seriesOptions.zMax, Math.max(zMax, arrayMax(zData)));
}
}
}
});
each(activeSeries, function(series) {
var data = series[dataKey],
i = data.length,
radius;
if (isXAxis) {
series.getRadii(zMin, zMax, series.minPxSize, series.maxPxSize);
}
if (range > 0) {
while (i--) {
if (isNumber(data[i]) && axis.dataMin <= data[i] && data[i] <= axis.dataMax) {
radius = series.radii[i];
pxMin = Math.min(((data[i] - min) * transA) - radius, pxMin);
pxMax = Math.max(((data[i] - min) * transA) + radius, pxMax);
}
}
}
});
if (activeSeries.length && range > 0 && !this.isLog) {
pxMax -= axisLength;
transA *= (axisLength + pxMin - pxMax) / axisLength;
each([
['min', 'userMin', pxMin],
['max', 'userMax', pxMax]
], function(keys) {
if (pick(axis.options[keys[0]], axis[keys[1]]) === undefined) {
axis[keys[0]] += keys[2] / transA;
}
});
}
};
/* ****************************************************************************
* End Bubble series code *
*****************************************************************************/
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var merge = H.merge,
Point = H.Point,
seriesType = H.seriesType,
seriesTypes = H.seriesTypes;
// The mapbubble series type
if (seriesTypes.bubble) {
seriesType('mapbubble', 'bubble', {
animationLimit: 500,
tooltip: {
pointFormat: '{point.name}: {point.z}'
}
// Prototype members
}, {
xyFromShape: true,
type: 'mapbubble',
pointArrayMap: ['z'], // If one single value is passed, it is interpreted as z
/**
* Return the map area identified by the dataJoinBy option
*/
getMapData: seriesTypes.map.prototype.getMapData,
getBox: seriesTypes.map.prototype.getBox,
setData: seriesTypes.map.prototype.setData
// Point class
}, {
applyOptions: function(options, x) {
var point;
if (options && options.lat !== undefined && options.lon !== undefined) {
point = Point.prototype.applyOptions.call(
this,
merge(options, this.series.chart.fromLatLonToPoint(options)),
x
);
} else {
point = seriesTypes.map.prototype.pointClass.prototype.applyOptions.call(this, options, x);
}
return point;
},
ttBelow: false
});
}
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var Chart = H.Chart,
each = H.each,
extend = H.extend,
error = H.error,
format = H.format,
merge = H.merge,
win = H.win,
wrap = H.wrap;
/**
* Test for point in polygon. Polygon defined as array of [x,y] points.
*/
function pointInPolygon(point, polygon) {
var i,
j,
rel1,
rel2,
c = false,
x = point.x,
y = point.y;
for (i = 0, j = polygon.length - 1; i < polygon.length; j = i++) {
rel1 = polygon[i][1] > y;
rel2 = polygon[j][1] > y;
if (rel1 !== rel2 && (x < (polygon[j][0] - polygon[i][0]) * (y - polygon[i][1]) / (polygon[j][1] - polygon[i][1]) + polygon[i][0])) {
c = !c;
}
}
return c;
}
/**
* Get point from latLon using specified transform definition
*/
Chart.prototype.transformFromLatLon = function(latLon, transform) {
if (win.proj4 === undefined) {
error(21);
return {
x: 0,
y: null
};
}
var projected = win.proj4(transform.crs, [latLon.lon, latLon.lat]),
cosAngle = transform.cosAngle || (transform.rotation && Math.cos(transform.rotation)),
sinAngle = transform.sinAngle || (transform.rotation && Math.sin(transform.rotation)),
rotated = transform.rotation ? [projected[0] * cosAngle + projected[1] * sinAngle, -projected[0] * sinAngle + projected[1] * cosAngle] : projected;
return {
x: ((rotated[0] - (transform.xoffset || 0)) * (transform.scale || 1) + (transform.xpan || 0)) * (transform.jsonres || 1) + (transform.jsonmarginX || 0),
y: (((transform.yoffset || 0) - rotated[1]) * (transform.scale || 1) + (transform.ypan || 0)) * (transform.jsonres || 1) - (transform.jsonmarginY || 0)
};
};
/**
* Get latLon from point using specified transform definition
*/
Chart.prototype.transformToLatLon = function(point, transform) {
if (win.proj4 === undefined) {
error(21);
return;
}
var normalized = {
x: ((point.x - (transform.jsonmarginX || 0)) / (transform.jsonres || 1) - (transform.xpan || 0)) / (transform.scale || 1) + (transform.xoffset || 0),
y: ((-point.y - (transform.jsonmarginY || 0)) / (transform.jsonres || 1) + (transform.ypan || 0)) / (transform.scale || 1) + (transform.yoffset || 0)
},
cosAngle = transform.cosAngle || (transform.rotation && Math.cos(transform.rotation)),
sinAngle = transform.sinAngle || (transform.rotation && Math.sin(transform.rotation)),
// Note: Inverted sinAngle to reverse rotation direction
projected = win.proj4(transform.crs, 'WGS84', transform.rotation ? {
x: normalized.x * cosAngle + normalized.y * -sinAngle,
y: normalized.x * sinAngle + normalized.y * cosAngle
} : normalized);
return {
lat: projected.y,
lon: projected.x
};
};
Chart.prototype.fromPointToLatLon = function(point) {
var transforms = this.mapTransforms,
transform;
if (!transforms) {
error(22);
return;
}
for (transform in transforms) {
if (transforms.hasOwnProperty(transform) && transforms[transform].hitZone &&
pointInPolygon({
x: point.x,
y: -point.y
}, transforms[transform].hitZone.coordinates[0])) {
return this.transformToLatLon(point, transforms[transform]);
}
}
return this.transformToLatLon(point, transforms['default']); // eslint-disable-line dot-notation
};
Chart.prototype.fromLatLonToPoint = function(latLon) {
var transforms = this.mapTransforms,
transform,
coords;
if (!transforms) {
error(22);
return {
x: 0,
y: null
};
}
for (transform in transforms) {
if (transforms.hasOwnProperty(transform) && transforms[transform].hitZone) {
coords = this.transformFromLatLon(latLon, transforms[transform]);
if (pointInPolygon({
x: coords.x,
y: -coords.y
}, transforms[transform].hitZone.coordinates[0])) {
return coords;
}
}
}
return this.transformFromLatLon(latLon, transforms['default']); // eslint-disable-line dot-notation
};
/**
* Convert a geojson object to map data of a given Highcharts type (map, mappoint or mapline).
*/
H.geojson = function(geojson, hType, series) {
var mapData = [],
path = [],
polygonToPath = function(polygon) {
var i,
len = polygon.length;
path.push('M');
for (i = 0; i < len; i++) {
if (i === 1) {
path.push('L');
}
path.push(polygon[i][0], -polygon[i][1]);
}
};
hType = hType || 'map';
each(geojson.features, function(feature) {
var geometry = feature.geometry,
type = geometry.type,
coordinates = geometry.coordinates,
properties = feature.properties,
point;
path = [];
if (hType === 'map' || hType === 'mapbubble') {
if (type === 'Polygon') {
each(coordinates, polygonToPath);
path.push('Z');
} else if (type === 'MultiPolygon') {
each(coordinates, function(items) {
each(items, polygonToPath);
});
path.push('Z');
}
if (path.length) {
point = {
path: path
};
}
} else if (hType === 'mapline') {
if (type === 'LineString') {
polygonToPath(coordinates);
} else if (type === 'MultiLineString') {
each(coordinates, polygonToPath);
}
if (path.length) {
point = {
path: path
};
}
} else if (hType === 'mappoint') {
if (type === 'Point') {
point = {
x: coordinates[0],
y: -coordinates[1]
};
}
}
if (point) {
mapData.push(extend(point, {
name: properties.name || properties.NAME,
properties: properties
}));
}
});
// Create a credits text that includes map source, to be picked up in Chart.addCredits
if (series && geojson.copyrightShort) {
series.chart.mapCredits = format(series.chart.options.credits.mapText, {
geojson: geojson
});
series.chart.mapCreditsFull = format(series.chart.options.credits.mapTextFull, {
geojson: geojson
});
}
return mapData;
};
/**
* Override addCredits to include map source by default
*/
wrap(Chart.prototype, 'addCredits', function(proceed, credits) {
credits = merge(true, this.options.credits, credits);
// Disable credits link if map credits enabled. This to allow for in-text anchors.
if (this.mapCredits) {
credits.href = null;
}
proceed.call(this, credits);
// Add full map credits to hover
if (this.credits && this.mapCreditsFull) {
this.credits.attr({
title: this.mapCreditsFull
});
}
});
}(Highcharts));
    (function(H) {
        /**
         * (c) 2010-2016 Torstein Honsi
         *
         * License: www.highcharts.com/license
         */
        'use strict';
        // Map navigation module: default options, zoom-button symbols and the
        // H.Map chart constructor.
        var Chart = H.Chart,
            defaultOptions = H.defaultOptions,
            each = H.each,
            extend = H.extend,
            merge = H.merge,
            pick = H.pick,
            Renderer = H.Renderer,
            SVGRenderer = H.SVGRenderer,
            VMLRenderer = H.VMLRenderer;
        // Add language
        extend(defaultOptions.lang, {
            zoomIn: 'Zoom in',
            zoomOut: 'Zoom out'
        });
        // Set the default map navigation options
        defaultOptions.mapNavigation = {
            buttonOptions: {
                alignTo: 'plotBox',
                align: 'left',
                verticalAlign: 'top',
                x: 0,
                width: 18,
                height: 18,
                padding: 5
            },
            buttons: {
                // NOTE(review): mapZoom is defined elsewhere in the bundle;
                // a factor below 1 appears to zoom in and above 1 to zoom
                // out — confirm against the mapZoom implementation.
                zoomIn: {
                    onclick: function() {
                        this.mapZoom(0.5);
                    },
                    text: '+',
                    y: 0
                },
                zoomOut: {
                    onclick: function() {
                        this.mapZoom(2);
                    },
                    text: '-',
                    y: 28
                }
            },
            mouseWheelSensitivity: 1.1
            // enabled: false,
            // enableButtons: null, // inherit from enabled
            // enableTouchZoom: null, // inherit from enabled
            // enableDoubleClickZoom: null, // inherit from enabled
            // enableDoubleClickZoomTo: false
            // enableMouseWheelZoom: null, // inherit from enabled
        };
/**
* Utility for reading SVG paths directly.
*/
H.splitPath = function(path) {
var i;
// Move letters apart
path = path.replace(/([A-Za-z])/g, ' $1 ');
// Trim
path = path.replace(/^\s*/, '').replace(/\s*$/, '');
// Split on spaces and commas
path = path.split(/[ ,]+/); // Extra comma to escape gulp.scripts task
// Parse numbers
for (i = 0; i < path.length; i++) {
if (!/[a-zA-Z]/.test(path[i])) {
path[i] = parseFloat(path[i]);
}
}
return path;
};
// A placeholder for map definitions
H.maps = {};
// Create symbols for the zoom buttons
function selectiveRoundedRect(x, y, w, h, rTopLeft, rTopRight, rBottomRight, rBottomLeft) {
return ['M', x + rTopLeft, y,
// top side
'L', x + w - rTopRight, y,
// top right corner
'C', x + w - rTopRight / 2, y, x + w, y + rTopRight / 2, x + w, y + rTopRight,
// right side
'L', x + w, y + h - rBottomRight,
// bottom right corner
'C', x + w, y + h - rBottomRight / 2, x + w - rBottomRight / 2, y + h, x + w - rBottomRight, y + h,
// bottom side
'L', x + rBottomLeft, y + h,
// bottom left corner
'C', x + rBottomLeft / 2, y + h, x, y + h - rBottomLeft / 2, x, y + h - rBottomLeft,
// left side
'L', x, y + rTopLeft,
// top left corner
'C', x, y + rTopLeft / 2, x + rTopLeft / 2, y, x + rTopLeft, y,
'Z'
];
}
SVGRenderer.prototype.symbols.topbutton = function(x, y, w, h, attr) {
return selectiveRoundedRect(x - 1, y - 1, w, h, attr.r, attr.r, 0, 0);
};
SVGRenderer.prototype.symbols.bottombutton = function(x, y, w, h, attr) {
return selectiveRoundedRect(x - 1, y - 1, w, h, 0, 0, attr.r, attr.r);
};
// The symbol callbacks are generated on the SVGRenderer object in all browsers. Even
// VML browsers need this in order to generate shapes in export. Now share
// them with the VMLRenderer.
if (Renderer === VMLRenderer) {
each(['topbutton', 'bottombutton'], function(shape) {
VMLRenderer.prototype.symbols[shape] = SVGRenderer.prototype.symbols[shape];
});
}
/**
* A wrapper for Chart with all the default values for a Map
*/
H.Map = H.mapChart = function(a, b, c) {
var hasRenderToArg = typeof a === 'string' || a.nodeName,
options = arguments[hasRenderToArg ? 1 : 0],
hiddenAxis = {
endOnTick: false,
visible: false,
minPadding: 0,
maxPadding: 0,
startOnTick: false
},
seriesOptions,
defaultCreditsOptions = H.getOptions().credits;
/* For visual testing
hiddenAxis.gridLineWidth = 1;
hiddenAxis.gridZIndex = 10;
hiddenAxis.tickPositions = undefined;
// */
// Don't merge the data
seriesOptions = options.series;
options.series = null;
options = merge({
chart: {
panning: 'xy',
type: 'map'
},
credits: {
mapText: pick(defaultCreditsOptions.mapText, ' \u00a9 <a href="{geojson.copyrightUrl}">{geojson.copyrightShort}</a>'),
mapTextFull: pick(defaultCreditsOptions.mapTextFull, '{geojson.copyright}')
},
xAxis: hiddenAxis,
yAxis: merge(hiddenAxis, {
reversed: true
})
},
options, // user's options
{ // forced options
chart: {
inverted: false,
alignTicks: false
}
}
);
options.series = seriesOptions;
return hasRenderToArg ?
new Chart(a, options, c) :
new Chart(options, b);
};
}(Highcharts));
(function(H) {
/**
* (c) 2010-2016 Torstein Honsi
*
* License: www.highcharts.com/license
*/
'use strict';
var colorPointMixin = H.colorPointMixin,
colorSeriesMixin = H.colorSeriesMixin,
each = H.each,
LegendSymbolMixin = H.LegendSymbolMixin,
merge = H.merge,
noop = H.noop,
pick = H.pick,
Series = H.Series,
seriesType = H.seriesType,
seriesTypes = H.seriesTypes;
// The Heatmap series type
seriesType('heatmap', 'scatter', {
animation: false,
borderWidth: 0,
dataLabels: {
formatter: function() { // #2945
return this.point.value;
},
inside: true,
verticalAlign: 'middle',
crop: false,
overflow: false,
padding: 0 // #3837
},
marker: null,
pointRange: null, // dynamically set to colsize by default
tooltip: {
pointFormat: '{point.x}, {point.y}: {point.value}<br/>'
},
states: {
normal: {
animation: true
},
hover: {
halo: false, // #3406, halo is not required on heatmaps
brightness: 0.2
}
}
}, merge(colorSeriesMixin, {
pointArrayMap: ['y', 'value'],
hasPointSpecificOptions: true,
supportsDrilldown: true,
getExtremesFromAll: true,
directTouch: true,
/**
* Override the init method to add point ranges on both axes.
*/
init: function() {
var options;
seriesTypes.scatter.prototype.init.apply(this, arguments);
options = this.options;
options.pointRange = pick(options.pointRange, options.colsize || 1); // #3758, prevent resetting in setData
this.yAxis.axisPointRange = options.rowsize || 1; // general point range
},
translate: function() {
var series = this,
options = series.options,
xAxis = series.xAxis,
yAxis = series.yAxis,
between = function(x, a, b) {
return Math.min(Math.max(a, x), b);
};
series.generatePoints();
each(series.points, function(point) {
var xPad = (options.colsize || 1) / 2,
yPad = (options.rowsize || 1) / 2,
x1 = between(Math.round(xAxis.len - xAxis.translate(point.x - xPad, 0, 1, 0, 1)), -xAxis.len, 2 * xAxis.len),
x2 = between(Math.round(xAxis.len - xAxis.translate(point.x + xPad, 0, 1, 0, 1)), -xAxis.len, 2 * xAxis.len),
y1 = between(Math.round(yAxis.translate(point.y - yPad, 0, 1, 0, 1)), -yAxis.len, 2 * yAxis.len),
y2 = between(Math.round(yAxis.translate(point.y + yPad, 0, 1, 0, 1)), -yAxis.len, 2 * yAxis.len);
// Set plotX and plotY for use in K-D-Tree and more
point.plotX = point.clientX = (x1 + x2) / 2;
point.plotY = (y1 + y2) / 2;
point.shapeType = 'rect';
point.shapeArgs = {
x: Math.min(x1, x2),
y: Math.min(y1, y2),
width: Math.abs(x2 - x1),
height: Math.abs(y2 - y1)
};
});
series.translateColors();
},
drawPoints: function() {
seriesTypes.column.prototype.drawPoints.call(this);
each(this.points, function(point) {
point.graphic.attr(this.colorAttribs(point, point.state));
}, this);
},
animate: noop,
getBox: noop,
drawLegendSymbol: LegendSymbolMixin.drawRectangle,
alignDataLabel: seriesTypes.column.prototype.alignDataLabel,
getExtremes: function() {
// Get the extremes from the value data
Series.prototype.getExtremes.call(this, this.valueData);
this.valueMin = this.dataMin;
this.valueMax = this.dataMax;
// Get the extremes from the y data
Series.prototype.getExtremes.call(this);
}
}), colorPointMixin);
}(Highcharts));
    (function(H) {
        /**
         * (c) 2010-2016 Torstein Honsi
         *
         * License: www.highcharts.com/license
         */
        'use strict';
        // Interaction module: mouse/touch tracking, legend item events, and
        // hover/select state handling for Chart, Point and Series.
        var addEvent = H.addEvent,
            Chart = H.Chart,
            createElement = H.createElement,
            css = H.css,
            defaultOptions = H.defaultOptions,
            defaultPlotOptions = H.defaultPlotOptions,
            each = H.each,
            extend = H.extend,
            fireEvent = H.fireEvent,
            hasTouch = H.hasTouch,
            inArray = H.inArray,
            isObject = H.isObject,
            Legend = H.Legend,
            merge = H.merge,
            pick = H.pick,
            Point = H.Point,
            Series = H.Series,
            seriesTypes = H.seriesTypes,
            svg = H.svg,
            TrackerMixin;
TrackerMixin = H.TrackerMixin = {
drawTrackerPoint: function() {
var series = this,
chart = series.chart,
pointer = chart.pointer,
onMouseOver = function(e) {
var target = e.target,
point;
while (target && !point) {
point = target.point;
target = target.parentNode;
}
if (point !== undefined && point !== chart.hoverPoint) { // undefined on graph in scatterchart
point.onMouseOver(e);
}
};
// Add reference to the point
each(series.points, function(point) {
if (point.graphic) {
point.graphic.element.point = point;
}
if (point.dataLabel) {
point.dataLabel.element.point = point;
}
});
// Add the event listeners, we need to do this only once
if (!series._hasTracking) {
each(series.trackerGroups, function(key) {
if (series[key]) { // we don't always have dataLabelsGroup
series[key]
.addClass('highcharts-tracker')
.on('mouseover', onMouseOver)
.on('mouseout', function(e) {
pointer.onTrackerMouseOut(e);
});
if (hasTouch) {
series[key].on('touchstart', onMouseOver);
}
}
});
series._hasTracking = true;
}
},
/**
* Draw the tracker object that sits above all data labels and markers to
* track mouse events on the graph or points. For the line type charts
* the tracker uses the same graphPath, but with a greater stroke width
* for better control.
*/
drawTrackerGraph: function() {
var series = this,
options = series.options,
trackByArea = options.trackByArea,
trackerPath = [].concat(trackByArea ? series.areaPath : series.graphPath),
trackerPathLength = trackerPath.length,
chart = series.chart,
pointer = chart.pointer,
renderer = chart.renderer,
snap = chart.options.tooltip.snap,
tracker = series.tracker,
i,
onMouseOver = function() {
if (chart.hoverSeries !== series) {
series.onMouseOver();
}
},
/*
* Empirical lowest possible opacities for TRACKER_FILL for an element to stay invisible but clickable
* IE6: 0.002
* IE7: 0.002
* IE8: 0.002
* IE9: 0.00000000001 (unlimited)
* IE10: 0.0001 (exporting only)
* FF: 0.00000000001 (unlimited)
* Chrome: 0.000001
* Safari: 0.000001
* Opera: 0.00000000001 (unlimited)
*/
TRACKER_FILL = 'rgba(192,192,192,' + (svg ? 0.0001 : 0.002) + ')';
// Extend end points. A better way would be to use round linecaps,
// but those are not clickable in VML.
if (trackerPathLength && !trackByArea) {
i = trackerPathLength + 1;
while (i--) {
if (trackerPath[i] === 'M') { // extend left side
trackerPath.splice(i + 1, 0, trackerPath[i + 1] - snap, trackerPath[i + 2], 'L');
}
if ((i && trackerPath[i] === 'M') || i === trackerPathLength) { // extend right side
trackerPath.splice(i, 0, 'L', trackerPath[i - 2] + snap, trackerPath[i - 1]);
}
}
}
// handle single points
/*for (i = 0; i < singlePoints.length; i++) {
singlePoint = singlePoints[i];
trackerPath.push(M, singlePoint.plotX - snap, singlePoint.plotY,
L, singlePoint.plotX + snap, singlePoint.plotY);
}*/
// draw the tracker
if (tracker) {
tracker.attr({
d: trackerPath
});
} else if (series.graph) { // create
series.tracker = renderer.path(trackerPath)
.attr({
'stroke-linejoin': 'round', // #1225
visibility: series.visible ? 'visible' : 'hidden',
stroke: TRACKER_FILL,
fill: trackByArea ? TRACKER_FILL : 'none',
'stroke-width': series.graph.strokeWidth() + (trackByArea ? 0 : 2 * snap),
zIndex: 2
})
.add(series.group);
// The tracker is added to the series group, which is clipped, but is covered
// by the marker group. So the marker group also needs to capture events.
each([series.tracker, series.markerGroup], function(tracker) {
tracker.addClass('highcharts-tracker')
.on('mouseover', onMouseOver)
.on('mouseout', function(e) {
pointer.onTrackerMouseOut(e);
});
if (hasTouch) {
tracker.on('touchstart', onMouseOver);
}
});
}
}
};
/* End TrackerMixin */
/**
* Add tracking event listener to the series group, so the point graphics
* themselves act as trackers
*/
if (seriesTypes.column) {
seriesTypes.column.prototype.drawTracker = TrackerMixin.drawTrackerPoint;
}
if (seriesTypes.pie) {
seriesTypes.pie.prototype.drawTracker = TrackerMixin.drawTrackerPoint;
}
if (seriesTypes.scatter) {
seriesTypes.scatter.prototype.drawTracker = TrackerMixin.drawTrackerPoint;
}
/*
* Extend Legend for item events
*/
extend(Legend.prototype, {
setItemEvents: function(item, legendItem, useHTML) {
var legend = this,
chart = legend.chart,
activeClass = 'highcharts-legend-' + (item.series ? 'point' : 'series') + '-active';
// Set the events on the item group, or in case of useHTML, the item itself (#1249)
(useHTML ? legendItem : item.legendGroup).on('mouseover', function() {
item.setState('hover');
// A CSS class to dim or hide other than the hovered series
chart.seriesGroup.addClass(activeClass);
})
.on('mouseout', function() {
// A CSS class to dim or hide other than the hovered series
chart.seriesGroup.removeClass(activeClass);
item.setState();
})
.on('click', function(event) {
var strLegendItemClick = 'legendItemClick',
fnLegendItemClick = function() {
if (item.setVisible) {
item.setVisible();
}
};
// Pass over the click/touch event. #4.
event = {
browserEvent: event
};
// click the name or symbol
if (item.firePointEvent) { // point
item.firePointEvent(strLegendItemClick, event, fnLegendItemClick);
} else {
fireEvent(item, strLegendItemClick, event, fnLegendItemClick);
}
});
},
createCheckboxForItem: function(item) {
var legend = this;
item.checkbox = createElement('input', {
type: 'checkbox',
checked: item.selected,
defaultChecked: item.selected // required by IE7
}, legend.options.itemCheckboxStyle, legend.chart.container);
addEvent(item.checkbox, 'click', function(event) {
var target = event.target;
fireEvent(
item.series || item,
'checkboxClick', { // #3712
checked: target.checked,
item: item
},
function() {
item.select();
}
);
});
}
});
/*
* Extend the Chart object with interaction
*/
extend(Chart.prototype, {
/**
* Display the zoom button
*/
showResetZoom: function() {
var chart = this,
lang = defaultOptions.lang,
btnOptions = chart.options.chart.resetZoomButton,
theme = btnOptions.theme,
states = theme.states,
alignTo = btnOptions.relativeTo === 'chart' ? null : 'plotBox';
function zoomOut() {
chart.zoomOut();
}
this.resetZoomButton = chart.renderer.button(lang.resetZoom, null, null, zoomOut, theme, states && states.hover)
.attr({
align: btnOptions.position.align,
title: lang.resetZoomTitle
})
.addClass('highcharts-reset-zoom')
.add()
.align(btnOptions.position, false, alignTo);
},
/**
* Zoom out to 1:1
*/
zoomOut: function() {
var chart = this;
fireEvent(chart, 'selection', {
resetSelection: true
}, function() {
chart.zoom();
});
},
/**
* Zoom into a given portion of the chart given by axis coordinates
* @param {Object} event
*/
zoom: function(event) {
var chart = this,
hasZoomed,
pointer = chart.pointer,
displayButton = false,
resetZoomButton;
// If zoom is called with no arguments, reset the axes
if (!event || event.resetSelection) {
each(chart.axes, function(axis) {
hasZoomed = axis.zoom();
});
} else { // else, zoom in on all axes
each(event.xAxis.concat(event.yAxis), function(axisData) {
var axis = axisData.axis,
isXAxis = axis.isXAxis;
// don't zoom more than minRange
if (pointer[isXAxis ? 'zoomX' : 'zoomY'] || pointer[isXAxis ? 'pinchX' : 'pinchY']) {
hasZoomed = axis.zoom(axisData.min, axisData.max);
if (axis.displayBtn) {
displayButton = true;
}
}
});
}
// Show or hide the Reset zoom button
resetZoomButton = chart.resetZoomButton;
if (displayButton && !resetZoomButton) {
chart.showResetZoom();
} else if (!displayButton && isObject(resetZoomButton)) {
chart.resetZoomButton = resetZoomButton.destroy();
}
// Redraw
if (hasZoomed) {
chart.redraw(
pick(chart.options.chart.animation, event && event.animation, chart.pointCount < 100) // animation
);
}
},
/**
* Pan the chart by dragging the mouse across the pane. This function is called
* on mouse move, and the distance to pan is computed from chartX compared to
* the first chartX position in the dragging operation.
*/
pan: function(e, panning) {
var chart = this,
hoverPoints = chart.hoverPoints,
doRedraw;
// remove active points for shared tooltip
if (hoverPoints) {
each(hoverPoints, function(point) {
point.setState();
});
}
each(panning === 'xy' ? [1, 0] : [1], function(isX) { // xy is used in maps
var axis = chart[isX ? 'xAxis' : 'yAxis'][0],
horiz = axis.horiz,
mousePos = e[horiz ? 'chartX' : 'chartY'],
mouseDown = horiz ? 'mouseDownX' : 'mouseDownY',
startPos = chart[mouseDown],
halfPointRange = (axis.pointRange || 0) / 2,
extremes = axis.getExtremes(),
newMin = axis.toValue(startPos - mousePos, true) + halfPointRange,
newMax = axis.toValue(startPos + axis.len - mousePos, true) - halfPointRange,
goingLeft = startPos > mousePos; // #3613
if (axis.series.length &&
(goingLeft || newMin > Math.min(extremes.dataMin, extremes.min)) &&
(!goingLeft || newMax < Math.max(extremes.dataMax, extremes.max))) {
axis.setExtremes(newMin, newMax, false, false, {
trigger: 'pan'
});
doRedraw = true;
}
chart[mouseDown] = mousePos; // set new reference for next run
});
if (doRedraw) {
chart.redraw(false);
}
css(chart.container, {
cursor: 'move'
});
}
});
/*
* Extend the Point object with interaction
*/
extend(Point.prototype, {
/**
* Toggle the selection status of a point
* @param {Boolean} selected Whether to select or unselect the point.
* @param {Boolean} accumulate Whether to add to the previous selection. By default,
* this happens if the control key (Cmd on Mac) was pressed during clicking.
*/
select: function(selected, accumulate) {
var point = this,
series = point.series,
chart = series.chart;
selected = pick(selected, !point.selected);
// fire the event with the default handler
point.firePointEvent(selected ? 'select' : 'unselect', {
accumulate: accumulate
}, function() {
point.selected = point.options.selected = selected;
series.options.data[inArray(point, series.data)] = point.options;
point.setState(selected && 'select');
// unselect all other points unless Ctrl or Cmd + click
if (!accumulate) {
each(chart.getSelectedPoints(), function(loopPoint) {
if (loopPoint.selected && loopPoint !== point) {
loopPoint.selected = loopPoint.options.selected = false;
series.options.data[inArray(loopPoint, series.data)] = loopPoint.options;
loopPoint.setState('');
loopPoint.firePointEvent('unselect');
}
});
}
});
},
/**
* Runs on mouse over the point
*
* @param {Object} e The event arguments
* @param {Boolean} byProximity Falsy for kd points that are closest to the mouse, or to
* actually hovered points. True for other points in shared tooltip.
*/
onMouseOver: function(e, byProximity) {
var point = this,
series = point.series,
chart = series.chart,
tooltip = chart.tooltip,
hoverPoint = chart.hoverPoint;
if (point.series) { // It may have been destroyed, #4130
// In shared tooltip, call mouse over when point/series is actually hovered: (#5766)
if (!byProximity) {
// set normal state to previous series
if (hoverPoint && hoverPoint !== point) {
hoverPoint.onMouseOut();
}
if (chart.hoverSeries !== series) {
series.onMouseOver();
}
chart.hoverPoint = point;
}
// update the tooltip
if (tooltip && (!tooltip.shared || series.noSharedTooltip)) {
// hover point only for non shared points: (#5766)
point.setState('hover');
tooltip.refresh(point, e);
} else if (!tooltip) {
point.setState('hover');
}
// trigger the event
point.firePointEvent('mouseOver');
}
},
/**
* Runs on mouse out from the point
*/
onMouseOut: function() {
var chart = this.series.chart,
hoverPoints = chart.hoverPoints;
this.firePointEvent('mouseOut');
if (!hoverPoints || inArray(this, hoverPoints) === -1) { // #887, #2240
this.setState();
chart.hoverPoint = null;
}
},
/**
* Import events from the series' and point's options. Only do it on
* demand, to save processing time on hovering.
*/
importEvents: function() {
if (!this.hasImportedEvents) {
var point = this,
options = merge(point.series.options.point, point.options),
events = options.events,
eventType;
point.events = events;
for (eventType in events) {
addEvent(point, eventType, events[eventType]);
}
this.hasImportedEvents = true;
}
},
/**
* Set the point's state
* @param {String} state
*/
setState: function(state, move) {
var point = this,
plotX = Math.floor(point.plotX), // #4586
plotY = point.plotY,
series = point.series,
stateOptions = series.options.states[state] || {},
markerOptions = defaultPlotOptions[series.type].marker &&
series.options.marker,
normalDisabled = markerOptions && markerOptions.enabled === false,
markerStateOptions = (markerOptions && markerOptions.states &&
markerOptions.states[state]) || {},
stateDisabled = markerStateOptions.enabled === false,
stateMarkerGraphic = series.stateMarkerGraphic,
pointMarker = point.marker || {},
chart = series.chart,
halo = series.halo,
haloOptions,
markerAttribs,
newSymbol;
state = state || ''; // empty string
if (
// already has this state
(state === point.state && !move) ||
// selected points don't respond to hover
(point.selected && state !== 'select') ||
// series' state options is disabled
(stateOptions.enabled === false) ||
// general point marker's state options is disabled
(state && (stateDisabled || (normalDisabled && markerStateOptions.enabled === false))) ||
// individual point marker's state options is disabled
(state && pointMarker.states && pointMarker.states[state] && pointMarker.states[state].enabled === false) // #1610
) {
return;
}
if (markerOptions) {
markerAttribs = series.markerAttribs(point, state);
}
// Apply hover styles to the existing point
if (point.graphic) {
if (point.state) {
point.graphic.removeClass('highcharts-point-' + point.state);
}
if (state) {
point.graphic.addClass('highcharts-point-' + state);
}
/*attribs = radius ? { // new symbol attributes (#507, #612)
x: plotX - radius,
y: plotY - radius,
width: 2 * radius,
height: 2 * radius
} : {};*/
if (markerAttribs) {
point.graphic.animate(
markerAttribs,
pick(
chart.options.chart.animation, // Turn off globally
markerStateOptions.animation,
markerOptions.animation
)
);
}
// Zooming in from a range with no markers to a range with markers
if (stateMarkerGraphic) {
stateMarkerGraphic.hide();
}
} else {
// if a graphic is not applied to each point in the normal state, create a shared
// graphic for the hover state
if (state && markerStateOptions) {
newSymbol = pointMarker.symbol || series.symbol;
// If the point has another symbol than the previous one, throw away the
// state marker graphic and force a new one (#1459)
if (stateMarkerGraphic && stateMarkerGraphic.currentSymbol !== newSymbol) {
stateMarkerGraphic = stateMarkerGraphic.destroy();
}
// Add a new state marker graphic
if (!stateMarkerGraphic) {
if (newSymbol) {
series.stateMarkerGraphic = stateMarkerGraphic = chart.renderer.symbol(
newSymbol,
markerAttribs.x,
markerAttribs.y,
markerAttribs.width,
markerAttribs.height
)
.add(series.markerGroup);
stateMarkerGraphic.currentSymbol = newSymbol;
}
// Move the existing graphic
} else {
stateMarkerGraphic[move ? 'animate' : 'attr']({ // #1054
x: markerAttribs.x,
y: markerAttribs.y
});
}
}
if (stateMarkerGraphic) {
stateMarkerGraphic[state && chart.isInsidePlot(plotX, plotY, chart.inverted) ? 'show' : 'hide'](); // #2450
stateMarkerGraphic.element.point = point; // #4310
}
}
// Show me your halo
haloOptions = stateOptions.halo;
if (haloOptions && haloOptions.size) {
if (!halo) {
series.halo = halo = chart.renderer.path()
.add(series.markerGroup || series.group);
}
H.stop(halo);
halo[move ? 'animate' : 'attr']({
d: point.haloPath(haloOptions.size)
});
halo.attr({
'class': 'highcharts-halo highcharts-color-' + pick(point.colorIndex, series.colorIndex)
});
} else if (halo) {
halo.animate({
d: point.haloPath(0)
}); // Hide
}
point.state = state;
},
/**
* Get the circular path definition for the halo
* @param {Number} size The radius of the circular halo
* @returns {Array} The path definition
*/
haloPath: function(size) {
var series = this.series,
chart = series.chart,
inverted = chart.inverted,
plotX = Math.floor(this.plotX);
return chart.renderer.symbols.circle(
(inverted ? series.yAxis.len - this.plotY : plotX) - size,
(inverted ? series.xAxis.len - plotX : this.plotY) - size,
size * 2,
size * 2
);
}
});
/*
* Extend the Series object with interaction
*/
extend(Series.prototype, {
/**
* Series mouse over handler
*/
onMouseOver: function() {
var series = this,
chart = series.chart,
hoverSeries = chart.hoverSeries;
// set normal state to previous series
if (hoverSeries && hoverSeries !== series) {
hoverSeries.onMouseOut();
}
// trigger the event, but to save processing time,
// only if defined
if (series.options.events.mouseOver) {
fireEvent(series, 'mouseOver');
}
// hover this
series.setState('hover');
chart.hoverSeries = series;
},
/**
* Series mouse out handler
*/
onMouseOut: function() {
// trigger the event only if listeners exist
var series = this,
options = series.options,
chart = series.chart,
tooltip = chart.tooltip,
hoverPoint = chart.hoverPoint;
chart.hoverSeries = null; // #182, set to null before the mouseOut event fires
// trigger mouse out on the point, which must be in this series
if (hoverPoint) {
hoverPoint.onMouseOut();
}
// fire the mouse out event
if (series && options.events.mouseOut) {
fireEvent(series, 'mouseOut');
}
// hide the tooltip
if (tooltip && !options.stickyTracking && (!tooltip.shared || series.noSharedTooltip)) {
tooltip.hide();
}
// set normal state
series.setState();
},
/**
* Set the state of the graph
*/
setState: function(state) {
var series = this,
options = series.options,
graph = series.graph,
stateOptions = options.states,
lineWidth = options.lineWidth,
attribs,
i = 0;
state = state || '';
if (series.state !== state) {
// Toggle class names
each([series.group, series.markerGroup], function(group) {
if (group) {
// Old state
if (series.state) {
group.removeClass('highcharts-series-' + series.state);
}
// New state
if (state) {
group.addClass('highcharts-series-' + state);
}
}
});
series.state = state;
}
},
/**
* Set the visibility of the graph
*
* @param vis {Boolean} True to show the series, false to hide. If undefined,
* the visibility is toggled.
*/
setVisible: function(vis, redraw) {
var series = this,
chart = series.chart,
legendItem = series.legendItem,
showOrHide,
ignoreHiddenSeries = chart.options.chart.ignoreHiddenSeries,
oldVisibility = series.visible;
// if called without an argument, toggle visibility
series.visible = vis = series.options.visible = series.userOptions.visible = vis === undefined ? !oldVisibility : vis; // #5618
showOrHide = vis ? 'show' : 'hide';
// show or hide elements
each(['group', 'dataLabelsGroup', 'markerGroup', 'tracker', 'tt'], function(key) {
if (series[key]) {
series[key][showOrHide]();
}
});
// hide tooltip (#1361)
if (chart.hoverSeries === series || (chart.hoverPoint && chart.hoverPoint.series) === series) {
series.onMouseOut();
}
if (legendItem) {
chart.legend.colorizeItem(series, vis);
}
// rescale or adapt to resized chart
series.isDirty = true;
// in a stack, all other series are affected
if (series.options.stacking) {
each(chart.series, function(otherSeries) {
if (otherSeries.options.stacking && otherSeries.visible) {
otherSeries.isDirty = true;
}
});
}
// show or hide linked series
each(series.linkedSeries, function(otherSeries) {
otherSeries.setVisible(vis, false);
});
if (ignoreHiddenSeries) {
chart.isDirtyBox = true;
}
if (redraw !== false) {
chart.redraw();
}
fireEvent(series, showOrHide);
},
/**
* Show the graph
*/
show: function() {
this.setVisible(true);
},
/**
* Hide the graph
*/
hide: function() {
this.setVisible(false);
},
/**
* Set the selected state of the graph
*
* @param selected {Boolean} True to select the series, false to unselect. If
* undefined, the selection state is toggled.
*/
select: function(selected) {
var series = this;
// if called without an argument, toggle
series.selected = selected = (selected === undefined) ? !series.selected : selected;
if (series.checkbox) {
series.checkbox.checked = selected;
}
fireEvent(series, selected ? 'select' : 'unselect');
},
drawTracker: TrackerMixin.drawTrackerGraph
});
}(Highcharts));
    (function(H) {
        /**
         * (c) 2010-2016 Torstein Honsi
         *
         * License: www.highcharts.com/license
         */
        'use strict';
        // Responsive module: applies/unapplies chartOptions rules based on
        // the current chart size.
        var Chart = H.Chart,
            each = H.each,
            inArray = H.inArray,
            isObject = H.isObject,
            pick = H.pick,
            splat = H.splat;
/**
* Update the chart based on the current chart/document size and options for responsiveness
*/
Chart.prototype.setResponsive = function(redraw) {
var options = this.options.responsive;
if (options && options.rules) {
each(options.rules, function(rule) {
this.matchResponsiveRule(rule, redraw);
}, this);
}
};
/**
* Handle a single responsiveness rule
*/
Chart.prototype.matchResponsiveRule = function(rule, redraw) {
var respRules = this.respRules,
condition = rule.condition,
matches,
fn = rule.callback || function() {
return this.chartWidth <= pick(condition.maxWidth, Number.MAX_VALUE) &&
this.chartHeight <= pick(condition.maxHeight, Number.MAX_VALUE) &&
this.chartWidth >= pick(condition.minWidth, 0) &&
this.chartHeight >= pick(condition.minHeight, 0);
};
if (rule._id === undefined) {
rule._id = H.idCounter++;
}
matches = fn.call(this);
// Apply a rule
if (!respRules[rule._id] && matches) {
// Store the current state of the options
if (rule.chartOptions) {
respRules[rule._id] = this.currentOptions(rule.chartOptions);
this.update(rule.chartOptions, redraw);
}
// Unapply a rule based on the previous options before the rule
// was applied
} else if (respRules[rule._id] && !matches) {
this.update(respRules[rule._id], redraw);
delete respRules[rule._id];
}
};
/**
* Get the current values for a given set of options. Used before we update
* the chart with a new responsiveness rule.
* TODO: Restore axis options (by id?)
*/
Chart.prototype.currentOptions = function(options) {
var ret = {};
/**
* Recurse over a set of options and its current values,
* and store the current values in the ret object.
*/
function getCurrent(options, curr, ret) {
var key, i;
for (key in options) {
if (inArray(key, ['series', 'xAxis', 'yAxis']) > -1) {
options[key] = splat(options[key]);
ret[key] = [];
for (i = 0; i < options[key].length; i++) {
ret[key][i] = {};
getCurrent(options[key][i], curr[key][i], ret[key][i]);
}
} else if (isObject(options[key])) {
ret[key] = {};
getCurrent(options[key], curr[key] || {}, ret[key]);
} else {
ret[key] = curr[key] || null;
}
}
}
getCurrent(options, this.options, ret);
return ret;
};
}(Highcharts));
return Highcharts
}));<|fim▁end|>
|
/**
* Draw the borders and backgrounds for chart and plot area
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.