licenses (sequence, lengths 1-3) | version (string, 677 classes) | tree_hash (string, length 40) | path (string, 1 class) | type (string, 2 classes) | size (string, lengths 2-8) | text (string, lengths 25-67.1M) | package_name (string, lengths 2-41) | repo (string, lengths 33-86) |
---|---|---|---|---|---|---|---|---|
["MIT"] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 1050 |
using Distributed
@everywhere include("sst_module.jl")
@everywhere begin
using Downloads
path=joinpath(tempdir(),"demo_OISST")
!ispath(path) ? mkdir(path) : nothing
fil,_=sst_files.file_lists(path=path)
list=sst_files.read_files_list(path=path)
list=list[end-10:end,:]
n_per_worker=Int(ceil(length(list.fil)/nworkers()))
end
if !isempty(list.fil)
@sync @distributed for m in 1:nworkers()
n0=n_per_worker*(m-1)+1
n1=min(n_per_worker*m,length(list.fil))
println("$(n0),$(n1)")
for r in eachrow(list[n0:n1,:])
!isdir(dirname(r.fil)) ? mkdir(dirname(r.fil)) : nothing
if !isfile(r.fil)
println(r.fil)
try
Downloads.download(r.url,r.fil)
catch
try
Downloads.download(r.url[1:end-3]*"_preliminary.nc",r.fil[1:end-3]*"_preliminary.nc")
catch
println("file not found online : "*r.fil[1:end-3])
end
end
end
end
end
else
println("no more files to process")
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
["MIT"] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 11494 | module sst_files
using Printf, DataFrames, CSV, Dates, NCDatasets, Glob
read_files_list(;path=tempdir(),file="oisst_whole_file_list.csv",add_ymd=true) = begin
if add_ymd
add_to_table(CSV.read(joinpath(path,file),DataFrame))
else
CSV.read(joinpath(path,file),DataFrame)
end
end
function add_to_table(list)
ymd!(list)
list.t=collect(1:length(list.day))
list
end
"""
file_lists(;path=tempname())
Create file lists and output them to csv.
- `oisst_whole_file_list.csv` : all files through today's date
- `oisst_to_get_file_list.csv` : files that remain to be downloaded
Sample file names :
```
url="https://www.ncei.noaa.gov/thredds/dodsC/OisstBase/NetCDF/V2.1/AVHRR/198201/oisst-avhrr-v02r01.19820101.nc"
url="https://www.ncei.noaa.gov/thredds/fileServer/OisstBase/NetCDF/V2.1/AVHRR/198201/oisst-avhrr-v02r01.19820101.nc"
```
"""
function file_lists(;path=tempname())
url0="https://www.ncei.noaa.gov/thredds/fileServer/OisstBase/NetCDF/V2.1/AVHRR/"
!ispath(path) ? mkdir(path) : nothing
ndays=( today()-Date(1982,1,1) ).value
file_list=DataFrame(fil=String[],url=String[],todo=Bool[])
for t in 1:ndays
dd=Date(1982,1,1)+Dates.Day(t-1)
y=year(dd)
m=month(dd)
d=day(dd)
url=@sprintf "%s%04i%02i%s%04i%02i%02i.nc" url0 y m "/oisst-avhrr-v02r01." y m d
fil=@sprintf "%s/%04i%02i%s%04i%02i%02i.nc" path y m "/oisst-avhrr-v02r01." y m d
push!(file_list,(fil=fil,url=url,todo=!isfile(fil)))
end
fil1=joinpath(path,"oisst_whole_file_list.csv")
CSV.write(fil1,file_list)
fil2=joinpath(path,"oisst_to_get_file_list.csv")
CSV.write(fil2,file_list[file_list.todo,:])
return fil1,fil2
end
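# Usage sketch added for illustration (not part of the original API; the `demo_`
# name and the demo directory are hypothetical): call `file_lists` and inspect
# the two csv tables it writes.
function demo_file_lists(;path=joinpath(tempdir(),"demo_OISST"))
    fil1,fil2=file_lists(path=path)
    whole=CSV.read(fil1,DataFrame)  # every daily file name since 1982-01-01
    todo=CSV.read(fil2,DataFrame)   # subset not yet present on disk
    println("$(nrow(todo)) of $(nrow(whole)) files remain to download")
    (whole,todo)
end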
function ersst_file_lists(;path=tempdir())
url0="https://www.ncei.noaa.gov/pub/data/cmb/ersst/v5/netcdf/"
nmonths=(2023-1854)*12+7
file_list=DataFrame(fil=String[],url=String[],todo=Bool[])
for t in 1:nmonths
dd=Date(1854,1,1)+Dates.Month(t-1)
y=year(dd)
m=month(dd)
d=day(dd)
url=@sprintf "%s%s%04i%02i.nc" url0 "ersst.v5." y m
fil=@sprintf "files_ersst/ersst.v5.%04i%02i.nc" y m
push!(file_list,(fil=fil,url=url,todo=!isfile(fil)))
end
fil1=joinpath(path,"ersst_whole_file_list.csv")
CSV.write(fil1,file_list)
fil2=joinpath(path,"ersst_to_get_file_list.csv")
CSV.write(fil2,file_list[file_list.todo,:])
return fil1,fil2
end
"""
test_files(list,ii=[])
Test whether all downloaded files are valid.
```
list=CSV.read("oisst_whole_file_list.csv",DataFrame)
list_pb=sst_files.test_files(list)
[Downloads.download(r.url,r.fil) for r in eachrow(list[list_pb,:])]
```
"""
function test_files(list,ii=[])
test=zeros(1,length(list.fil))
isempty(ii) ? jj=collect(1:length(list.fil)) : jj=ii
for f in jj
try
ds=Dataset(list.fil[f])
close(ds)
catch e
println(basename(list.fil[f]))
test[f]=1
end
end
return [i[2] for i in findall(test.==1)]
end
function ymd(f)
tmp=split(f,".")[end-1]
parse.(Int,[tmp[1:4] tmp[5:6] tmp[7:8]])
end
function ymd!(d::DataFrame)
tmp=ymd.(d.fil)
d[!, :year]=[a[1] for a in tmp]
d[!, :month]=[a[2] for a in tmp]
d[!, :day]=[a[3] for a in tmp]
d
end
function monthlymean(gdf,m;path0=pwd(),varname="sst")
list=joinpath.(path0,gdf[m].fil)
ds=Dataset(list[1])
tmp=0*ds[varname][:,:,1,1]
[tmp.+=Dataset(f)[varname][:,:,1,1] for f in list]
tmp./length(list)
end
function to_monthly_file(arr,m; varname="sst",output_path=tempdir())
fil=joinpath(output_path,"$(varname)_month$(m).nc")
ds = Dataset(fil,"c")
defDim(ds,"i",size(arr,1))
defDim(ds,"j",size(arr,2))
v = defVar(ds,varname,Float32,("i","j"))
arr[ismissing.(arr)].=NaN
v[:,:] = arr
close(ds)
return fil
end
###
read_lon_lat(fil) = begin
lon=Dataset(fil)["lon"][:]
lat=Dataset(fil)["lat"][:]
lon,lat
end
"""
write_climatology(output_path,year0,year1,lon,lat)
Consolidate monthly fields into one file with
- 12 months
- both sst and anom
- coordinate variables
- some metadata
"""
function write_climatology(output_path,year0,year1,lo,la)
arr=zeros(1440,720,12,2)
for m in 1:12
arr[:,:,m,1].=Dataset(joinpath(output_path,"sst_month$(m).nc"))["sst"][:,:]
arr[:,:,m,2].=Dataset(joinpath(output_path,"anom_month$(m).nc"))["anom"][:,:]
end
fi=joinpath(output_path,"OISST_mean_monthly_$(year0)_$(year1).nc")
#
ds = NCDataset(fi,"c")
ds.attrib["title"] = "OISST climatology for $(year0) to $(year1)"
ds.attrib["author"] = "Gael Forget"
defDim(ds,"lon",1440); defDim(ds,"lat",720); defDim(ds,"month",12);
#
lon = defVar(ds,"lon",Float32,("lon",))
lat = defVar(ds,"lat",Float32,("lat",))
mon = defVar(ds,"month",Float32,("month",))
sst = defVar(ds,"sst",Float32,("lon","lat","month"))
anom = defVar(ds,"anom",Float32,("lon","lat","month"))
#
lon[:] = lo[:]
lat[:] = la[:]
mon[:] = 1:12
sst[:,:,:] = arr[:,:,:,1]
anom[:,:,:] = arr[:,:,:,2]
#
close(ds)
fi
end
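# Usage sketch added for illustration (the `demo_` helper is hypothetical): read
# coordinates from any daily OISST file, then consolidate the twelve monthly
# `sst_month*.nc` / `anom_month*.nc` files found in `output_path`.
function demo_write_climatology(daily_file,output_path;year0=1982,year1=2011)
    lon,lat=read_lon_lat(daily_file)
    write_climatology(output_path,year0,year1,lon,lat)
end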
end
##
module coarse_grain
using Statistics, DataFrames, CSV, NCDatasets, Glob
nl=720
#dnl=40 #for 10 degree squares
dnl=8 #for 2 degree squares
nnl=Int(nl/dnl)
@inline areamean(arr,ii,jj) =
mean(skipmissing(
arr[(ii-1)*dnl.+collect(1:dnl),(jj-1)*dnl.+collect(1:dnl)]
))
function indices(list)
arr=Dataset(list.fil[1])["sst"][:,:]
ii=[ii for ii in 1:nnl*2, jj in 1:nnl]
jj=[jj for ii in 1:nnl*2, jj in 1:nnl]
tmp=[areamean(arr,ii,jj) for ii in 1:nnl*2, jj in 1:nnl]
kk=findall((!isnan).(tmp))
(i=ii[kk],j=jj[kk],k=kk)
end
"""
grid(fil)
Return `(lon=lon,lat=lat,msk=msk,area=area)` based on `fil`.
"""
function grid(fil)
ds=NCDataset(fil,"r")
lon=ds["lon"][:]
lat=ds["lat"][:]
msk=ds["sst"][:,:]
msk[ismissing.(msk)].=NaN
msk=1 .+ 0*msk[:,:]
area=[cellarea(lon0,lon0+0.25,lat0,lat0+0.25) for lon0 in 0:0.25:360-0.25, lat0 in -90:0.25:90-0.25]
close(ds)
(lon=lon,lat=lat,msk=msk,area=area)
end
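# Quick check added for illustration (hypothetical helper): build the grid
# description from one daily OISST file and report the summed cell areas, which
# should come out close to the full sphere area of about 5.1e14 m^2.
function demo_grid(fil)
    G=grid(fil)
    println("total area (m^2) : ",sum(G.area))
    G
end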
"""
cellarea(lon0,lon1,lat0,lat1)
[source](https://gis.stackexchange.com/questions/29734/how-to-calculate-area-of-1-x-1-degree-cells-in-a-raster)
As a consequence of a theorem of Archimedes, the area of a cell spanning longitudes l0 to l1 (l1 > l0) and latitudes f0 to f1 (f1 > f0) is
```(sin(f1) - sin(f0)) * (l1 - l0) * R^2```
where
- l0 and l1 are expressed in radians (not degrees or whatever).
- l1 - l0 is calculated modulo 2*pi (e.g., -179 - 181 = 2 degrees, not -362 degrees).
- R is the authalic Earth radius, almost exactly 6371 km.
!!! note
As a quick check, the entire globe area can be computed by letting `l1 - l0 = 2pi`, `f1 = pi/2`, `f0 = -pi/2`. The result is `4 * Pi * R^2`.
"""
function cellarea(lon0,lon1,lat0,lat1)
EarthRadius = 6371.0
#leftover test values, not used by the calculation below:
#f0=20; f1=21; l0=349; l1=350;
#f0=-90; f1=90; l0=0; l1=360;
1e6 * (sind(lat1) - sind(lat0)) * mod1(deg2rad(lon1 - lon0),2pi) * EarthRadius^2
end
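# Worked check added for illustration, mirroring the docstring note: a single
# cell spanning the whole globe recovers the full sphere area 4*pi*R^2 (R in meters).
check_cellarea() = isapprox(cellarea(0,360,-90,90), 4*pi*(6371e3)^2; rtol=1e-6)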
@inline nansum(x) = sum(filter(!isnan,x))
@inline nansum(x,y) = mapslices(nansum,x,dims=y)
@inline areaintegral(arr,i::Int,j::Int,G::NamedTuple) = begin
ii=(i-1)*dnl.+collect(1:dnl)
jj=(j-1)*dnl.+collect(1:dnl)
nansum(arr[ii,jj].*G.msk[ii,jj].*G.area[ii,jj])
end
function calc_zm(G::NamedTuple,df)
gdf_tim=groupby(df, :t)
arr=NaN*zeros(maximum(df.j),length(gdf_tim))
for k in minimum(df.j):maximum(df.j)
area_tmp=[areaintegral(G.msk,x.i,x.j,G) for x in eachrow(gdf_tim[1])]
area_tmp[gdf_tim[1].j.!==k].=0
tmp1=[sum(tmp1.sst[:].*area_tmp)/sum(area_tmp) for tmp1 in gdf_tim]
arr[k,:].=tmp1
end
return arr
end
"""
merge_files(;path=tempdir(),outputfile="sst_lowres.csv",nam="sst_lowres")
Merge all low-resolution csv files (matching `nam*csv`) found in the chosen path.
"""
function merge_files(;path=tempdir(),outputfile="sst_lowres.csv", nam="sst_lowres")
file_list=glob("$(nam)*csv",path)
df=DataFrame(i=Int[],j=Int[],t=Int[],sst=Float32[])
[lowres_append!(df,f) for f in file_list]
CSV.write(joinpath(tempdir(),outputfile),df)
end
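# Usage sketch added for illustration (hypothetical helper): merge the per-day
# low-resolution csv files found under `tempdir()/sst_lowres_files` into one
# `sst_lowres.csv`, which `merge_files` writes to `tempdir()`.
demo_merge_files() = merge_files(path=dirname(file_root()),outputfile="sst_lowres.csv")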
function lowres_append!(df,f)
tmp=CSV.read(f,DataFrame)
tmp.t.=parse(Int,split(basename(f),"_")[end][1:8])
append!(df,tmp)
return tmp
end
file_root(subdir="sst_lowres_files",filesuff="sst_lowres_") = joinpath(tempdir(),subdir,filesuff)
"""
lowres_read(;path=tempdir(),fil="lowres_oisst_sst.csv")
Read the low-resolution SST csv file and group it by grid box (`:i`,`:j`).
"""
function lowres_read(;path=tempdir(),fil="lowres_oisst_sst.csv")
fil=joinpath(path,fil)
df=CSV.read(fil,DataFrame)
gdf=groupby(df, [:i, :j])
kdf=keys(gdf)
return (df,gdf,kdf)
end
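# Usage sketch added for illustration (hypothetical helper): read the
# low-resolution time series and extract the group for the 2-degree box
# nearest to a chosen longitude and latitude.
function demo_lowres_lookup(lon0,lat0;path=tempdir())
    df,gdf,kdf=lowres_read(path=path)
    k=lowres_index(lon0,lat0,kdf)
    gdf[kdf[k]]
end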
function lowres_index(lon0,lat0,kdf)
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
dx=Int(360/maximum(i))
(ii,jj)=(dx*i.-dx/2,dx*j.-dx/2 .-90)
d=(ii .-lon0).^2 .+ (jj .-lat0).^2
findall(d.==minimum(d))[1]
end
lowres_position(ii,jj,kdf) = begin
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
dx=Int(360/maximum(i))
(dx*ii.-dx/2,dx*jj.-dx/2 .-90)
end
end
##
module scenarios
function read_temp(fil)
log=readlines(fil)
ii=findall([occursin("tas=",i) for i in log])
nt=length(ii)
tas=zeros(nt)
year=zeros(nt)
for i in 1:nt
tmp=split(log[ii[i]],"=")[2]
tas[i]=parse(Float64,split(tmp,"degC")[1])
year[i]=parse(Float64,split(tmp,"in")[2])
end
year,tas
end
function calc_offset(year_sst,ny,scenario=245)
year1=year_sst+ny
hector_fil="hector_scenarios/temperature_ssp$(scenario).log"
hector_year,hector_tas=read_temp(hector_fil)
y0=findall(hector_year.==year_sst)[1]
y1=findall(hector_year.==year1)[1]
hector_tas[y1]-hector_tas[y0]
end
end
##
module timeseries
using DataFrames, Statistics, Dates
function calc(input,list; title="", gdf=nothing)
if isa(input,DataFrames.GroupKey)
sst1=gdf[input].sst[:]
else
sst1=input[:]
end
sst2=repeatclim(sst1,list)
sst3=anom(sst1,list)
ttl="SST time series"
#isa(input,DataFrames.GroupKey) ? ttl=ttl*"for i="*string(input.i)*", j="*string(input.j) : nothing
!isempty(title) ? ttl=title : nothing
ts=(sst=sst1,clim=sst2,anom=sst3,title=ttl,
year=list.year,month=list.month,day=list.day)
tmp1=timeseries.calc_quantile(ts)
merge(ts,tmp1)
end
function gdf_clim(list)
sel=findall([(f.year>=1992 && f.year<=2011) for f in eachrow(list)])
groupby(list[sel,:],[:month,:day])
end
@inline clim(sst,list) = [mean(sst[a.t[:]]) for a in gdf_clim(list)]
@inline function anom(sst,list)
c=clim(sst,list)
a=0*sst
for t in 1:length(list.t)
(y,m,d)=(list.year[t],list.month[t],list.day[t])
tt=min(1+(Date(y,m,d)-Date(y,1,1)).value,365)
a[t]=sst[t]-c[tt]
end
a.+median(c)
end
@inline function repeatclim(sst,list)
c=clim(sst,list)
a=0*sst
for t in 1:length(list.t)
(y,m,d)=(list.year[t],list.month[t],list.day[t])
tt=min(1+(Date(y,m,d)-Date(y,1,1)).value,365)
a[t]=c[tt]
end
a
end
##
@inline function calc_quantile(x,msk,yearday,yd)
d0=yearday[yd]
d1=[sum(mod1.( d0 .+ (-2:2),365) .==dd)==1 for dd in yearday]
sel=findall(msk .&& d1)
quantile(x[sel], [0.1, 0.9])
end
@inline function calc_quantile(ts)
x=ts.sst-ts.clim
msk=(ts.year.>=1992 .&& ts.year.<=2011)
yearday=Date.(ts.year,ts.month,ts.day)-Date.(ts.year,1,1)
yearday=min.(1 .+ [yd.value for yd in yearday],365)
ts_low=zeros(365)
ts_high=zeros(365)
for yd in 1:365
ts_low[yd],ts_high[yd]=calc_quantile(x,msk,yearday,yd)
end
(low=ts_low[yearday],high=ts_high[yearday])
end
##
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
["MIT"] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 3660 | module plots
using CairoMakie, Statistics, FileIO, Colors, Downloads
#
function by_time(ts; show_anom = true, show_clim=true)
tim=collect(1:length(ts.sst))/365.25 .+ 1982
f,a=lines(tim,ts.sst,label="SST",linewidth=4)
show_clim ? lines!(a,tim,ts.clim,color=:orange,label="seasonal climatology",linewidth=1) : nothing
show_anom ? lines!(a,tim,ts.anom,color=:red,label="SST - seasonal cycle") : nothing
a.title=ts.title
xlims!(1982,2024)
axislegend(a,position=:rb)
f
end
function by_year(ts)
f,a,l=lines(ts.sst[1:365],color=:gray)
[lines!(ts.sst[ (1:365) .+ 365*(y-1)] ,color=:gray) for y in 2:length(1982:2022)]
lines!(ts.sst[ 365*(2023-1982):365*(2024-1982)],color=:orange)
lines!(ts.sst[ 365*(2024-1982):end],color=:red,linewidth=2)
for y in 2021:2022
tt1=vec(1:365) .+(y-1982)*365; lines!(ts.sst[tt1],color=:blue)
end
a.title="SST year by year (red=2024, orange=2023, blue=2021:2022)"
f
end
#
function save_fig(fig,trigger=true; file="")
isempty(file) ? fil=tempname()*".png" : fil=joinpath(tempdir(),file)
save(fil,fig)
println(fil)
fig
end
function to_range!(DD,levs)
DD[findall(DD.<=levs[1])].=levs[1]+(levs[2]-levs[1])/100
DD[findall(DD.>=levs[end])].=levs[end]-(levs[end]-levs[end-1])/100
end
function TimeLat(list,zm,ttl;
ClipToRange=true, year0=1982, year1=2024, lat0=-90, lat1=90)
x=collect(1:length(list.year))/365.25 .+ 1982
dy=Int(180/size(zm,1))
y=collect(-90+dy/2:dy:90-dy/2)
z=permutedims(zm)
levs=(-2.0:0.25:2.0)/5.0
ClipToRange ? to_range!(z,levs) : nothing
fig1 = Figure(resolution = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title=ttl,
xticks=collect(year0:4:year1),yticks=collect(-90.0:20.0:90.0),ylabel="latitude")
hm1=contourf!(ax1,x[1:7:end],y,z[1:7:end,:],levels=levs,colormap=:curl)
Colorbar(fig1[1,2], hm1, height = Relative(0.65))
xlims!(ax1,year0,year1)
ylims!(ax1,lat0,lat1)
fig1
end
#
function lowres_scatter(kdf,fig=[],ax=[]; input=[])
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
(ii,jj)=(10*i.-5,10*j.-95)
if isa(fig,Array)
f,a=scatter(ii,jj,color=input,markersize=10)
c=(:blue,:red)
else
(f,a)=(ax,fig)
c=(:skyblue,:pink)
end
text!(a,ii.+1,jj,text=string.(i),fontsize=11,color=c[1])
text!(a,ii.+1,jj.-3,text=string.(j),fontsize=11,color=c[2])
f
end
#
function local_and_global(ts,ts_global,kdf0)
tim=collect(1:length(ts.anom))/365.25 .+ 1982
fig,ax,li=lines(tim,ts.anom .-median(ts.anom),label="local")
lines!(tim,ts_global.anom .-median(ts_global.anom),label="global")
ax.title="local and global SST anomalies"
xlims!(1982,2024)
ylims!(-2.5,2.5)
axislegend(ax,position = :rb)
fig
end
function map_base()
earth_jpg=joinpath(tempdir(),"Blue_Marble.jpg")
url="https://upload.wikimedia.org/wikipedia/commons/5/56/Blue_Marble_Next_Generation_%2B_topography_%2B_bathymetry.jpg"
!isfile(earth_jpg) ? Downloads.download(url,earth_jpg) : nothing
earth_img=load(earth_jpg)
earth_img=reverse(permutedims(earth_img),dims=2)
earth_img=circshift(earth_img,(1800,0))
#fig = Figure(resolution = (1200, 800)) #, backgroundcolor = :grey80)
fig=with_theme(Figure,theme_light())
ax = Axis(fig[1, 1])
im=image!(ax, -0.05 .. 359.95, -89.95 .. 89.95, 0.5 .+0.5*Gray.(earth_img))
hidedecorations!(ax)
fig,ax,im
end
##
function MHW(ts,ttl="SST anomaly with extreme warm periods in red")
x=ts.sst-ts.clim
y=fill(:blue,size(x))
y[findall(x.>=ts.high)].=:red
tim=collect(1:length(ts.sst))/365.25 .+ 1982
fig,ax,li=lines(tim,x,color=y)
xlims!(1982,2024)
ax.title=ttl
fig
end
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
["MIT"] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 74998 | ### A Pluto.jl notebook ###
# v0.19.46
using Markdown
using InteractiveUtils
# This Pluto notebook uses @bind for interactivity. When running this notebook outside of Pluto, the following 'mock version' of @bind gives bound variables a default value (instead of an error).
macro bind(def, element)
quote
local iv = try Base.loaded_modules[Base.PkgId(Base.UUID("6e696c72-6542-2067-7265-42206c756150"), "AbstractPlutoDingetjes")].Bonds.initial_value catch; b -> missing; end
local el = $(esc(element))
global $(esc(def)) = Core.applicable(Base.get, el) ? Base.get(el) : iv(el)
el
end
end
# ╔═╡ db98d796-c0d2-11ec-2c96-f7510a6d771c
begin
using OptimalTransport, LinearAlgebra
using Tables, DataFrames, Climatology
import PlutoUI, CSV, Downloads, Tulip
using CairoMakie
"Done with packages"
end
# ╔═╡ 8d867c72-2924-46a0-8a60-7c6e52f71a67
md"""# `OptimalTransport.jl` applied to `CBIOMES`
#### Methods
See [this wikipedia page](https://en.wikipedia.org/wiki/Transportation_theory_(mathematics)) and the [package documentation](https://juliaoptimaltransport.github.io/OptimalTransport.jl/dev/examples/basic/).
#### Climatologies
Zonal mean Chl is computed between `-179.75 W` and `-120.25 W`, for each month, as a function of latitude.
- Model : see <https://github.com/gaelforget/Climatology.jl>
- Satellite : <https://github.com/brorfred/ocean_clustering>
"""
# ╔═╡ c1df03d1-6205-4caa-9bdf-7daa5ba59d3a
md"""## Input Data Visualization"""
# ╔═╡ da9cc45d-8529-4965-b213-61b2657fce28
begin
m1_select = @bind m1 PlutoUI.Slider(1:12;default=1, show_value=true)
m2_select = @bind m2 PlutoUI.Slider(1:12;default=2, show_value=true)
md"""## Select Months To Compare
Compute Earth Mover Distance / Optimal Transport between two months.
- month 1 index : $(m1_select)
- month 2 index : $(m2_select)
"""
end
# ╔═╡ 29b6a32d-9003-4bc7-8351-0d1881153bf6
md"""## Appendix"""
# ╔═╡ 973f46d5-83b7-466a-a8c3-406643f7dbc5
begin
lons=-179.75:0.5:-120.25
lats=-19.75:0.5:49.75
fil=joinpath(dirname(pathof(Climatology)),"..","examples","OptimalTransport","M.csv")
M=Tables.matrix(CSV.read(fil,DataFrame))
fil=joinpath(dirname(pathof(Climatology)),"..","examples","OptimalTransport","S.csv")
S=Tables.matrix(CSV.read(fil,DataFrame))
nx=size(M,1)
Cost=Float64.([abs(i-j) for i in 1:nx, j in 1:nx])
"Input Data Ready"
end
# ╔═╡ a5301146-6eac-4bcd-97d9-3bfd6fe4f213
let
f=Figure()
ax1=Axis(f[1,1],title="model Chl",ylabel="month",xlabel="latitude")
hm1=heatmap!(ax1,lats,1:12,M,colorrange=(0.0,0.015))
Colorbar(f[1, 2], hm1)
ax2=Axis(f[2,1],title="satellite Chl",ylabel="month",xlabel="latitude")
hm2=heatmap!(ax2,lats,1:12,S,colorrange=(0.005,0.01))
Colorbar(f[2, 2], hm2)
f
end
# ╔═╡ ab49655b-ab30-457c-a476-9f6dd310ab4b
begin
Da=emd2(M[:,m1],M[:,m2], Cost, Tulip.Optimizer())
Da=round(Da,digits=4)
end
# ╔═╡ 7796c8e9-a090-4aab-a073-50f839ceab22
begin
ε = 0.01
# γ = sinkhorn(M[:,m1], S[:,m2], Cost, ε, SinkhornGibbs(); maxiter=5_000)
# γ = sinkhorn(M[:,m1], S[:,m2], Cost, ε, SinkhornStabilized(); maxiter=5_000)
γ = sinkhorn(M[:,m1], M[:,m2], Cost, ε, SinkhornEpsilonScaling(SinkhornStabilized()); maxiter=5_000)
Db=dot(γ, Cost) #compute optimal cost, directly
Db=round(Db,digits=4)
end
# ╔═╡ fe0ac519-7995-419a-a8ac-02af958342cd
md"""
#### Linear Programming
optimal distance : $(Da)
#### Stabilized Sinkhorn
optimal distance : $(Db)
"""
# ╔═╡ 00000000-0000-0000-0000-000000000001
PLUTO_PROJECT_TOML_CONTENTS = """
[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
Climatology = "9e9a4d37-2d2e-41e3-8b85-f7978328d9c7"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Downloads = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
OptimalTransport = "7e02d93a-ae51-4f58-b602-d97af76e3b33"
PlutoUI = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
Tables = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
Tulip = "6dd1b50a-3aae-11e9-10b5-ef983d2400fa"
[compat]
CSV = "~0.10.14"
CairoMakie = "~0.12.0"
DataFrames = "~1.6.1"
OptimalTransport = "~0.3.19"
PlutoUI = "~0.7.59"
Tulip = "~0.9.6"
"""
# ╔═╡ 00000000-0000-0000-0000-000000000002
PLUTO_MANIFEST_TOML_CONTENTS = """
# This file is machine-generated - editing it directly is not advised
julia_version = "1.10.4"
manifest_format = "2.0"
project_hash = "f7a79fd02d60bd7fcdd2591b12cb171eb6f3c319"
[[deps.AMD]]
deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse_jll"]
git-tree-sha1 = "45a1272e3f809d36431e57ab22703c6896b8908f"
uuid = "14f7f29c-3bd6-536c-9a0b-7339e30b5a3e"
version = "0.5.3"
[[deps.AbstractFFTs]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "d92ad398961a3ed262d8bf04a1a2b8340f915fef"
uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c"
version = "1.5.0"
weakdeps = ["ChainRulesCore", "Test"]
[deps.AbstractFFTs.extensions]
AbstractFFTsChainRulesCoreExt = "ChainRulesCore"
AbstractFFTsTestExt = "Test"
[[deps.AbstractPlutoDingetjes]]
deps = ["Pkg"]
git-tree-sha1 = "6e1d2a35f2f90a4bc7c2ed98079b2ba09c35b83a"
uuid = "6e696c72-6542-2067-7265-42206c756150"
version = "1.3.2"
[[deps.AbstractTrees]]
git-tree-sha1 = "2d9c9a55f9c93e8887ad391fbae72f8ef55e1177"
uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
version = "0.4.5"
[[deps.AccurateArithmetic]]
deps = ["LinearAlgebra", "Random", "VectorizationBase"]
git-tree-sha1 = "07af26e8d08c211ef85918f3e25d4c0990d20d70"
uuid = "22286c92-06ac-501d-9306-4abd417d9753"
version = "0.3.8"
[[deps.Adapt]]
deps = ["LinearAlgebra", "Requires"]
git-tree-sha1 = "cde29ddf7e5726c9fb511f340244ea3481267608"
uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
version = "3.7.2"
weakdeps = ["StaticArrays"]
[deps.Adapt.extensions]
AdaptStaticArraysExt = "StaticArrays"
[[deps.AdaptivePredicates]]
git-tree-sha1 = "7d5da5dd472490d048b081ca1bda4a7821b06456"
uuid = "35492f91-a3bd-45ad-95db-fcad7dcfedb7"
version = "1.1.1"
[[deps.AliasTables]]
deps = ["PtrArrays", "Random"]
git-tree-sha1 = "9876e1e164b144ca45e9e3198d0b689cadfed9ff"
uuid = "66dad0bd-aa9a-41b7-9441-69ab47430ed8"
version = "1.1.3"
[[deps.Animations]]
deps = ["Colors"]
git-tree-sha1 = "e81c509d2c8e49592413bfb0bb3b08150056c79d"
uuid = "27a7e980-b3e6-11e9-2bcd-0b925532e340"
version = "0.4.1"
[[deps.ArgTools]]
uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
version = "1.1.1"
[[deps.ArrayInterface]]
deps = ["Adapt", "LinearAlgebra", "Requires", "SparseArrays", "SuiteSparse"]
git-tree-sha1 = "c5aeb516a84459e0318a02507d2261edad97eb75"
uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
version = "7.7.1"
[deps.ArrayInterface.extensions]
ArrayInterfaceBandedMatricesExt = "BandedMatrices"
ArrayInterfaceBlockBandedMatricesExt = "BlockBandedMatrices"
ArrayInterfaceCUDAExt = "CUDA"
ArrayInterfaceGPUArraysCoreExt = "GPUArraysCore"
ArrayInterfaceStaticArraysCoreExt = "StaticArraysCore"
ArrayInterfaceTrackerExt = "Tracker"
[deps.ArrayInterface.weakdeps]
BandedMatrices = "aae01518-5342-5314-be14-df237901396f"
BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c"
Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
[[deps.Artifacts]]
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
[[deps.Atomix]]
deps = ["UnsafeAtomics"]
git-tree-sha1 = "c06a868224ecba914baa6942988e2f2aade419be"
uuid = "a9b6321e-bd34-4604-b9c9-b65b8de01458"
version = "0.1.0"
[[deps.Automa]]
deps = ["PrecompileTools", "TranscodingStreams"]
git-tree-sha1 = "014bc22d6c400a7703c0f5dc1fdc302440cf88be"
uuid = "67c07d97-cdcb-5c2c-af73-a7f9c32a568b"
version = "1.0.4"
[[deps.AxisAlgorithms]]
deps = ["LinearAlgebra", "Random", "SparseArrays", "WoodburyMatrices"]
git-tree-sha1 = "01b8ccb13d68535d73d2b0c23e39bd23155fb712"
uuid = "13072b0f-2c55-5437-9ae7-d433b7a33950"
version = "1.1.0"
[[deps.AxisArrays]]
deps = ["Dates", "IntervalSets", "IterTools", "RangeArrays"]
git-tree-sha1 = "16351be62963a67ac4083f748fdb3cca58bfd52f"
uuid = "39de3d68-74b9-583c-8d2d-e117c070f3a9"
version = "0.4.7"
[[deps.Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[deps.BenchmarkTools]]
deps = ["JSON", "Logging", "Printf", "Profile", "Statistics", "UUIDs"]
git-tree-sha1 = "f1dff6729bc61f4d49e140da1af55dcd1ac97b2f"
uuid = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf"
version = "1.5.0"
[[deps.BitFlags]]
git-tree-sha1 = "0691e34b3bb8be9307330f88d1a3c3f25466c24d"
uuid = "d1d4a3ce-64b1-5f1a-9ba4-7e7e69966f35"
version = "0.1.9"
[[deps.BitTwiddlingConvenienceFunctions]]
deps = ["Static"]
git-tree-sha1 = "f21cfd4950cb9f0587d5067e69405ad2acd27b87"
uuid = "62783981-4cbd-42fc-bca8-16325de8dc4b"
version = "0.1.6"
[[deps.Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9e2a6b69137e6969bab0152632dcb3bc108c8bdd"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.8+1"
[[deps.CEnum]]
git-tree-sha1 = "389ad5c84de1ae7cf0e28e381131c98ea87d54fc"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.5.0"
[[deps.CPUSummary]]
deps = ["CpuId", "IfElse", "PrecompileTools", "Static"]
git-tree-sha1 = "5a97e67919535d6841172016c9530fd69494e5ec"
uuid = "2a0fbf3d-bb9c-48f3-b0a9-814d99fd7ab9"
version = "0.2.6"
[[deps.CRC32c]]
uuid = "8bf52ea8-c179-5cab-976a-9e18b702a9bc"
[[deps.CRlibm_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "e329286945d0cfc04456972ea732551869af1cfc"
uuid = "4e9b3aee-d8a1-5a3d-ad8b-7d824db253f0"
version = "1.0.1+0"
[[deps.CSV]]
deps = ["CodecZlib", "Dates", "FilePathsBase", "InlineStrings", "Mmap", "Parsers", "PooledArrays", "PrecompileTools", "SentinelArrays", "Tables", "Unicode", "WeakRefStrings", "WorkerUtilities"]
git-tree-sha1 = "6c834533dc1fabd820c1db03c839bf97e45a3fab"
uuid = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
version = "0.10.14"
[[deps.Cairo]]
deps = ["Cairo_jll", "Colors", "Glib_jll", "Graphics", "Libdl", "Pango_jll"]
git-tree-sha1 = "7b6ad8c35f4bc3bca8eb78127c8b99719506a5fb"
uuid = "159f3aea-2a34-519c-b102-8c37f9878175"
version = "1.1.0"
[[deps.CairoMakie]]
deps = ["CRC32c", "Cairo", "Cairo_jll", "Colors", "FileIO", "FreeType", "GeometryBasics", "LinearAlgebra", "Makie", "PrecompileTools"]
git-tree-sha1 = "361dec06290d76b6d70d0c7dc888038eec9df63a"
uuid = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
version = "0.12.9"
[[deps.Cairo_jll]]
deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "LZO_jll", "Libdl", "Pixman_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"]
git-tree-sha1 = "a2f1c8c668c8e3cb4cca4e57a8efdb09067bb3fd"
uuid = "83423d85-b0ee-5818-9007-b63ccbeb887a"
version = "1.18.0+2"
[[deps.CatViews]]
deps = ["Random", "Test"]
git-tree-sha1 = "23d1f1e10d4e24374112fcf800ac981d14a54b24"
uuid = "81a5f4ea-a946-549a-aa7e-2a7f63a27d31"
version = "1.0.0"
[[deps.ChainRulesCore]]
deps = ["Compat", "LinearAlgebra"]
git-tree-sha1 = "71acdbf594aab5bbb2cec89b208c41b4c411e49f"
uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
version = "1.24.0"
weakdeps = ["SparseArrays"]
[deps.ChainRulesCore.extensions]
ChainRulesCoreSparseArraysExt = "SparseArrays"
[[deps.Climatology]]
deps = ["DataDeps", "DataFrames", "Dataverse", "Distributed", "Glob", "JLD2", "MeshArrays", "Pkg", "Printf", "RollingFunctions", "Scratch", "SharedArrays", "Statistics", "TOML"]
git-tree-sha1 = "cc95dac7681ff551cfb88a92b5047c2db740f448"
uuid = "9e9a4d37-2d2e-41e3-8b85-f7978328d9c7"
version = "0.5.5"
[deps.Climatology.extensions]
ClimatologyMITgcmExt = ["MITgcm"]
ClimatologyMakieExt = ["Makie"]
ClimatologyNCDatasetsExt = ["NCDatasets"]
[deps.Climatology.weakdeps]
MITgcm = "dce5fa8e-68ce-4431-a242-9469c69627a0"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab"
[[deps.CloseOpenIntervals]]
deps = ["Static", "StaticArrayInterface"]
git-tree-sha1 = "05ba0d07cd4fd8b7a39541e31a7b0254704ea581"
uuid = "fb6a15b2-703c-40df-9091-08a04967cfa9"
version = "0.1.13"
[[deps.CodecBzip2]]
deps = ["Bzip2_jll", "TranscodingStreams"]
git-tree-sha1 = "e7c529cc31bb85b97631b922fa2e6baf246f5905"
uuid = "523fee87-0ab8-5b00-afb7-3ecf72e48cfd"
version = "0.8.4"
[[deps.CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "bce6804e5e6044c6daab27bb533d1295e4a2e759"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.6"
[[deps.ColorBrewer]]
deps = ["Colors", "JSON", "Test"]
git-tree-sha1 = "61c5334f33d91e570e1d0c3eb5465835242582c4"
uuid = "a2cac450-b92f-5266-8821-25eda20663c8"
version = "0.4.0"
[[deps.ColorSchemes]]
deps = ["ColorTypes", "ColorVectorSpace", "Colors", "FixedPointNumbers", "PrecompileTools", "Random"]
git-tree-sha1 = "b5278586822443594ff615963b0c09755771b3e0"
uuid = "35d6a980-a343-548e-a6ea-1d62b119f2f4"
version = "3.26.0"
[[deps.ColorTypes]]
deps = ["FixedPointNumbers", "Random"]
git-tree-sha1 = "b10d0b65641d57b8b4d5e234446582de5047050d"
uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f"
version = "0.11.5"
[[deps.ColorVectorSpace]]
deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "Requires", "Statistics", "TensorCore"]
git-tree-sha1 = "a1f44953f2382ebb937d60dafbe2deea4bd23249"
uuid = "c3611d14-8923-5661-9e6a-0046d554d3a4"
version = "0.10.0"
weakdeps = ["SpecialFunctions"]
[deps.ColorVectorSpace.extensions]
SpecialFunctionsExt = "SpecialFunctions"
[[deps.Colors]]
deps = ["ColorTypes", "FixedPointNumbers", "Reexport"]
git-tree-sha1 = "362a287c3aa50601b0bc359053d5c2468f0e7ce0"
uuid = "5ae59095-9a9b-59fe-a467-6f913c188581"
version = "0.12.11"
[[deps.CommonSubexpressions]]
deps = ["MacroTools"]
git-tree-sha1 = "cda2cfaebb4be89c9084adaca7dd7333369715c5"
uuid = "bbf7d656-a473-5ed7-a52c-81e309532950"
version = "0.3.1"
[[deps.CommonWorldInvalidations]]
git-tree-sha1 = "ae52d1c52048455e85a387fbee9be553ec2b68d0"
uuid = "f70d9fcc-98c5-4d4a-abd7-e4cdeebd8ca8"
version = "1.0.0"
[[deps.Compat]]
deps = ["TOML", "UUIDs"]
git-tree-sha1 = "8ae8d32e09f0dcf42a36b90d4e17f5dd2e4c4215"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "4.16.0"
weakdeps = ["Dates", "LinearAlgebra"]
[deps.Compat.extensions]
CompatLinearAlgebraExt = "LinearAlgebra"
[[deps.CompilerSupportLibraries_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "1.1.1+0"
[[deps.ConcurrentUtilities]]
deps = ["Serialization", "Sockets"]
git-tree-sha1 = "ea32b83ca4fefa1768dc84e504cc0a94fb1ab8d1"
uuid = "f0e56b4a-5159-44fe-b623-3e5288b988bb"
version = "2.4.2"
[[deps.ConstructionBase]]
git-tree-sha1 = "76219f1ed5771adbb096743bff43fb5fdd4c1157"
uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9"
version = "1.5.8"
weakdeps = ["IntervalSets", "LinearAlgebra", "StaticArrays"]
[deps.ConstructionBase.extensions]
ConstructionBaseIntervalSetsExt = "IntervalSets"
ConstructionBaseLinearAlgebraExt = "LinearAlgebra"
ConstructionBaseStaticArraysExt = "StaticArrays"
[[deps.Contour]]
git-tree-sha1 = "439e35b0b36e2e5881738abc8857bd92ad6ff9a8"
uuid = "d38c429a-6771-53c6-b99e-75d170b6e991"
version = "0.6.3"
[[deps.CpuId]]
deps = ["Markdown"]
git-tree-sha1 = "fcbb72b032692610bfbdb15018ac16a36cf2e406"
uuid = "adafc99b-e345-5852-983c-f28acb93d879"
version = "0.3.1"
[[deps.Crayons]]
git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15"
uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
version = "4.1.1"
[[deps.DataAPI]]
git-tree-sha1 = "abe83f3a2f1b857aac70ef8b269080af17764bbe"
uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
version = "1.16.0"
[[deps.DataDeps]]
deps = ["HTTP", "Libdl", "Reexport", "SHA", "Scratch", "p7zip_jll"]
git-tree-sha1 = "8ae085b71c462c2cb1cfedcb10c3c877ec6cf03f"
uuid = "124859b0-ceae-595e-8997-d05f6a7a8dfe"
version = "0.7.13"
[[deps.DataFrames]]
deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "REPL", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"]
git-tree-sha1 = "04c738083f29f86e62c8afc341f0967d8717bdb8"
uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
version = "1.6.1"
[[deps.DataStructures]]
deps = ["Compat", "InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "1d0a14036acb104d9e89698bd408f63ab58cdc82"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.18.20"
[[deps.DataValueInterfaces]]
git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
version = "1.0.0"
[[deps.Dataverse]]
deps = ["CSV", "CodecZlib", "DataFrames", "Downloads", "HTTP", "JSON", "Tar", "ZipFile"]
git-tree-sha1 = "9c24c5cf1552251d4bb44a76f9437b2d84e86302"
uuid = "9c0b9be8-e31e-490f-90fe-77697562404d"
version = "0.2.5"
[deps.Dataverse.extensions]
DataverseCondaExt = ["Conda"]
DataversePyCallExt = ["PyCall"]
[deps.Dataverse.weakdeps]
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
[[deps.Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[deps.DelaunayTriangulation]]
deps = ["AdaptivePredicates", "EnumX", "ExactPredicates", "Random"]
git-tree-sha1 = "46f12daa85e5acc0ea5d5f9f8c3f1fc679e0f7e5"
uuid = "927a84f5-c5f4-47a5-9785-b46e178433df"
version = "1.2.0"
[[deps.DiffResults]]
deps = ["StaticArraysCore"]
git-tree-sha1 = "782dd5f4561f5d267313f23853baaaa4c52ea621"
uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5"
version = "1.1.0"
[[deps.DiffRules]]
deps = ["IrrationalConstants", "LogExpFunctions", "NaNMath", "Random", "SpecialFunctions"]
git-tree-sha1 = "23163d55f885173722d1e4cf0f6110cdbaf7e272"
uuid = "b552c78f-8df3-52c6-915a-8e097449b14b"
version = "1.15.1"
[[deps.Distances]]
deps = ["LinearAlgebra", "Statistics", "StatsAPI"]
git-tree-sha1 = "66c4c81f259586e8f002eacebc177e1fb06363b0"
uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
version = "0.10.11"
weakdeps = ["ChainRulesCore", "SparseArrays"]
[deps.Distances.extensions]
DistancesChainRulesCoreExt = "ChainRulesCore"
DistancesSparseArraysExt = "SparseArrays"
[[deps.Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[deps.Distributions]]
deps = ["AliasTables", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsAPI", "StatsBase", "StatsFuns"]
git-tree-sha1 = "e6c693a0e4394f8fda0e51a5bdf5aef26f8235e9"
uuid = "31c24e10-a181-5473-b8eb-7969acd0382f"
version = "0.25.111"
[deps.Distributions.extensions]
DistributionsChainRulesCoreExt = "ChainRulesCore"
DistributionsDensityInterfaceExt = "DensityInterface"
DistributionsTestExt = "Test"
[deps.Distributions.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
DensityInterface = "b429d917-457f-4dbc-8f4c-0cc954292b1d"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[deps.DocStringExtensions]]
deps = ["LibGit2"]
git-tree-sha1 = "2fb1e02f2b635d0845df5d7c167fec4dd739b00d"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.9.3"
[[deps.Downloads]]
deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"]
uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
version = "1.6.0"
[[deps.EarCut_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "e3290f2d49e661fbd94046d7e3726ffcb2d41053"
uuid = "5ae413db-bbd1-5e63-b57d-d24a61df00f5"
version = "2.2.4+0"
[[deps.EnumX]]
git-tree-sha1 = "bdb1942cd4c45e3c678fd11569d5cccd80976237"
uuid = "4e289a0a-7415-4d19-859d-a7e5c4648b56"
version = "1.0.4"
[[deps.ExactOptimalTransport]]
deps = ["Distances", "Distributions", "FillArrays", "LinearAlgebra", "MathOptInterface", "PDMats", "QuadGK", "SparseArrays", "StatsBase"]
git-tree-sha1 = "aca11e5cbf419be6778707f4ddc90d486bc79e92"
uuid = "24df6009-d856-477c-ac5c-91f668376b31"
version = "0.2.5"
[[deps.ExactPredicates]]
deps = ["IntervalArithmetic", "Random", "StaticArrays"]
git-tree-sha1 = "b3f2ff58735b5f024c392fde763f29b057e4b025"
uuid = "429591f6-91af-11e9-00e2-59fbe8cec110"
version = "2.2.8"
[[deps.ExceptionUnwrapping]]
deps = ["Test"]
git-tree-sha1 = "dcb08a0d93ec0b1cdc4af184b26b591e9695423a"
uuid = "460bff9d-24e4-43bc-9d9f-a8973cb893f4"
version = "0.1.10"
[[deps.Expat_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1c6317308b9dc757616f0b5cb379db10494443a7"
uuid = "2e619515-83b5-522b-bb60-26c02a35a201"
version = "2.6.2+0"
[[deps.ExprTools]]
git-tree-sha1 = "27415f162e6028e81c72b82ef756bf321213b6ec"
uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04"
version = "0.1.10"
[[deps.Extents]]
git-tree-sha1 = "81023caa0021a41712685887db1fc03db26f41f5"
uuid = "411431e0-e8b7-467b-b5e0-f676ba4f2910"
version = "0.1.4"
[[deps.FFMPEG_jll]]
deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"]
git-tree-sha1 = "8cc47f299902e13f90405ddb5bf87e5d474c0d38"
uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5"
version = "6.1.2+0"
[[deps.FFTW]]
deps = ["AbstractFFTs", "FFTW_jll", "LinearAlgebra", "MKL_jll", "Preferences", "Reexport"]
git-tree-sha1 = "4820348781ae578893311153d69049a93d05f39d"
uuid = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
version = "1.8.0"
[[deps.FFTW_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c6033cc3892d0ef5bb9cd29b7f2f0331ea5184ea"
uuid = "f5851436-0d7a-5f13-b9de-f02708fd171a"
version = "3.3.10+0"
[[deps.FastBroadcast]]
deps = ["ArrayInterface", "LinearAlgebra", "Polyester", "Static", "StaticArrayInterface", "StrideArraysCore"]
git-tree-sha1 = "ab1b34570bcdf272899062e1a56285a53ecaae08"
uuid = "7034ab61-46d4-4ed7-9d0f-46aef9175898"
version = "0.3.5"
[[deps.FastClosures]]
git-tree-sha1 = "acebe244d53ee1b461970f8910c235b259e772ef"
uuid = "9aa1b823-49e4-5ca5-8b0f-3971ec8bab6a"
version = "0.3.2"
[[deps.FileIO]]
deps = ["Pkg", "Requires", "UUIDs"]
git-tree-sha1 = "82d8afa92ecf4b52d78d869f038ebfb881267322"
uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
version = "1.16.3"
[[deps.FilePaths]]
deps = ["FilePathsBase", "MacroTools", "Reexport", "Requires"]
git-tree-sha1 = "919d9412dbf53a2e6fe74af62a73ceed0bce0629"
uuid = "8fc22ac5-c921-52a6-82fd-178b2807b824"
version = "0.8.3"
[[deps.FilePathsBase]]
deps = ["Compat", "Dates"]
git-tree-sha1 = "7878ff7172a8e6beedd1dea14bd27c3c6340d361"
uuid = "48062228-2e41-5def-b9a4-89aafe57970f"
version = "0.9.22"
weakdeps = ["Mmap", "Test"]
[deps.FilePathsBase.extensions]
FilePathsBaseMmapExt = "Mmap"
FilePathsBaseTestExt = "Test"
[[deps.FileWatching]]
uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee"
[[deps.FillArrays]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "6a70198746448456524cb442b8af316927ff3e1a"
uuid = "1a297f60-69ca-5386-bcde-b61e274b549b"
version = "1.13.0"
weakdeps = ["PDMats", "SparseArrays", "Statistics"]
[deps.FillArrays.extensions]
FillArraysPDMatsExt = "PDMats"
FillArraysSparseArraysExt = "SparseArrays"
FillArraysStatisticsExt = "Statistics"
[[deps.FixedPointNumbers]]
deps = ["Statistics"]
git-tree-sha1 = "05882d6995ae5c12bb5f36dd2ed3f61c98cbb172"
uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93"
version = "0.8.5"
[[deps.Fontconfig_jll]]
deps = ["Artifacts", "Bzip2_jll", "Expat_jll", "FreeType2_jll", "JLLWrappers", "Libdl", "Libuuid_jll", "Zlib_jll"]
git-tree-sha1 = "db16beca600632c95fc8aca29890d83788dd8b23"
uuid = "a3f928ae-7b40-5064-980b-68af3947d34b"
version = "2.13.96+0"
[[deps.Format]]
git-tree-sha1 = "9c68794ef81b08086aeb32eeaf33531668d5f5fc"
uuid = "1fa38f19-a742-5d3f-a2b9-30dd87b9d5f8"
version = "1.3.7"
[[deps.ForwardDiff]]
deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "LogExpFunctions", "NaNMath", "Preferences", "Printf", "Random", "SpecialFunctions"]
git-tree-sha1 = "cf0fe81336da9fb90944683b8c41984b08793dad"
uuid = "f6369f11-7733-5829-9624-2563aa707210"
version = "0.10.36"
weakdeps = ["StaticArrays"]
[deps.ForwardDiff.extensions]
ForwardDiffStaticArraysExt = "StaticArrays"
[[deps.FreeType]]
deps = ["CEnum", "FreeType2_jll"]
git-tree-sha1 = "907369da0f8e80728ab49c1c7e09327bf0d6d999"
uuid = "b38be410-82b0-50bf-ab77-7b57e271db43"
version = "4.1.1"
[[deps.FreeType2_jll]]
deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "5c1d8ae0efc6c2e7b1fc502cbe25def8f661b7bc"
uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7"
version = "2.13.2+0"
[[deps.FreeTypeAbstraction]]
deps = ["ColorVectorSpace", "Colors", "FreeType", "GeometryBasics"]
git-tree-sha1 = "2493cdfd0740015955a8e46de4ef28f49460d8bc"
uuid = "663a7486-cb36-511b-a19d-713bb74d65c9"
version = "0.10.3"
[[deps.FriBidi_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1ed150b39aebcc805c26b93a8d0122c940f64ce2"
uuid = "559328eb-81f9-559d-9380-de523a88c83c"
version = "1.0.14+0"
[[deps.Future]]
deps = ["Random"]
uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"
[[deps.GPUArraysCore]]
deps = ["Adapt"]
git-tree-sha1 = "2d6ca471a6c7b536127afccfa7564b5b39227fe0"
uuid = "46192b85-c4d5-4398-a991-12ede77f4527"
version = "0.1.5"
[[deps.GeoFormatTypes]]
git-tree-sha1 = "59107c179a586f0fe667024c5eb7033e81333271"
uuid = "68eda718-8dee-11e9-39e7-89f7f65f511f"
version = "0.4.2"
[[deps.GeoInterface]]
deps = ["Extents", "GeoFormatTypes"]
git-tree-sha1 = "5921fc0704e40c024571eca551800c699f86ceb4"
uuid = "cf35fbd7-0cd7-5166-be24-54bfbe79505f"
version = "1.3.6"
[[deps.GeometryBasics]]
deps = ["EarCut_jll", "Extents", "GeoInterface", "IterTools", "LinearAlgebra", "StaticArrays", "StructArrays", "Tables"]
git-tree-sha1 = "b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134"
uuid = "5c1252a2-5f33-56bf-86c9-59e7332b4326"
version = "0.4.11"
[[deps.Gettext_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "XML2_jll"]
git-tree-sha1 = "9b02998aba7bf074d14de89f9d37ca24a1a0b046"
uuid = "78b55507-aeef-58d4-861c-77aaff3498b1"
version = "0.21.0+0"
[[deps.Glib_jll]]
deps = ["Artifacts", "Gettext_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Libiconv_jll", "Libmount_jll", "PCRE2_jll", "Zlib_jll"]
git-tree-sha1 = "7c82e6a6cd34e9d935e9aa4051b66c6ff3af59ba"
uuid = "7746bdde-850d-59dc-9ae8-88ece973131d"
version = "2.80.2+0"
[[deps.Glob]]
git-tree-sha1 = "97285bbd5230dd766e9ef6749b80fc617126d496"
uuid = "c27321d9-0574-5035-807b-f59d2c89b15c"
version = "1.3.1"
[[deps.Graphics]]
deps = ["Colors", "LinearAlgebra", "NaNMath"]
git-tree-sha1 = "d61890399bc535850c4bf08e4e0d3a7ad0f21cbd"
uuid = "a2bd30eb-e257-5431-a919-1863eab51364"
version = "1.1.2"
[[deps.Graphite2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "344bf40dcab1073aca04aa0df4fb092f920e4011"
uuid = "3b182d85-2403-5c21-9c21-1e1f0cc25472"
version = "1.3.14+0"
[[deps.GridLayoutBase]]
deps = ["GeometryBasics", "InteractiveUtils", "Observables"]
git-tree-sha1 = "fc713f007cff99ff9e50accba6373624ddd33588"
uuid = "3955a311-db13-416c-9275-1d80ed98e5e9"
version = "0.11.0"
[[deps.Grisu]]
git-tree-sha1 = "53bb909d1151e57e2484c3d1b53e19552b887fb2"
uuid = "42e2da0e-8278-4e71-bc24-59509adca0fe"
version = "1.0.2"
[[deps.HTTP]]
deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"]
git-tree-sha1 = "d1d712be3164d61d1fb98e7ce9bcbc6cc06b45ed"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "1.10.8"
[[deps.HarfBuzz_jll]]
deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll"]
git-tree-sha1 = "401e4f3f30f43af2c8478fc008da50096ea5240f"
uuid = "2e76f6c2-a576-52d4-95c1-20adfe4de566"
version = "8.3.1+0"
[[deps.HostCPUFeatures]]
deps = ["BitTwiddlingConvenienceFunctions", "IfElse", "Libdl", "Static"]
git-tree-sha1 = "8e070b599339d622e9a081d17230d74a5c473293"
uuid = "3e5b6fbb-0976-4d2c-9146-d79de83f2fb0"
version = "0.1.17"
[[deps.HypergeometricFunctions]]
deps = ["LinearAlgebra", "OpenLibm_jll", "SpecialFunctions"]
git-tree-sha1 = "7c4195be1649ae622304031ed46a2f4df989f1eb"
uuid = "34004b35-14d8-5ef3-9330-4cdb6864b03a"
version = "0.3.24"
[[deps.Hyperscript]]
deps = ["Test"]
git-tree-sha1 = "179267cfa5e712760cd43dcae385d7ea90cc25a4"
uuid = "47d2ed2b-36de-50cf-bf87-49c2cf4b8b91"
version = "0.0.5"
[[deps.HypertextLiteral]]
deps = ["Tricks"]
git-tree-sha1 = "7134810b1afce04bbc1045ca1985fbe81ce17653"
uuid = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2"
version = "0.9.5"
[[deps.IOCapture]]
deps = ["Logging", "Random"]
git-tree-sha1 = "b6d6bfdd7ce25b0f9b2f6b3dd56b2673a66c8770"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.2.5"
[[deps.IfElse]]
git-tree-sha1 = "debdd00ffef04665ccbb3e150747a77560e8fad1"
uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173"
version = "0.1.1"
[[deps.ImageAxes]]
deps = ["AxisArrays", "ImageBase", "ImageCore", "Reexport", "SimpleTraits"]
git-tree-sha1 = "2e4520d67b0cef90865b3ef727594d2a58e0e1f8"
uuid = "2803e5a7-5153-5ecf-9a86-9b4c37f5f5ac"
version = "0.6.11"
[[deps.ImageBase]]
deps = ["ImageCore", "Reexport"]
git-tree-sha1 = "eb49b82c172811fd2c86759fa0553a2221feb909"
uuid = "c817782e-172a-44cc-b673-b171935fbb9e"
version = "0.1.7"
[[deps.ImageCore]]
deps = ["ColorVectorSpace", "Colors", "FixedPointNumbers", "MappedArrays", "MosaicViews", "OffsetArrays", "PaddedViews", "PrecompileTools", "Reexport"]
git-tree-sha1 = "b2a7eaa169c13f5bcae8131a83bc30eff8f71be0"
uuid = "a09fc81d-aa75-5fe9-8630-4744c3626534"
version = "0.10.2"
[[deps.ImageIO]]
deps = ["FileIO", "IndirectArrays", "JpegTurbo", "LazyModules", "Netpbm", "OpenEXR", "PNGFiles", "QOI", "Sixel", "TiffImages", "UUIDs"]
git-tree-sha1 = "437abb322a41d527c197fa800455f79d414f0a3c"
uuid = "82e4d734-157c-48bb-816b-45c225c6df19"
version = "0.6.8"
[[deps.ImageMetadata]]
deps = ["AxisArrays", "ImageAxes", "ImageBase", "ImageCore"]
git-tree-sha1 = "355e2b974f2e3212a75dfb60519de21361ad3cb7"
uuid = "bc367c6b-8a6b-528e-b4bd-a4b897500b49"
version = "0.9.9"
[[deps.Imath_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "0936ba688c6d201805a83da835b55c61a180db52"
uuid = "905a6f67-0a94-5f89-b386-d35d92009cd1"
version = "3.1.11+0"
[[deps.IndirectArrays]]
git-tree-sha1 = "012e604e1c7458645cb8b436f8fba789a51b257f"
uuid = "9b13fd28-a010-5f03-acff-a1bbcff69959"
version = "1.0.0"
[[deps.Inflate]]
git-tree-sha1 = "d1b1b796e47d94588b3757fe84fbf65a5ec4a80d"
uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9"
version = "0.1.5"
[[deps.InlineStrings]]
git-tree-sha1 = "45521d31238e87ee9f9732561bfee12d4eebd52d"
uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48"
version = "1.4.2"
[deps.InlineStrings.extensions]
ArrowTypesExt = "ArrowTypes"
ParsersExt = "Parsers"
[deps.InlineStrings.weakdeps]
ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd"
Parsers = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
[[deps.IntelOpenMP_jll]]
deps = ["Artifacts", "JLLWrappers", "LazyArtifacts", "Libdl"]
git-tree-sha1 = "10bd689145d2c3b2a9844005d01087cc1194e79e"
uuid = "1d5cc7b8-4909-519e-a0f8-d0f5ad9712d0"
version = "2024.2.1+0"
[[deps.InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[deps.Interpolations]]
deps = ["Adapt", "AxisAlgorithms", "ChainRulesCore", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "Requires", "SharedArrays", "SparseArrays", "StaticArrays", "WoodburyMatrices"]
git-tree-sha1 = "88a101217d7cb38a7b481ccd50d21876e1d1b0e0"
uuid = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59"
version = "0.15.1"
weakdeps = ["Unitful"]
[deps.Interpolations.extensions]
InterpolationsUnitfulExt = "Unitful"
[[deps.IntervalArithmetic]]
deps = ["CRlibm_jll", "MacroTools", "RoundingEmulator"]
git-tree-sha1 = "01fa84a20be8c7c867edf3b9ef33ac15f4089c1a"
uuid = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
version = "0.22.15"
weakdeps = ["DiffRules", "ForwardDiff", "IntervalSets", "RecipesBase"]
[deps.IntervalArithmetic.extensions]
IntervalArithmeticDiffRulesExt = "DiffRules"
IntervalArithmeticForwardDiffExt = "ForwardDiff"
IntervalArithmeticRecipesBaseExt = "RecipesBase"
IntervalArithmeticsIntervalSetsExt = "IntervalSets"
[[deps.IntervalSets]]
git-tree-sha1 = "dba9ddf07f77f60450fe5d2e2beb9854d9a49bd0"
uuid = "8197267c-284f-5f27-9208-e0e47529a953"
version = "0.7.10"
weakdeps = ["Random", "RecipesBase", "Statistics"]
[deps.IntervalSets.extensions]
IntervalSetsRandomExt = "Random"
IntervalSetsRecipesBaseExt = "RecipesBase"
IntervalSetsStatisticsExt = "Statistics"
[[deps.InvertedIndices]]
git-tree-sha1 = "0dc7b50b8d436461be01300fd8cd45aa0274b038"
uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f"
version = "1.3.0"
[[deps.IrrationalConstants]]
git-tree-sha1 = "630b497eafcc20001bba38a4651b327dcfc491d2"
uuid = "92d709cd-6900-40b7-9082-c6be49f344b6"
version = "0.2.2"
[[deps.Isoband]]
deps = ["isoband_jll"]
git-tree-sha1 = "f9b6d97355599074dc867318950adaa6f9946137"
uuid = "f1662d9f-8043-43de-a69a-05efc1cc6ff4"
version = "0.1.1"
[[deps.IterTools]]
git-tree-sha1 = "42d5f897009e7ff2cf88db414a389e5ed1bdd023"
uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
version = "1.10.0"
[[deps.IterativeSolvers]]
deps = ["LinearAlgebra", "Printf", "Random", "RecipesBase", "SparseArrays"]
git-tree-sha1 = "59545b0a2b27208b0650df0a46b8e3019f85055b"
uuid = "42fd0dbc-a981-5370-80f2-aaf504508153"
version = "0.9.4"
[[deps.IteratorInterfaceExtensions]]
git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
uuid = "82899510-4779-5014-852e-03e436cf321d"
version = "1.0.0"
[[deps.JLD2]]
deps = ["FileIO", "MacroTools", "Mmap", "OrderedCollections", "PrecompileTools", "Requires", "TranscodingStreams"]
git-tree-sha1 = "a0746c21bdc986d0dc293efa6b1faee112c37c28"
uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
version = "0.4.53"
[[deps.JLLWrappers]]
deps = ["Artifacts", "Preferences"]
git-tree-sha1 = "f389674c99bfcde17dc57454011aa44d5a260a40"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.6.0"
[[deps.JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "31e996f0a15c7b280ba9f76636b3ff9e2ae58c9a"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.4"
[[deps.JpegTurbo]]
deps = ["CEnum", "FileIO", "ImageCore", "JpegTurbo_jll", "TOML"]
git-tree-sha1 = "fa6d0bcff8583bac20f1ffa708c3913ca605c611"
uuid = "b835a17e-a41a-41e7-81f0-2f016b05efe0"
version = "0.1.5"
[[deps.JpegTurbo_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "c84a835e1a09b289ffcd2271bf2a337bbdda6637"
uuid = "aacddb02-875f-59d6-b918-886e6ef4fbf8"
version = "3.0.3+0"
[[deps.KahanSummation]]
git-tree-sha1 = "6292e7878fe190651e74148edb11356dbbc2e194"
uuid = "8e2b3108-d4c1-50be-a7a2-16352aec75c3"
version = "0.3.1"
[[deps.KernelAbstractions]]
deps = ["Adapt", "Atomix", "InteractiveUtils", "MacroTools", "PrecompileTools", "Requires", "StaticArrays", "UUIDs", "UnsafeAtomics", "UnsafeAtomicsLLVM"]
git-tree-sha1 = "cb1cff88ef2f3a157cbad75bbe6b229e1975e498"
uuid = "63c18a36-062a-441e-b654-da1e3ab1ce7c"
version = "0.9.25"
[deps.KernelAbstractions.extensions]
EnzymeExt = "EnzymeCore"
LinearAlgebraExt = "LinearAlgebra"
SparseArraysExt = "SparseArrays"
[deps.KernelAbstractions.weakdeps]
EnzymeCore = "f151be2c-9106-41f4-ab19-57ee4f262869"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[deps.KernelDensity]]
deps = ["Distributions", "DocStringExtensions", "FFTW", "Interpolations", "StatsBase"]
git-tree-sha1 = "7d703202e65efa1369de1279c162b915e245eed1"
uuid = "5ab0869b-81aa-558d-bb23-cbf5423bbe9b"
version = "0.6.9"
[[deps.Krylov]]
deps = ["LinearAlgebra", "Printf", "SparseArrays"]
git-tree-sha1 = "267dad6b4b7b5d529c76d40ff48d33f7e94cb834"
uuid = "ba0b0d4f-ebba-5204-a429-3ac8c609bfb7"
version = "0.9.6"
[[deps.LAME_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "170b660facf5df5de098d866564877e119141cbd"
uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d"
version = "3.100.2+0"
[[deps.LDLFactorizations]]
deps = ["AMD", "LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "70f582b446a1c3ad82cf87e62b878668beef9d13"
uuid = "40e66cde-538c-5869-a4ad-c39174c6795b"
version = "0.10.1"
[[deps.LLVM]]
deps = ["CEnum", "LLVMExtra_jll", "Libdl", "Preferences", "Printf", "Requires", "Unicode"]
git-tree-sha1 = "b351d72436ddecd27381a07c242ba27282a6c8a7"
uuid = "929cbde3-209d-540e-8aea-75f648917ca0"
version = "9.0.0"
[deps.LLVM.extensions]
BFloat16sExt = "BFloat16s"
[deps.LLVM.weakdeps]
BFloat16s = "ab4f0b2a-ad5b-11e8-123f-65d77653426b"
[[deps.LLVMExtra_jll]]
deps = ["Artifacts", "JLLWrappers", "LazyArtifacts", "Libdl", "TOML"]
git-tree-sha1 = "f42bec1e12f42ec251541f6d0482d520a4638b17"
uuid = "dad2f222-ce93-54a1-a47d-0025e8a3acab"
version = "0.0.33+0"
[[deps.LLVMOpenMP_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e16271d212accd09d52ee0ae98956b8a05c4b626"
uuid = "1d63c593-3942-5779-bab2-d838dc0a180e"
version = "17.0.6+0"
[[deps.LZO_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "70c5da094887fd2cae843b8db33920bac4b6f07d"
uuid = "dd4b983a-f0e5-5f8d-a1b7-129d4a5fb1ac"
version = "2.10.2+0"
[[deps.LaTeXStrings]]
git-tree-sha1 = "50901ebc375ed41dbf8058da26f9de442febbbec"
uuid = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f"
version = "1.3.1"
[[deps.LayoutPointers]]
deps = ["ArrayInterface", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface"]
git-tree-sha1 = "a9eaadb366f5493a5654e843864c13d8b107548c"
uuid = "10f19ff3-798f-405d-979b-55457f8fc047"
version = "0.1.17"
[[deps.LazyArtifacts]]
deps = ["Artifacts", "Pkg"]
uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3"
[[deps.LazyModules]]
git-tree-sha1 = "a560dd966b386ac9ae60bdd3a3d3a326062d3c3e"
uuid = "8cdb02fc-e678-4876-92c5-9defec4f444e"
version = "0.3.1"
[[deps.LibCURL]]
deps = ["LibCURL_jll", "MozillaCACerts_jll"]
uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
version = "0.6.4"
[[deps.LibCURL_jll]]
deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"]
uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
version = "8.4.0+0"
[[deps.LibGit2]]
deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[deps.LibGit2_jll]]
deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll"]
uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5"
version = "1.6.4+0"
[[deps.LibSSH2_jll]]
deps = ["Artifacts", "Libdl", "MbedTLS_jll"]
uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8"
version = "1.11.0+1"
[[deps.Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[deps.Libffi_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "0b4a5d71f3e5200a7dff793393e09dfc2d874290"
uuid = "e9f186c6-92d2-5b65-8a66-fee21dc1b490"
version = "3.2.2+1"
[[deps.Libgcrypt_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libgpg_error_jll"]
git-tree-sha1 = "9fd170c4bbfd8b935fdc5f8b7aa33532c991a673"
uuid = "d4300ac3-e22c-5743-9152-c294e39db1e4"
version = "1.8.11+0"
[[deps.Libgpg_error_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "fbb1f2bef882392312feb1ede3615ddc1e9b99ed"
uuid = "7add5ba3-2f88-524e-9cd5-f83b8a55f7b8"
version = "1.49.0+0"
[[deps.Libiconv_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "f9557a255370125b405568f9767d6d195822a175"
uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531"
version = "1.17.0+0"
[[deps.Libmount_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "0c4f9c4f1a50d8f35048fa0532dabbadf702f81e"
uuid = "4b2f31a3-9ecc-558c-b454-b3730dcb73e9"
version = "2.40.1+0"
[[deps.Libuuid_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "5ee6203157c120d79034c748a2acba45b82b8807"
uuid = "38a345b3-de98-5d2b-a5d3-14cd9215e700"
version = "2.40.1+0"
[[deps.LinearAlgebra]]
deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[deps.LinearOperators]]
deps = ["FastClosures", "LinearAlgebra", "Printf", "Requires", "SparseArrays", "TimerOutputs"]
git-tree-sha1 = "ae5d90280094348c32fda8bc8b5a88bb16514d43"
uuid = "5c8ed15e-5a4c-59e4-a42b-c7e8811fb125"
version = "2.8.0"
[deps.LinearOperators.extensions]
LinearOperatorsCUDAExt = "CUDA"
LinearOperatorsChainRulesCoreExt = "ChainRulesCore"
LinearOperatorsLDLFactorizationsExt = "LDLFactorizations"
[deps.LinearOperators.weakdeps]
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
LDLFactorizations = "40e66cde-538c-5869-a4ad-c39174c6795b"
[[deps.LogExpFunctions]]
deps = ["DocStringExtensions", "IrrationalConstants", "LinearAlgebra"]
git-tree-sha1 = "a2d09619db4e765091ee5c6ffe8872849de0feea"
uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
version = "0.3.28"
[deps.LogExpFunctions.extensions]
LogExpFunctionsChainRulesCoreExt = "ChainRulesCore"
LogExpFunctionsChangesOfVariablesExt = "ChangesOfVariables"
LogExpFunctionsInverseFunctionsExt = "InverseFunctions"
[deps.LogExpFunctions.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
ChangesOfVariables = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[deps.LoggingExtras]]
deps = ["Dates", "Logging"]
git-tree-sha1 = "c1dd6d7978c12545b4179fb6153b9250c96b0075"
uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36"
version = "1.0.3"
[[deps.LoopVectorization]]
deps = ["ArrayInterface", "CPUSummary", "CloseOpenIntervals", "DocStringExtensions", "HostCPUFeatures", "IfElse", "LayoutPointers", "LinearAlgebra", "OffsetArrays", "PolyesterWeave", "PrecompileTools", "SIMDTypes", "SLEEFPirates", "Static", "StaticArrayInterface", "ThreadingUtilities", "UnPack", "VectorizationBase"]
git-tree-sha1 = "8084c25a250e00ae427a379a5b607e7aed96a2dd"
uuid = "bdcacae8-1622-11e9-2a5c-532679323890"
version = "0.12.171"
weakdeps = ["ChainRulesCore", "ForwardDiff", "SpecialFunctions"]
[deps.LoopVectorization.extensions]
ForwardDiffExt = ["ChainRulesCore", "ForwardDiff"]
SpecialFunctionsExt = "SpecialFunctions"
[[deps.MIMEs]]
git-tree-sha1 = "65f28ad4b594aebe22157d6fac869786a255b7eb"
uuid = "6c6e2e6c-3030-632d-7369-2d6c69616d65"
version = "0.1.4"
[[deps.MKL_jll]]
deps = ["Artifacts", "IntelOpenMP_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "oneTBB_jll"]
git-tree-sha1 = "f046ccd0c6db2832a9f639e2c669c6fe867e5f4f"
uuid = "856f044c-d86e-5d09-b602-aeab76dc8ba7"
version = "2024.2.0+0"
[[deps.MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "2fa9ee3e63fd3a4f7a9a4f4744a52f4856de82df"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.13"
[[deps.Makie]]
deps = ["Animations", "Base64", "CRC32c", "ColorBrewer", "ColorSchemes", "ColorTypes", "Colors", "Contour", "Dates", "DelaunayTriangulation", "Distributions", "DocStringExtensions", "Downloads", "FFMPEG_jll", "FileIO", "FilePaths", "FixedPointNumbers", "Format", "FreeType", "FreeTypeAbstraction", "GeometryBasics", "GridLayoutBase", "ImageIO", "InteractiveUtils", "IntervalSets", "Isoband", "KernelDensity", "LaTeXStrings", "LinearAlgebra", "MacroTools", "MakieCore", "Markdown", "MathTeXEngine", "Observables", "OffsetArrays", "Packing", "PlotUtils", "PolygonOps", "PrecompileTools", "Printf", "REPL", "Random", "RelocatableFolders", "Scratch", "ShaderAbstractions", "Showoff", "SignedDistanceFields", "SparseArrays", "Statistics", "StatsBase", "StatsFuns", "StructArrays", "TriplotBase", "UnicodeFun", "Unitful"]
git-tree-sha1 = "204f06860af9008fa08b3a4842f48116e1209a2c"
uuid = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
version = "0.21.9"
[[deps.MakieCore]]
deps = ["ColorTypes", "GeometryBasics", "IntervalSets", "Observables"]
git-tree-sha1 = "b0e2e3473af351011e598f9219afb521121edd2b"
uuid = "20f20a25-4f0e-4fdf-b5d1-57303727442b"
version = "0.8.6"
[[deps.ManualMemory]]
git-tree-sha1 = "bcaef4fc7a0cfe2cba636d84cda54b5e4e4ca3cd"
uuid = "d125e4d3-2237-4719-b19c-fa641b8a4667"
version = "0.1.8"
[[deps.MappedArrays]]
git-tree-sha1 = "2dab0221fe2b0f2cb6754eaa743cc266339f527e"
uuid = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900"
version = "0.4.2"
[[deps.Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[deps.MathOptInterface]]
deps = ["BenchmarkTools", "CodecBzip2", "CodecZlib", "DataStructures", "ForwardDiff", "JSON", "LinearAlgebra", "MutableArithmetics", "NaNMath", "OrderedCollections", "PrecompileTools", "Printf", "SparseArrays", "SpecialFunctions", "Test", "Unicode"]
git-tree-sha1 = "5b246fca5420ae176d65ed43a2d0ee5897775216"
uuid = "b8f27783-ece8-5eb3-8dc8-9495eed66fee"
version = "1.31.2"
[[deps.MathTeXEngine]]
deps = ["AbstractTrees", "Automa", "DataStructures", "FreeTypeAbstraction", "GeometryBasics", "LaTeXStrings", "REPL", "RelocatableFolders", "UnicodeFun"]
git-tree-sha1 = "e1641f32ae592e415e3dbae7f4a188b5316d4b62"
uuid = "0a4f8689-d25c-4efe-a92b-7142dfc1aa53"
version = "0.6.1"
[[deps.MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "MozillaCACerts_jll", "NetworkOptions", "Random", "Sockets"]
git-tree-sha1 = "c067a280ddc25f196b5e7df3877c6b226d390aaf"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.1.9"
[[deps.MbedTLS_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.28.2+1"
[[deps.MeshArrays]]
deps = ["CatViews", "Dates", "LazyArtifacts", "NearestNeighbors", "Pkg", "Printf", "SparseArrays", "Statistics", "Unitful"]
git-tree-sha1 = "c1c5c8afde5293b643aea2d5ccc8554b1e593bf0"
uuid = "cb8c808f-1acf-59a3-9d2b-6e38d009f683"
version = "0.3.11"
[deps.MeshArrays.extensions]
MeshArraysDataDepsExt = ["DataDeps"]
MeshArraysGeoJSONExt = ["GeoJSON"]
MeshArraysJLD2Ext = ["JLD2"]
MeshArraysMakieExt = ["Makie"]
MeshArraysProjExt = ["Proj"]
MeshArraysShapefileExt = ["Shapefile"]
[deps.MeshArrays.weakdeps]
DataDeps = "124859b0-ceae-595e-8997-d05f6a7a8dfe"
GeoJSON = "61d90e0f-e114-555e-ac52-39dfb47a3ef9"
JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
Proj = "c94c279d-25a6-4763-9509-64d165bea63e"
Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
[[deps.Missings]]
deps = ["DataAPI"]
git-tree-sha1 = "ec4f7fbeab05d7747bdf98eb74d130a2a2ed298d"
uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28"
version = "1.2.0"
[[deps.Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[deps.MosaicViews]]
deps = ["MappedArrays", "OffsetArrays", "PaddedViews", "StackViews"]
git-tree-sha1 = "7b86a5d4d70a9f5cdf2dacb3cbe6d251d1a61dbe"
uuid = "e94cdb99-869f-56ef-bcf0-1ae2bcbe0389"
version = "0.3.4"
[[deps.MozillaCACerts_jll]]
uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
version = "2023.1.10"
[[deps.MutableArithmetics]]
deps = ["LinearAlgebra", "SparseArrays", "Test"]
git-tree-sha1 = "d0a6b1096b584a2b88efb70a92f8cb8c881eb38a"
uuid = "d8a4904e-b15c-11e9-3269-09a3773c0cb0"
version = "1.4.6"
[[deps.NNlib]]
deps = ["Adapt", "Atomix", "ChainRulesCore", "GPUArraysCore", "KernelAbstractions", "LinearAlgebra", "Pkg", "Random", "Requires", "Statistics"]
git-tree-sha1 = "72240e3f5ca031937bd536182cb2c031da5f46dd"
uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd"
version = "0.8.21"
[deps.NNlib.extensions]
NNlibAMDGPUExt = "AMDGPU"
[deps.NNlib.weakdeps]
AMDGPU = "21141c5a-9bdb-4563-92ae-f87d6854732e"
[[deps.NaNMath]]
deps = ["OpenLibm_jll"]
git-tree-sha1 = "0877504529a3e5c3343c6f8b4c0381e57e4387e4"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "1.0.2"
[[deps.NearestNeighbors]]
deps = ["Distances", "StaticArrays"]
git-tree-sha1 = "91a67b4d73842da90b526011fa85c5c4c9343fe0"
uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce"
version = "0.4.18"
[[deps.Netpbm]]
deps = ["FileIO", "ImageCore", "ImageMetadata"]
git-tree-sha1 = "d92b107dbb887293622df7697a2223f9f8176fcd"
uuid = "f09324ee-3d7c-5217-9330-fc30815ba969"
version = "1.1.1"
[[deps.NetworkOptions]]
uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
version = "1.2.0"
[[deps.Observables]]
git-tree-sha1 = "7438a59546cf62428fc9d1bc94729146d37a7225"
uuid = "510215fc-4207-5dde-b226-833fc4488ee2"
version = "0.5.5"
[[deps.OffsetArrays]]
git-tree-sha1 = "1a27764e945a152f7ca7efa04de513d473e9542e"
uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
version = "1.14.1"
weakdeps = ["Adapt"]
[deps.OffsetArrays.extensions]
OffsetArraysAdaptExt = "Adapt"
[[deps.Ogg_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "887579a3eb005446d514ab7aeac5d1d027658b8f"
uuid = "e7412a2a-1a6e-54c0-be00-318e2571c051"
version = "1.3.5+1"
[[deps.OpenBLAS_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"]
uuid = "4536629a-c528-5b80-bd46-f80d51c5b363"
version = "0.3.23+4"
[[deps.OpenEXR]]
deps = ["Colors", "FileIO", "OpenEXR_jll"]
git-tree-sha1 = "327f53360fdb54df7ecd01e96ef1983536d1e633"
uuid = "52e1d378-f018-4a11-a4be-720524705ac7"
version = "0.3.2"
[[deps.OpenEXR_jll]]
deps = ["Artifacts", "Imath_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "8292dd5c8a38257111ada2174000a33745b06d4e"
uuid = "18a262bb-aa17-5467-a713-aee519bc75cb"
version = "3.2.4+0"
[[deps.OpenLibm_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "05823500-19ac-5b8b-9628-191a04bc5112"
version = "0.8.1+2"
[[deps.OpenSSL]]
deps = ["BitFlags", "Dates", "MozillaCACerts_jll", "OpenSSL_jll", "Sockets"]
git-tree-sha1 = "38cb508d080d21dc1128f7fb04f20387ed4c0af4"
uuid = "4d8831e6-92b7-49fb-bdf8-b643e874388c"
version = "1.4.3"
[[deps.OpenSSL_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1b35263570443fdd9e76c76b7062116e2f374ab8"
uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95"
version = "3.0.15+0"
[[deps.OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.5+0"
[[deps.OptimalTransport]]
deps = ["ExactOptimalTransport", "IterativeSolvers", "LinearAlgebra", "LogExpFunctions", "NNlib", "Reexport"]
git-tree-sha1 = "79ba1dab46dfc7b677278ebe892a431788da86a9"
uuid = "7e02d93a-ae51-4f58-b602-d97af76e3b33"
version = "0.3.19"
[[deps.Opus_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "6703a85cb3781bd5909d48730a67205f3f31a575"
uuid = "91d4177d-7536-5919-b921-800302f37372"
version = "1.3.3+0"
[[deps.OrderedCollections]]
git-tree-sha1 = "dfdf5519f235516220579f949664f1bf44e741c5"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.6.3"
[[deps.PCRE2_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15"
version = "10.42.0+1"
[[deps.PDMats]]
deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"]
git-tree-sha1 = "949347156c25054de2db3b166c52ac4728cbad65"
uuid = "90014a1f-27ba-587c-ab20-58faa44d9150"
version = "0.11.31"
[[deps.PNGFiles]]
deps = ["Base64", "CEnum", "ImageCore", "IndirectArrays", "OffsetArrays", "libpng_jll"]
git-tree-sha1 = "67186a2bc9a90f9f85ff3cc8277868961fb57cbd"
uuid = "f57f5aa1-a3ce-4bc8-8ab9-96f992907883"
version = "0.4.3"
[[deps.Packing]]
deps = ["GeometryBasics"]
git-tree-sha1 = "ec3edfe723df33528e085e632414499f26650501"
uuid = "19eb6ba3-879d-56ad-ad62-d5c202156566"
version = "0.5.0"
[[deps.PaddedViews]]
deps = ["OffsetArrays"]
git-tree-sha1 = "0fac6313486baae819364c52b4f483450a9d793f"
uuid = "5432bcbf-9aad-5242-b902-cca2824c8663"
version = "0.5.12"
[[deps.Pango_jll]]
deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "FriBidi_jll", "Glib_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e127b609fb9ecba6f201ba7ab753d5a605d53801"
uuid = "36c8627f-9965-5494-a995-c6b170f724f3"
version = "1.54.1+0"
[[deps.Parsers]]
deps = ["Dates", "PrecompileTools", "UUIDs"]
git-tree-sha1 = "8489905bcdbcfac64d1daa51ca07c0d8f0283821"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "2.8.1"
[[deps.Pixman_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LLVMOpenMP_jll", "Libdl"]
git-tree-sha1 = "35621f10a7531bc8fa58f74610b1bfb70a3cfc6b"
uuid = "30392449-352a-5448-841d-b1acce4e97dc"
version = "0.43.4+0"
[[deps.Pkg]]
deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
version = "1.10.0"
[[deps.PkgVersion]]
deps = ["Pkg"]
git-tree-sha1 = "f9501cc0430a26bc3d156ae1b5b0c1b47af4d6da"
uuid = "eebad327-c553-4316-9ea0-9fa01ccd7688"
version = "0.3.3"
[[deps.PlotUtils]]
deps = ["ColorSchemes", "Colors", "Dates", "PrecompileTools", "Printf", "Random", "Reexport", "Statistics"]
git-tree-sha1 = "7b1a9df27f072ac4c9c7cbe5efb198489258d1f5"
uuid = "995b91a9-d308-5afd-9ec6-746e21dbc043"
version = "1.4.1"
[[deps.PlutoUI]]
deps = ["AbstractPlutoDingetjes", "Base64", "ColorTypes", "Dates", "FixedPointNumbers", "Hyperscript", "HypertextLiteral", "IOCapture", "InteractiveUtils", "JSON", "Logging", "MIMEs", "Markdown", "Random", "Reexport", "URIs", "UUIDs"]
git-tree-sha1 = "eba4810d5e6a01f612b948c9fa94f905b49087b0"
uuid = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
version = "0.7.60"
[[deps.Polyester]]
deps = ["ArrayInterface", "BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "ManualMemory", "PolyesterWeave", "Static", "StaticArrayInterface", "StrideArraysCore", "ThreadingUtilities"]
git-tree-sha1 = "6d38fea02d983051776a856b7df75b30cf9a3c1f"
uuid = "f517fe37-dbe3-4b94-8317-1923a5111588"
version = "0.7.16"
[[deps.PolyesterWeave]]
deps = ["BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "Static", "ThreadingUtilities"]
git-tree-sha1 = "645bed98cd47f72f67316fd42fc47dee771aefcd"
uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad"
version = "0.2.2"
[[deps.PolygonOps]]
git-tree-sha1 = "77b3d3605fc1cd0b42d95eba87dfcd2bf67d5ff6"
uuid = "647866c9-e3ac-4575-94e7-e3d426903924"
version = "0.1.2"
[[deps.PooledArrays]]
deps = ["DataAPI", "Future"]
git-tree-sha1 = "36d8b4b899628fb92c2749eb488d884a926614d3"
uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
version = "1.4.3"
[[deps.PrecompileTools]]
deps = ["Preferences"]
git-tree-sha1 = "5aa36f7049a63a1528fe8f7c3f2113413ffd4e1f"
uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
version = "1.2.1"
[[deps.Preferences]]
deps = ["TOML"]
git-tree-sha1 = "9306f6085165d270f7e3db02af26a400d580f5c6"
uuid = "21216c6a-2e73-6563-6e65-726566657250"
version = "1.4.3"
[[deps.PrettyTables]]
deps = ["Crayons", "LaTeXStrings", "Markdown", "PrecompileTools", "Printf", "Reexport", "StringManipulation", "Tables"]
git-tree-sha1 = "66b20dd35966a748321d3b2537c4584cf40387c7"
uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
version = "2.3.2"
[[deps.Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[deps.Profile]]
deps = ["Printf"]
uuid = "9abbd945-dff8-562f-b5e8-e1ebf5ef1b79"
[[deps.ProgressMeter]]
deps = ["Distributed", "Printf"]
git-tree-sha1 = "8f6bc219586aef8baf0ff9a5fe16ee9c70cb65e4"
uuid = "92933f4c-e287-5a05-a399-4b506db050ca"
version = "1.10.2"
[[deps.PtrArrays]]
git-tree-sha1 = "77a42d78b6a92df47ab37e177b2deac405e1c88f"
uuid = "43287f4e-b6f4-7ad1-bb20-aadabca52c3d"
version = "1.2.1"
[[deps.QOI]]
deps = ["ColorTypes", "FileIO", "FixedPointNumbers"]
git-tree-sha1 = "18e8f4d1426e965c7b532ddd260599e1510d26ce"
uuid = "4b34888f-f399-49d4-9bb3-47ed5cae4e65"
version = "1.0.0"
[[deps.QPSReader]]
deps = ["Logging", "Pkg"]
git-tree-sha1 = "374833c0c1fd5cc623278581bf0c8259d7b35f10"
uuid = "10f199a5-22af-520b-b891-7ce84a7b1bd0"
version = "0.2.1"
[[deps.QuadGK]]
deps = ["DataStructures", "LinearAlgebra"]
git-tree-sha1 = "1d587203cf851a51bf1ea31ad7ff89eff8d625ea"
uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc"
version = "2.11.0"
[deps.QuadGK.extensions]
QuadGKEnzymeExt = "Enzyme"
[deps.QuadGK.weakdeps]
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
[[deps.REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[deps.Random]]
deps = ["SHA"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[deps.RangeArrays]]
git-tree-sha1 = "b9039e93773ddcfc828f12aadf7115b4b4d225f5"
uuid = "b3c3ace0-ae52-54e7-9d0b-2c1406fd6b9d"
version = "0.3.2"
[[deps.Ratios]]
deps = ["Requires"]
git-tree-sha1 = "1342a47bf3260ee108163042310d26f2be5ec90b"
uuid = "c84ed2f1-dad5-54f0-aa8e-dbefe2724439"
version = "0.4.5"
weakdeps = ["FixedPointNumbers"]
[deps.Ratios.extensions]
RatiosFixedPointNumbersExt = "FixedPointNumbers"
[[deps.RecipesBase]]
deps = ["PrecompileTools"]
git-tree-sha1 = "5c3d09cc4f31f5fc6af001c250bf1278733100ff"
uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
version = "1.3.4"
[[deps.Reexport]]
git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b"
uuid = "189a3867-3050-52da-a836-e630ba90ab69"
version = "1.2.2"
[[deps.RelocatableFolders]]
deps = ["SHA", "Scratch"]
git-tree-sha1 = "ffdaf70d81cf6ff22c2b6e733c900c3321cab864"
uuid = "05181044-ff0b-4ac5-8273-598c1e38db00"
version = "1.0.1"
[[deps.Requires]]
deps = ["UUIDs"]
git-tree-sha1 = "838a3a4188e2ded87a4f9f184b4b0d78a1e91cb7"
uuid = "ae029012-a4dd-5104-9daa-d747884805df"
version = "1.3.0"
[[deps.Rmath]]
deps = ["Random", "Rmath_jll"]
git-tree-sha1 = "f65dcb5fa46aee0cf9ed6274ccbd597adc49aa7b"
uuid = "79098fc4-a85e-5d69-aa6a-4863f24498fa"
version = "0.7.1"
[[deps.Rmath_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e60724fd3beea548353984dc61c943ecddb0e29a"
uuid = "f50d1b31-88e8-58de-be2c-1cc44531875f"
version = "0.4.3+0"
[[deps.RollingFunctions]]
deps = ["AccurateArithmetic", "FastBroadcast", "KahanSummation", "LinearAlgebra", "LoopVectorization", "Statistics", "StatsBase", "Tables"]
git-tree-sha1 = "4a54152985fea23b0b0e99a77566a87137221a0a"
uuid = "b0e4dd01-7b14-53d8-9b45-175a3e362653"
version = "0.8.0"
[[deps.RoundingEmulator]]
git-tree-sha1 = "40b9edad2e5287e05bd413a38f61a8ff55b9557b"
uuid = "5eaf0fd0-dfba-4ccb-bf02-d820a40db705"
version = "0.2.1"
[[deps.SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
version = "0.7.0"
[[deps.SIMD]]
deps = ["PrecompileTools"]
git-tree-sha1 = "2803cab51702db743f3fda07dd1745aadfbf43bd"
uuid = "fdea26ae-647d-5447-a871-4b548cad5224"
version = "3.5.0"
[[deps.SIMDTypes]]
git-tree-sha1 = "330289636fb8107c5f32088d2741e9fd7a061a5c"
uuid = "94e857df-77ce-4151-89e5-788b33177be4"
version = "0.1.0"
[[deps.SLEEFPirates]]
deps = ["IfElse", "Static", "VectorizationBase"]
git-tree-sha1 = "456f610ca2fbd1c14f5fcf31c6bfadc55e7d66e0"
uuid = "476501e8-09a2-5ece-8869-fb82de89a1fa"
version = "0.6.43"
[[deps.Scratch]]
deps = ["Dates"]
git-tree-sha1 = "3bac05bc7e74a75fd9cba4295cde4045d9fe2386"
uuid = "6c6a2e73-6563-6170-7368-637461726353"
version = "1.2.1"
[[deps.SentinelArrays]]
deps = ["Dates", "Random"]
git-tree-sha1 = "ff11acffdb082493657550959d4feb4b6149e73a"
uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
version = "1.4.5"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[deps.ShaderAbstractions]]
deps = ["ColorTypes", "FixedPointNumbers", "GeometryBasics", "LinearAlgebra", "Observables", "StaticArrays", "StructArrays", "Tables"]
git-tree-sha1 = "79123bc60c5507f035e6d1d9e563bb2971954ec8"
uuid = "65257c39-d410-5151-9873-9b3e5be5013e"
version = "0.4.1"
[[deps.SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[deps.Showoff]]
deps = ["Dates", "Grisu"]
git-tree-sha1 = "91eddf657aca81df9ae6ceb20b959ae5653ad1de"
uuid = "992d4aef-0814-514b-bc4d-f2e9a6c4116f"
version = "1.0.3"
[[deps.SignedDistanceFields]]
deps = ["Random", "Statistics", "Test"]
git-tree-sha1 = "d263a08ec505853a5ff1c1ebde2070419e3f28e9"
uuid = "73760f76-fbc4-59ce-8f25-708e95d2df96"
version = "0.4.0"
[[deps.SimpleBufferStream]]
git-tree-sha1 = "874e8867b33a00e784c8a7e4b60afe9e037b74e1"
uuid = "777ac1f9-54b0-4bf8-805c-2214025038e7"
version = "1.1.0"
[[deps.SimpleTraits]]
deps = ["InteractiveUtils", "MacroTools"]
git-tree-sha1 = "5d7e3f4e11935503d3ecaf7186eac40602e7d231"
uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d"
version = "0.9.4"
[[deps.Sixel]]
deps = ["Dates", "FileIO", "ImageCore", "IndirectArrays", "OffsetArrays", "REPL", "libsixel_jll"]
git-tree-sha1 = "2da10356e31327c7096832eb9cd86307a50b1eb6"
uuid = "45858cf5-a6b0-47a3-bbea-62219f50df47"
version = "0.1.3"
[[deps.Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[deps.SortingAlgorithms]]
deps = ["DataStructures"]
git-tree-sha1 = "66e0a8e672a0bdfca2c3f5937efb8538b9ddc085"
uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c"
version = "1.2.1"
[[deps.SparseArrays]]
deps = ["Libdl", "LinearAlgebra", "Random", "Serialization", "SuiteSparse_jll"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
version = "1.10.0"
[[deps.SpecialFunctions]]
deps = ["IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"]
git-tree-sha1 = "2f5d4697f21388cbe1ff299430dd169ef97d7e14"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "2.4.0"
weakdeps = ["ChainRulesCore"]
[deps.SpecialFunctions.extensions]
SpecialFunctionsChainRulesCoreExt = "ChainRulesCore"
[[deps.StackViews]]
deps = ["OffsetArrays"]
git-tree-sha1 = "46e589465204cd0c08b4bd97385e4fa79a0c770c"
uuid = "cae243ae-269e-4f55-b966-ac2d0dc13c15"
version = "0.1.1"
[[deps.Static]]
deps = ["CommonWorldInvalidations", "IfElse", "PrecompileTools"]
git-tree-sha1 = "87d51a3ee9a4b0d2fe054bdd3fc2436258db2603"
uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3"
version = "1.1.1"
[[deps.StaticArrayInterface]]
deps = ["ArrayInterface", "Compat", "IfElse", "LinearAlgebra", "PrecompileTools", "Static"]
git-tree-sha1 = "96381d50f1ce85f2663584c8e886a6ca97e60554"
uuid = "0d7ed370-da01-4f52-bd93-41d350b8b718"
version = "1.8.0"
weakdeps = ["OffsetArrays", "StaticArrays"]
[deps.StaticArrayInterface.extensions]
StaticArrayInterfaceOffsetArraysExt = "OffsetArrays"
StaticArrayInterfaceStaticArraysExt = "StaticArrays"
[[deps.StaticArrays]]
deps = ["LinearAlgebra", "PrecompileTools", "Random", "StaticArraysCore"]
git-tree-sha1 = "eeafab08ae20c62c44c8399ccb9354a04b80db50"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "1.9.7"
weakdeps = ["ChainRulesCore", "Statistics"]
[deps.StaticArrays.extensions]
StaticArraysChainRulesCoreExt = "ChainRulesCore"
StaticArraysStatisticsExt = "Statistics"
[[deps.StaticArraysCore]]
git-tree-sha1 = "192954ef1208c7019899fbf8049e717f92959682"
uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c"
version = "1.4.3"
[[deps.Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
version = "1.10.0"
[[deps.StatsAPI]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "1ff449ad350c9c4cbc756624d6f8a8c3ef56d3ed"
uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0"
version = "1.7.0"
[[deps.StatsBase]]
deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"]
git-tree-sha1 = "5cf7606d6cef84b543b483848d4ae08ad9832b21"
uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
version = "0.34.3"
[[deps.StatsFuns]]
deps = ["HypergeometricFunctions", "IrrationalConstants", "LogExpFunctions", "Reexport", "Rmath", "SpecialFunctions"]
git-tree-sha1 = "cef0472124fab0695b58ca35a77c6fb942fdab8a"
uuid = "4c63d2b9-4356-54db-8cca-17b64c39e42c"
version = "1.3.1"
[deps.StatsFuns.extensions]
StatsFunsChainRulesCoreExt = "ChainRulesCore"
StatsFunsInverseFunctionsExt = "InverseFunctions"
[deps.StatsFuns.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.StrideArraysCore]]
deps = ["ArrayInterface", "CloseOpenIntervals", "IfElse", "LayoutPointers", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface", "ThreadingUtilities"]
git-tree-sha1 = "f35f6ab602df8413a50c4a25ca14de821e8605fb"
uuid = "7792a7ef-975c-4747-a70f-980b88e8d1da"
version = "0.5.7"
[[deps.StringManipulation]]
deps = ["PrecompileTools"]
git-tree-sha1 = "a04cabe79c5f01f4d723cc6704070ada0b9d46d5"
uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e"
version = "0.3.4"
[[deps.StructArrays]]
deps = ["ConstructionBase", "DataAPI", "Tables"]
git-tree-sha1 = "f4dc295e983502292c4c3f951dbb4e985e35b3be"
uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a"
version = "0.6.18"
weakdeps = ["Adapt", "GPUArraysCore", "SparseArrays", "StaticArrays"]
[deps.StructArrays.extensions]
StructArraysAdaptExt = "Adapt"
StructArraysGPUArraysCoreExt = "GPUArraysCore"
StructArraysSparseArraysExt = "SparseArrays"
StructArraysStaticArraysExt = "StaticArrays"
[[deps.SuiteSparse]]
deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"]
uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9"
[[deps.SuiteSparse_jll]]
deps = ["Artifacts", "Libdl", "libblastrampoline_jll"]
uuid = "bea87d4a-7f5b-5778-9afe-8cc45184846c"
version = "7.2.1+1"
[[deps.TOML]]
deps = ["Dates"]
uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
version = "1.0.3"
[[deps.TableTraits]]
deps = ["IteratorInterfaceExtensions"]
git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39"
uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
version = "1.0.1"
[[deps.Tables]]
deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "OrderedCollections", "TableTraits"]
git-tree-sha1 = "598cd7c1f68d1e205689b1c2fe65a9f85846f297"
uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
version = "1.12.0"
[[deps.Tar]]
deps = ["ArgTools", "SHA"]
uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
version = "1.10.0"
[[deps.TensorCore]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "1feb45f88d133a655e001435632f019a9a1bcdb6"
uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50"
version = "0.1.1"
[[deps.Test]]
deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[deps.ThreadingUtilities]]
deps = ["ManualMemory"]
git-tree-sha1 = "eda08f7e9818eb53661b3deb74e3159460dfbc27"
uuid = "8290d209-cae3-49c0-8002-c8c24d57dab5"
version = "0.5.2"
[[deps.TiffImages]]
deps = ["ColorTypes", "DataStructures", "DocStringExtensions", "FileIO", "FixedPointNumbers", "IndirectArrays", "Inflate", "Mmap", "OffsetArrays", "PkgVersion", "ProgressMeter", "SIMD", "UUIDs"]
git-tree-sha1 = "bc7fd5c91041f44636b2c134041f7e5263ce58ae"
uuid = "731e570b-9d59-4bfa-96dc-6df516fadf69"
version = "0.10.0"
[[deps.TimerOutputs]]
deps = ["ExprTools", "Printf"]
git-tree-sha1 = "5a13ae8a41237cff5ecf34f73eb1b8f42fff6531"
uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f"
version = "0.5.24"
[[deps.TranscodingStreams]]
git-tree-sha1 = "e84b3a11b9bece70d14cce63406bbc79ed3464d2"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.11.2"
[[deps.Tricks]]
git-tree-sha1 = "7822b97e99a1672bfb1b49b668a6d46d58d8cbcb"
uuid = "410a4b4d-49e4-4fbc-ab6d-cb71b17b3775"
version = "0.1.9"
[[deps.TriplotBase]]
git-tree-sha1 = "4d4ed7f294cda19382ff7de4c137d24d16adc89b"
uuid = "981d1d27-644d-49a2-9326-4793e63143c3"
version = "0.1.0"
[[deps.Tulip]]
deps = ["CodecBzip2", "CodecZlib", "Krylov", "LDLFactorizations", "LinearAlgebra", "LinearOperators", "Logging", "MathOptInterface", "Printf", "QPSReader", "SparseArrays", "SuiteSparse", "TOML", "Test", "TimerOutputs"]
git-tree-sha1 = "c80e43731afecf9f96ed19dee6ead8f8fa8d0651"
uuid = "6dd1b50a-3aae-11e9-10b5-ef983d2400fa"
version = "0.9.6"
[[deps.URIs]]
git-tree-sha1 = "67db6cc7b3821e19ebe75791a9dd19c9b1188f2b"
uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4"
version = "1.5.1"
[[deps.UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[deps.UnPack]]
git-tree-sha1 = "387c1f73762231e86e0c9c5443ce3b4a0a9a0c2b"
uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed"
version = "1.0.2"
[[deps.Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[deps.UnicodeFun]]
deps = ["REPL"]
git-tree-sha1 = "53915e50200959667e78a92a418594b428dffddf"
uuid = "1cfade01-22cf-5700-b092-accc4b62d6e1"
version = "0.4.1"
[[deps.Unitful]]
deps = ["Dates", "LinearAlgebra", "Random"]
git-tree-sha1 = "d95fe458f26209c66a187b1114df96fd70839efd"
uuid = "1986cc42-f94f-5a68-af5c-568840ba703d"
version = "1.21.0"
[deps.Unitful.extensions]
ConstructionBaseUnitfulExt = "ConstructionBase"
InverseFunctionsUnitfulExt = "InverseFunctions"
[deps.Unitful.weakdeps]
ConstructionBase = "187b0558-2788-49d3-abe0-74a17ed4e7c9"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.UnsafeAtomics]]
git-tree-sha1 = "6331ac3440856ea1988316b46045303bef658278"
uuid = "013be700-e6cd-48c3-b4a1-df204f14c38f"
version = "0.2.1"
[[deps.UnsafeAtomicsLLVM]]
deps = ["LLVM", "UnsafeAtomics"]
git-tree-sha1 = "2d17fabcd17e67d7625ce9c531fb9f40b7c42ce4"
uuid = "d80eeb9a-aca5-4d75-85e5-170c8b632249"
version = "0.2.1"
[[deps.VectorizationBase]]
deps = ["ArrayInterface", "CPUSummary", "HostCPUFeatures", "IfElse", "LayoutPointers", "Libdl", "LinearAlgebra", "SIMDTypes", "Static", "StaticArrayInterface"]
git-tree-sha1 = "e7f5b81c65eb858bed630fe006837b935518aca5"
uuid = "3d5dd08c-fd9d-11e8-17fa-ed2836048c2f"
version = "0.21.70"
[[deps.WeakRefStrings]]
deps = ["DataAPI", "InlineStrings", "Parsers"]
git-tree-sha1 = "b1be2855ed9ed8eac54e5caff2afcdb442d52c23"
uuid = "ea10d353-3f73-51f8-a26c-33c1cb351aa5"
version = "1.4.2"
[[deps.WoodburyMatrices]]
deps = ["LinearAlgebra", "SparseArrays"]
git-tree-sha1 = "c1a7aa6219628fcd757dede0ca95e245c5cd9511"
uuid = "efce3f68-66dc-5838-9240-27a6d6f5f9b6"
version = "1.0.0"
[[deps.WorkerUtilities]]
git-tree-sha1 = "cd1659ba0d57b71a464a29e64dbc67cfe83d54e7"
uuid = "76eceee3-57b5-4d4a-8e66-0e911cebbf60"
version = "1.6.1"
[[deps.XML2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libiconv_jll", "Zlib_jll"]
git-tree-sha1 = "1165b0443d0eca63ac1e32b8c0eb69ed2f4f8127"
uuid = "02c8fc9c-b97f-50b9-bbe4-9be30ff0a78a"
version = "2.13.3+0"
[[deps.XSLT_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libgcrypt_jll", "Libgpg_error_jll", "Libiconv_jll", "XML2_jll", "Zlib_jll"]
git-tree-sha1 = "a54ee957f4c86b526460a720dbc882fa5edcbefc"
uuid = "aed1982a-8fda-507f-9586-7b0439959a61"
version = "1.1.41+0"
[[deps.Xorg_libX11_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxcb_jll", "Xorg_xtrans_jll"]
git-tree-sha1 = "afead5aba5aa507ad5a3bf01f58f82c8d1403495"
uuid = "4f6342f7-b3d2-589e-9d20-edeb45f2b2bc"
version = "1.8.6+0"
[[deps.Xorg_libXau_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "6035850dcc70518ca32f012e46015b9beeda49d8"
uuid = "0c0b7dd1-d40b-584c-a123-a41640f87eec"
version = "1.0.11+0"
[[deps.Xorg_libXdmcp_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "34d526d318358a859d7de23da945578e8e8727b7"
uuid = "a3789734-cfe1-5b06-b2d0-1dd0d9d62d05"
version = "1.1.4+0"
[[deps.Xorg_libXext_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"]
git-tree-sha1 = "d2d1a5c49fae4ba39983f63de6afcbea47194e85"
uuid = "1082639a-0dae-5f34-9b06-72781eeb8cb3"
version = "1.3.6+0"
[[deps.Xorg_libXrender_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"]
git-tree-sha1 = "47e45cd78224c53109495b3e324df0c37bb61fbe"
uuid = "ea2f1a96-1ddc-540d-b46f-429655e07cfa"
version = "0.9.11+0"
[[deps.Xorg_libpthread_stubs_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "8fdda4c692503d44d04a0603d9ac0982054635f9"
uuid = "14d82f49-176c-5ed1-bb49-ad3f5cbd8c74"
version = "0.1.1+0"
[[deps.Xorg_libxcb_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "XSLT_jll", "Xorg_libXau_jll", "Xorg_libXdmcp_jll", "Xorg_libpthread_stubs_jll"]
git-tree-sha1 = "bcd466676fef0878338c61e655629fa7bbc69d8e"
uuid = "c7cfdc94-dc32-55de-ac96-5a1b8d977c5b"
version = "1.17.0+0"
[[deps.Xorg_xtrans_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e92a1a012a10506618f10b7047e478403a046c77"
uuid = "c5fb5394-a638-5e4d-96e5-b29de1b5cf10"
version = "1.5.0+0"
[[deps.ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "f492b7fe1698e623024e873244f10d89c95c340a"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.10.1"
[[deps.Zlib_jll]]
deps = ["Libdl"]
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.13+1"
[[deps.isoband_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "51b5eeb3f98367157a7a12a1fb0aa5328946c03c"
uuid = "9a68df92-36a6-505f-a73e-abb412b6bfb4"
version = "0.2.3+0"
[[deps.libaom_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1827acba325fdcdf1d2647fc8d5301dd9ba43a9d"
uuid = "a4ae2306-e953-59d6-aa16-d00cac43593b"
version = "3.9.0+0"
[[deps.libass_jll]]
deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "e17c115d55c5fbb7e52ebedb427a0dca79d4484e"
uuid = "0ac62f75-1d6f-5e53-bd7c-93b484bb37c0"
version = "0.15.2+0"
[[deps.libblastrampoline_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "8e850b90-86db-534c-a0d3-1478176c7d93"
version = "5.8.0+1"
[[deps.libfdk_aac_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "8a22cf860a7d27e4f3498a0fe0811a7957badb38"
uuid = "f638f0a6-7fb0-5443-88ba-1cc74229b280"
version = "2.0.3+0"
[[deps.libpng_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "d7015d2e18a5fd9a4f47de711837e980519781a4"
uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f"
version = "1.6.43+1"
[[deps.libsixel_jll]]
deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Pkg", "libpng_jll"]
git-tree-sha1 = "d4f63314c8aa1e48cd22aa0c17ed76cd1ae48c3c"
uuid = "075b6546-f08a-558a-be8f-8157d0f608a5"
version = "1.10.3+0"
[[deps.libvorbis_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Ogg_jll", "Pkg"]
git-tree-sha1 = "490376214c4721cdaca654041f635213c6165cb3"
uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a"
version = "1.3.7+2"
[[deps.nghttp2_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d"
version = "1.52.0+1"
[[deps.oneTBB_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "7d0ea0f4895ef2f5cb83645fa689e52cb55cf493"
uuid = "1317d2d5-d96f-522e-a858-c73665f53c3e"
version = "2021.12.0+0"
[[deps.p7zip_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
version = "17.4.0+2"
[[deps.x264_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "35976a1216d6c066ea32cba2150c4fa682b276fc"
uuid = "1270edf5-f2f9-52d2-97e9-ab00b5d0237a"
version = "10164.0.0+0"
[[deps.x265_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "dcc541bb19ed5b0ede95581fb2e41ecf179527d2"
uuid = "dfaa095f-4041-5dcd-9319-2fabd8486b76"
version = "3.6.0+0"
"""
# ╔═╡ Cell order:
# ββ8d867c72-2924-46a0-8a60-7c6e52f71a67
# ββc1df03d1-6205-4caa-9bdf-7daa5ba59d3a
# ββa5301146-6eac-4bcd-97d9-3bfd6fe4f213
# ββda9cc45d-8529-4965-b213-61b2657fce28
# ββfe0ac519-7995-419a-a8ac-02af958342cd
# ββ29b6a32d-9003-4bc7-8351-0d1881153bf6
# ββdb98d796-c0d2-11ec-2c96-f7510a6d771c
# ββ973f46d5-83b7-466a-a8c3-406643f7dbc5
# ββab49655b-ab30-457c-a476-9f6dd310ab4b
# ββ7796c8e9-a090-4aab-a073-50f839ceab22
# ββ00000000-0000-0000-0000-000000000001
# ββ00000000-0000-0000-0000-000000000002
using Distributed
calc_SatToSat=true
calc_ModToMod=false
calc_ModToSat=false
zm_test_case=true
choice_method="emd2" #only for 2D case
test_methods=false
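# Note on the switches above (comment added for clarity; based on how they are used
# below): the three calc_* flags select which pairwise distance matrices get computed
# (model-model, satellite-satellite, model-satellite), zm_test_case switches between
# the precomputed "zonal sum" test case (M.csv, S.csv) and the full 2D maps, and
# choice_method selects the optimal transport solver, which only matters in the 2D case.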
println(calc_SatToSat)
println(calc_ModToMod)
println(calc_ModToSat)
println(choice_method)
println(zm_test_case)
##
pth_output=joinpath(tempdir(),"OptimalTransport_example")
!isdir(pth_output) ? mkdir(pth_output) : nothing
@everywhere using Distributed, DistributedArrays, SharedArrays
@everywhere using OptimalTransport, Statistics, LinearAlgebra
@everywhere using Tulip, Distances, JLD2, Tables, CSV, DataFrames
#@everywhere Cost=load("examples/example_Cost.jld2")["Cost"]
@everywhere M=Tables.matrix(CSV.read("examples/M.csv",DataFrame))
@everywhere S=Tables.matrix(CSV.read("examples/S.csv",DataFrame))
@everywhere nx=size(M,1)
## functions that use the "zonal sum" test case
@everywhere function ModToMod_MS(i,j)
Cost=Float64.([abs(i-j) for i in 1:nx, j in 1:nx])
emd2(M[:,i],M[:,j], Cost, Tulip.Optimizer())
end
@everywhere function SatToSat_MS(i,j)
Cost=Float64.([abs(i-j) for i in 1:nx, j in 1:nx])
emd2(S[:,i],S[:,j], Cost, Tulip.Optimizer())
end
@everywhere function ModToSat_MS(i,j)
Cost=Float64.([abs(i-j) for i in 1:nx, j in 1:nx])
emd2(M[:,i],S[:,j], Cost, Tulip.Optimizer())
#ε = 0.01
#γ = sinkhorn_stabilized_epsscaling(M[:,i],S[:,j], Cost, ε; maxiter=5_000)
#dot(γ, Cost) #compute optimal cost, directly
end
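# Illustrative sketch (added; not part of the original workflow): the *_MS functions
# above call emd2 from OptimalTransport.jl, which returns the optimal transport cost
# (earth mover's distance) between two discrete distributions for a given cost matrix.
# The toy check below reuses the packages already loaded via @everywhere above.
function demo_emd2_MS()
a=[1.0,0.0,0.0] #all mass in bin 1
b=[0.0,0.0,1.0] #all mass in bin 3
C=Float64.([abs(i-j) for i in 1:3, j in 1:3]) #cost = distance between bins
emd2(a,b,C,Tulip.Optimizer()) #expected value: 2.0 (one unit of mass moved by two bins)
end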
## functions that use the full 2D case
@everywhere function ModToSat(i,j)
a=Chl_from_Mod[:,:,i][:]
b=Chl_from_Sat[:,:,j][:]
a,b=preprocess_Chl(a,b)
if choice_method=="sinkhorn2"
ε = 0.05
sinkhorn2(a,b, Cost, ε)
elseif choice_method=="emd2"
emd2(a,b, Cost, Tulip.Optimizer())
elseif choice_method=="epsscaling"
ε = 0.01
γ = sinkhorn_stabilized_epsscaling(a,b, Cost, ε; maxiter=5_000)
dot(γ, Cost) #compute optimal cost, directly
end
end
@everywhere function ModToMod(i,j)
a=Chl_from_Mod[:,:,i][:]
b=Chl_from_Mod[:,:,j][:]
a,b=preprocess_Chl(a,b)
if choice_method=="sinkhorn2"
ε = 0.05
sinkhorn2(a,b, Cost, ε)
elseif choice_method=="emd2"
emd2(a,b, Cost, Tulip.Optimizer())
elseif choice_method=="epsscaling"
ε = 0.01
γ = sinkhorn_stabilized_epsscaling(a,b, Cost, ε; maxiter=5_000)
dot(γ, Cost) #compute optimal cost, directly
end
end
@everywhere function SatToSat(i,j)
a=Chl_from_Sat[:,:,i][:]
b=Chl_from_Sat[:,:,j][:]
a,b=preprocess_Chl(a,b)
if choice_method=="sinkhorn2"
ε = 0.05
sinkhorn2(a,b, Cost, ε)
elseif choice_method=="emd2"
emd2(a,b, Cost, Tulip.Optimizer())
elseif choice_method=="epsscaling"
ε = 0.01
γ = sinkhorn_stabilized_epsscaling(a,b, Cost, ε; maxiter=5_000)
dot(γ, Cost) #compute optimal cost, directly
end
end
##
@everywhere include("OptimalTransport_setup.jl")
II=[[i,j] for i in 1:12, j in 1:12][:];
using Random; JJ=shuffle(II);
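# II lists all 144 (i,j) month pairs; shuffling them into JJ presumably helps spread
# cheap and expensive pairs more evenly across the batches processed below.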
if calc_ModToMod
d = SharedArray{Float64}(12,12)
t0=[time()]
for kk in 1:36
@sync @distributed for k in (kk-1)*4 .+ collect(1:4)
i=JJ[k][1]
j=JJ[k][2]
zm_test_case ? d[i,j]=ModToMod_MS(i,j) : d[i,j]=ModToMod(i,j)
end
dt=time()-t0[1]
println("ModToMod $(kk) $(dt)")
t0[1]=time()
jldsave(joinpath(pth_output,"ModToMod_$(choice_method).jld2"); d = d.s)
end
end
if calc_SatToSat
d = SharedArray{Float64}(12,12)
t0=[time()]
for kk in 1:36
@sync @distributed for k in (kk-1)*4 .+ collect(1:4)
i=JJ[k][1]
j=JJ[k][2]
zm_test_case ? d[i,j]=SatToSat_MS(i,j) : d[i,j]=SatToSat(i,j)
end
dt=time()-t0[1]
println("SatToSat $(kk) $(dt)")
t0[1]=time()
jldsave(joinpath(pth_output,"SatToSat.jld2"); d = d.s)
end
end
if calc_ModToSat
d = SharedArray{Float64}(12,12)
t0=[time()]
for kk in 1:36
@sync @distributed for k in (kk-1)*4 .+ collect(1:4)
i=JJ[k][1]
j=JJ[k][2]
zm_test_case ? d[i,j]=ModToSat_MS(i,j) : d[i,j]=ModToSat(i,j)
end
dt=time()-t0[1]
println("ModToSat $(kk) $(dt)")
t0[1]=time()
jldsave(joinpath(pth_output,"ModToSat.jld2"); d = d.s)
end
end
## function used only for testing several methods at once
@everywhere function ModToMod_methods(i,j,mthd=1)
a=Chl_from_Mod[:,:,i][:]
b=Chl_from_Mod[:,:,j][:]
a,b=preprocess_Chl(a,b)
a=sum(reshape(a,(120,140)),dims=1)[:]
b=sum(reshape(b,(120,140)),dims=1)[:]
Cost=Float64.([abs(i-j) for i in 1:140, j in 1:140])
if mthd==1
ε = 0.05
sinkhorn2(a,b, Cost, ε)
elseif mthd==2
emd2(a,b, Cost, Tulip.Optimizer())
elseif mthd==3
ε = 0.005
γ = sinkhorn_stabilized(a,b, Cost, ε; maxiter=5_000)
dot(γ, Cost) #compute optimal cost, directly
elseif mthd==4
ε = 0.005
γ = sinkhorn_stabilized_epsscaling(a,b, Cost, ε; maxiter=5_000)
dot(γ, Cost) #compute optimal cost, directly
# elseif mthd==5
# ε = 0.05
# γ = quadreg(a,b, Cost, ε; maxiter=100)
# dot(γ, Cost) #compute optimal cost, directly
end
end
if test_methods
#KK=([1,1],[1,2],[1,9])
KK=[[1,j] for j in 1:12]
d = SharedArray{Float64}(6,length(KK))
t0=[time()]
for k in 1:4
for kk in 1:12
i=KK[kk][1]
j=KK[kk][2]
try
d[k,kk]=ModToMod_methods(i,j,k)
catch
d[k,kk]=NaN
end
println("$(k) $(kk) $(d[k,kk])")
end
dt=time()-t0[1]
println("ModToMod_methods $(k) $(dt)")
t0[1]=time()
jldsave(joinpath(pth_output,"ModToMod_methods.jld2"); d = d.s)
end
end
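# Usage note (added; assumptions flagged): this script relies on Distributed and
# SharedArrays, so it is meant to be launched with several workers, e.g. via
# `julia -p 4 <this_script>.jl` (exact file name not shown here). Each pass of the
# outer loops computes a batch of 4 (i,j) pairs in parallel and then checkpoints the
# distance matrix to a JLD2 file under pth_output.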
#using Climatology, MeshArrays, NCTiles
using JLD2
import CairoMakie as Mkie
pth_output=joinpath(tempdir(),"OptimalTransport_example")
function EMD_plot(fil)
d=load(fil)["d"];
d[findall(d.==0.0)].=NaN;
fig = Mkie.Figure(resolution = (600,400), backgroundcolor = :grey95, fontsize=12)
ax = Mkie.Axis(fig[1,1])
hm=Mkie.heatmap!(d)
Mkie.Colorbar(fig[1,2], hm, height = Mkie.Relative(0.65))
fig
end
function EMD_plot_all(pth=pth_output)
fil1=joinpath(pth,"ModToMod.jld2")
fil2=joinpath(pth,"SatToSat.jld2")
fil3=joinpath(pth,"ModToSat.jld2")
d1=load(fil1)["d"]; d1[findall(d1.==0.0)].=NaN;
d2=load(fil2)["d"]; d2[findall(d2.==0.0)].=NaN;
d3=load(fil3)["d"]; d3[findall(d3.==0.0)].=NaN;
#to check the alignment of dimensions, one can uncomment: d3[1:end,1].=NaN
#cr=(0.07, 0.15)
cr=(0.0, 10.0)
fig = Mkie.Figure(resolution = (600,400), backgroundcolor = :grey95, fontsize=12)
ax = Mkie.Axis(fig[1,1])
hm=Mkie.heatmap!(d1, colorrange = cr, colormap=:inferno)
Mkie.ylims!(ax, (12.5, 0.5)); Mkie.xlims!(ax, (0.5,12.5))
ax = Mkie.Axis(fig[1,2])
hm=Mkie.heatmap!(transpose(d3), colorrange = cr, colormap=:inferno)
Mkie.ylims!(ax, (12.5, 0.5)); Mkie.xlims!(ax, (0.5,12.5))
ax = Mkie.Axis(fig[2,1])
hm=Mkie.heatmap!(d3, colorrange = cr, colormap=:inferno)
Mkie.ylims!(ax, (12.5, 0.5)); Mkie.xlims!(ax, (0.5,12.5))
ax = Mkie.Axis(fig[2,2])
hm=Mkie.heatmap!(d2, colorrange = cr, colormap=:inferno)
Mkie.ylims!(ax, (12.5, 0.5)); Mkie.xlims!(ax, (0.5,12.5))
Mkie.Colorbar(fig[1:2,3], hm, height = Mkie.Relative(0.65))
fig
end
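# Usage sketch (added; assumes the companion driver script has already written the
# JLD2 distance matrices under pth_output):
# fig1=EMD_plot(joinpath(pth_output,"SatToSat.jld2"))
# fig2=EMD_plot_all(pth_output)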
"""
object: setup to compute optimal transport between model and/or satellite climatologies
date: 2021/10/28
author: Gaël Forget
- examples/CBIOMES_climatology_compare.jl
"""
import Climatology, NCTiles
using Statistics, LinearAlgebra, JLD2
## load files
fil_out=joinpath(datadep"CBIOMES-clim1","CBIOMES-global-alpha-climatology.nc")
nc=NCTiles.NCDataset(fil_out,"r")
lon=nc["lon"][:]
lat=nc["lat"][:]
uni=nc["Chl"].attrib["units"]
## region and base distance (Cost) definition
i1=findall( (lon.>-180.0).*(lon.<-120.0) )
j1=findall( (lat.>-20.0).*(lat.<50.0) )
## main arrays
Chl_from_Mod=nc["Chl"][i1,j1,:]
#DataDeps?
fil_sat="examples_climatology_prep/gridded_geospatial_montly_clim_360_720_ver_0_2.nc"
Chl_from_Sat=NCTiles.NCDataset(fil_sat,"r")["Chl"][i1,j1,:]
## cost matrix
if !isfile("examples_EMD_paper_exploration/example_Cost.jld2")
#this only needs to be done once
#C = [[i,j] for i in i1, j in j1]
C = [[lon[i],lat[j]] for i in i1, j in j1]
C=C[:]
gcdist(lo1,lo2,la1,la2) = acos(sind(la1)*sind(la2)+cosd(la1)*cosd(la2)*cosd(lo1-lo2))
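# gcdist gives the great-circle distance on the unit sphere (in radians), with
# longitudes and latitudes provided in degrees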
#C=[gcdist(C[i][1],C[j][1],C[i][2],C[j][2]) for i in 1:length(C), j in 1:length(C)]
nx=length(C)
Cost=zeros(nx,nx)
for i in 1:length(C), j in 1:length(C)
i!==j ? Cost[i,j]=gcdist(C[i][1],C[j][1],C[i][2],C[j][2]) : nothing
end
@save "examples_EMD_paper_exploration/example_Cost.jld2" Cost
end
Cost=load("examples_EMD_paper_exploration/example_Cost.jld2")["Cost"]
println("reusing Cost matrix computed previously\n")
## helper functions
function preprocess_Chl(a,b)
k=findall(ismissing.(a).|ismissing.(b));
a[k].=0.0; b[k].=0.0;
k=findall((a.<0).|(b.<0));
a[k].=0.0; b[k].=0.0;
k=findall(isnan.(a).|isnan.(b));
a[k].=0.0; b[k].=0.0;
M=0.1
k=findall((a.>M).|(b.>M));
a[findall(a.>M)].=M;
b[findall(b.>M)].=M;
a=Float64.(a); a=a/sum(a)
b=Float64.(b); b=b/sum(b)
a,b
end
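# Note (added comment): after preprocess_Chl, a and b are non-negative, capped at 0.1,
# and normalized to sum to one, i.e. valid discrete distributions as expected by
# emd2 / sinkhorn2 in the companion driver script.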
##
function export_zm()
M=NaN*zeros(140,12)
S=NaN*zeros(140,12)
for t in 1:12
a=Chl_from_Mod[:,:,t][:]
b=Chl_from_Sat[:,:,t][:]
a,b=preprocess_Chl(a,b)
M[:,t]=sum(reshape(a,(120,140)),dims=1)[:]
S[:,t]=sum(reshape(b,(120,140)),dims=1)[:]
end
(M,S)
end
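# Sketch (added; an assumption rather than part of this file): the zonal-sum tables
# returned by export_zm() appear to correspond to the examples/M.csv and examples/S.csv
# files read by the driver script, and could be written out along these lines:
# using CSV, DataFrames
# (M,S)=export_zm()
# CSV.write("examples/M.csv",DataFrame(M,:auto))
# CSV.write("examples/S.csv",DataFrame(S,:auto))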
### A Pluto.jl notebook ###
# v0.19.46
using Markdown
using InteractiveUtils
# This Pluto notebook uses @bind for interactivity. When running this notebook outside of Pluto, the following 'mock version' of @bind gives bound variables a default value (instead of an error).
macro bind(def, element)
quote
local iv = try Base.loaded_modules[Base.PkgId(Base.UUID("6e696c72-6542-2067-7265-42206c756150"), "AbstractPlutoDingetjes")].Bonds.initial_value catch; b -> missing; end
local el = $(esc(element))
global $(esc(def)) = Core.applicable(Base.get, el) ? Base.get(el) : iv(el)
el
end
end
# ╔═╡ a8e0b727-a416-4aad-b660-69e5470c7e9e
begin
using Climatology, NCDatasets, CairoMakie, Dataverse, ArchGDAL, PlutoUI
ClimatologyMakieExt=Base.get_extension(Climatology, :ClimatologyMakieExt)
"Done with Julia packages"
end
# ╔═╡ 71e87ed3-5a9f-49aa-99af-cf144501c678
md"""# Regional Sea Level
Visualize dynamic sea level anomaly (colors) and ocean bathymetry/topography (contours) in the region of the Azores as a function of space and time.
!!! tip
Choose between two data sets (sources: NASA PODAAC, ESA CMEMS), select time, or generate animation.
"""
# ╔═╡ a58cc4b4-7023-4dcf-a5f4-6366be8047a3
TableOfContents()
# ╔═╡ 62e0b8a9-0025-4ce7-9538-b6114d97b762
md"""## Visualize Data
- Color shading shows the sea level anomaly from a gridded data product based on satellite measurements (altimetry).
- Contours show the relief (topography, bathymetry). Light pink contours correspond to the Azores islands.
"""
# ╔═╡ c93dde18-e639-4edd-9192-f6c9eed0cb89
@bind fil Select(["sla_podaac.nc","sla_cmems.nc"])
# ╔═╡ 0c9fdfb0-bddc-4def-9954-526978491a84
dates=SLA_MAIN.sla_dates(fil)
# ╔═╡ 8ff180a4-dd71-4a70-82ab-70bc80427abb
@bind d0 Select(dates)
# ╔═╡ ff7dd5eb-5b1b-4314-9553-b8c05c4d7376
md"""## Data Set"""
# ╔═╡ 9b3c3856-9fe1-43ba-97a2-abcd5b385c1d
sla=read(SeaLevelAnomaly(name=fil[1:end-3],path=tempname()))
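# note: fil[1:end-3] strips the trailing ".nc" so the data set name matches the file name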
# ╔═╡ a45bbdbd-3793-4e69-b042-39a4a1ac7ed7
plot(sla) #,topo=topo)
# ╔═╡ 1cf2cdb9-3c09-4b39-81cf-49318c16f531
md"""## Appendix
### Julia Codes"""
# ╔═╡ 50b75406-e55f-433d-bd7b-089d975e5001
t0=findall(dates.==d0)[1]
# ╔═╡ af68228e-710c-4a5a-be48-c716592f8f45
md"""### Data Sources
- Topography, bathymetry : NOAA [etopo-global-relief-model](https://www.ncei.noaa.gov/products/etopo-global-relief-model)
- Sea Level Anomaly #1 : NASA PODAAC [page 1](https://sealevel.nasa.gov/data/dataset/?identifier=SLCP_SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205_2205), [page 2](https://podaac.jpl.nasa.gov/dataset/SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205)
- Sea Level Anomaly #2 : ESA CMEMS [here](https://data.marine.copernicus.eu/product/SEALEVEL_GLO_PHY_L4_MY_008_047/description)
The SLA data sets used in this notebook are available in [this Dataverse repo](https://doi.org/10.7910/DVN/OYBLGK).
They were generated using `OceanRobots.podaac_sla.subset()` from the above sources.
"""
# ╔═╡ aa82d9bd-8ba2-4e18-8f65-e71b0361f5cb
begin
DOI="doi:10.7910/DVN/OYBLGK"
df=Dataverse.file_list(DOI)
@bind filename Select(df.filename,default="exportImage_60arc.tiff")
end
# ╔═╡ 24a9fc25-b85b-4582-a36b-0a43e04ee799
begin
pth0=joinpath(tempdir(),"azores_region_data")
file0=joinpath(pth0,filename)
!ispath(pth0) ? mkdir(pth0) : nothing
!isfile(file0) ? Dataverse.file_download(df,filename,pth0) : nothing
"Downloaded to "*file0
end
# ╔═╡ eeb9d308-ef62-4dcc-ba90-a2a1912ef2bd
topo = begin
dataset = ArchGDAL.read(joinpath(pth0,"exportImage_60arc.tiff"))
band =ArchGDAL.getband(dataset, 1)
geotransform = ArchGDAL.getgeotransform(dataset)
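# GDAL geotransform convention: entries 1 and 4 are the x/y origins, 2 and 6 the pixel
# sizes; cell-center coordinates are reconstructed from them below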
(nx,ny)=size(band)
lon=geotransform[1] .+ geotransform[2]*(0.5:nx-0.5)
lat=geotransform[4] .+ geotransform[6]*(0.5:ny-0.5)
(lon=lon,lat=lat,z=band[:,:])
end
# ╔═╡ 5fec1029-34a1-4d43-9183-7e6095194a3a
md"""### Create Animation"""
# ╔═╡ ec8cbf44-82d9-11ed-0131-1bdea9285f79
begin
nt=size(sla.data[1]["SLA"],3)
framerate=Int(floor(nt/120))
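#nt/120 frames per second, so the nt time steps play back in roughly two minutes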
end
# ╔═╡ 8fbd1b1d-affe-4e30-a3b2-f2584e459003
#fil_mp4=ClimatologyMakieExt.make_movie(sla.data[1],1:nt,framerate=framerate,dates=dates)
# ╔═╡ 2d5611a9-b8ea-4d26-8ca3-edff9f2ebfdd
begin
url_mp4="http://www.gaelforget.net/notebooks/sla_podaac.mp4"
RemoteResource(url_mp4,:width=>400)
end
# ╔═╡ 00000000-0000-0000-0000-000000000001
PLUTO_PROJECT_TOML_CONTENTS = """
[deps]
ArchGDAL = "c9ce4bd3-c3d5-55b8-8973-c0e20141b8c3"
CairoMakie = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
Climatology = "9e9a4d37-2d2e-41e3-8b85-f7978328d9c7"
Dataverse = "9c0b9be8-e31e-490f-90fe-77697562404d"
NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab"
PlutoUI = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
[compat]
ArchGDAL = "~0.10.4"
CairoMakie = "~0.12.9"
Dataverse = "~0.2.5"
NCDatasets = "~0.14.5"
PlutoUI = "~0.7.60"
"""
# ╔═╡ 00000000-0000-0000-0000-000000000002
PLUTO_MANIFEST_TOML_CONTENTS = """
# This file is machine-generated - editing it directly is not advised
julia_version = "1.10.4"
manifest_format = "2.0"
project_hash = "62f140118185b625594d8eb29cc75b6d8798d796"
[[deps.AbstractFFTs]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "d92ad398961a3ed262d8bf04a1a2b8340f915fef"
uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c"
version = "1.5.0"
weakdeps = ["ChainRulesCore", "Test"]
[deps.AbstractFFTs.extensions]
AbstractFFTsChainRulesCoreExt = "ChainRulesCore"
AbstractFFTsTestExt = "Test"
[[deps.AbstractPlutoDingetjes]]
deps = ["Pkg"]
git-tree-sha1 = "6e1d2a35f2f90a4bc7c2ed98079b2ba09c35b83a"
uuid = "6e696c72-6542-2067-7265-42206c756150"
version = "1.3.2"
[[deps.AbstractTrees]]
git-tree-sha1 = "2d9c9a55f9c93e8887ad391fbae72f8ef55e1177"
uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c"
version = "0.4.5"
[[deps.AccurateArithmetic]]
deps = ["LinearAlgebra", "Random", "VectorizationBase"]
git-tree-sha1 = "07af26e8d08c211ef85918f3e25d4c0990d20d70"
uuid = "22286c92-06ac-501d-9306-4abd417d9753"
version = "0.3.8"
[[deps.Adapt]]
deps = ["LinearAlgebra", "Requires"]
git-tree-sha1 = "6a55b747d1812e699320963ffde36f1ebdda4099"
uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
version = "4.0.4"
weakdeps = ["StaticArrays"]
[deps.Adapt.extensions]
AdaptStaticArraysExt = "StaticArrays"
[[deps.AdaptivePredicates]]
git-tree-sha1 = "7d5da5dd472490d048b081ca1bda4a7821b06456"
uuid = "35492f91-a3bd-45ad-95db-fcad7dcfedb7"
version = "1.1.1"
[[deps.AliasTables]]
deps = ["PtrArrays", "Random"]
git-tree-sha1 = "9876e1e164b144ca45e9e3198d0b689cadfed9ff"
uuid = "66dad0bd-aa9a-41b7-9441-69ab47430ed8"
version = "1.1.3"
[[deps.Animations]]
deps = ["Colors"]
git-tree-sha1 = "e81c509d2c8e49592413bfb0bb3b08150056c79d"
uuid = "27a7e980-b3e6-11e9-2bcd-0b925532e340"
version = "0.4.1"
[[deps.ArchGDAL]]
deps = ["CEnum", "ColorTypes", "Dates", "DiskArrays", "Extents", "GDAL", "GeoFormatTypes", "GeoInterface", "GeoInterfaceMakie", "GeoInterfaceRecipes", "ImageCore", "Tables"]
git-tree-sha1 = "0504a357f95f37497b69bf282a137410f6be3b9c"
uuid = "c9ce4bd3-c3d5-55b8-8973-c0e20141b8c3"
version = "0.10.4"
weakdeps = ["Makie"]
[deps.ArchGDAL.extensions]
ArchGDALMakieExt = "Makie"
[[deps.ArgTools]]
uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
version = "1.1.1"
[[deps.ArrayInterface]]
deps = ["Adapt", "LinearAlgebra"]
git-tree-sha1 = "3640d077b6dafd64ceb8fd5c1ec76f7ca53bcf76"
uuid = "4fba245c-0d91-5ea0-9b3e-6abc04ee57a9"
version = "7.16.0"
[deps.ArrayInterface.extensions]
ArrayInterfaceBandedMatricesExt = "BandedMatrices"
ArrayInterfaceBlockBandedMatricesExt = "BlockBandedMatrices"
ArrayInterfaceCUDAExt = "CUDA"
ArrayInterfaceCUDSSExt = "CUDSS"
ArrayInterfaceChainRulesExt = "ChainRules"
ArrayInterfaceGPUArraysCoreExt = "GPUArraysCore"
ArrayInterfaceReverseDiffExt = "ReverseDiff"
ArrayInterfaceSparseArraysExt = "SparseArrays"
ArrayInterfaceStaticArraysCoreExt = "StaticArraysCore"
ArrayInterfaceTrackerExt = "Tracker"
[deps.ArrayInterface.weakdeps]
BandedMatrices = "aae01518-5342-5314-be14-df237901396f"
BlockBandedMatrices = "ffab5731-97b5-5995-9138-79e8c1846df0"
CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba"
CUDSS = "45b445bb-4962-46a0-9369-b4df9d0f772e"
ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
ReverseDiff = "37e2e3b7-166d-5795-8a7a-e32c996b4267"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
StaticArraysCore = "1e83bf80-4336-4d27-bf5d-d5a4f845583c"
Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c"
[[deps.Arrow_jll]]
deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Lz4_jll", "Pkg", "Thrift_jll", "Zlib_jll", "boost_jll", "snappy_jll"]
git-tree-sha1 = "d64cb60c0e6a138fbe5ea65bcbeea47813a9a700"
uuid = "8ce61222-c28f-5041-a97a-c2198fb817bf"
version = "10.0.0+1"
[[deps.Artifacts]]
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
[[deps.Automa]]
deps = ["PrecompileTools", "TranscodingStreams"]
git-tree-sha1 = "014bc22d6c400a7703c0f5dc1fdc302440cf88be"
uuid = "67c07d97-cdcb-5c2c-af73-a7f9c32a568b"
version = "1.0.4"
[[deps.AxisAlgorithms]]
deps = ["LinearAlgebra", "Random", "SparseArrays", "WoodburyMatrices"]
git-tree-sha1 = "01b8ccb13d68535d73d2b0c23e39bd23155fb712"
uuid = "13072b0f-2c55-5437-9ae7-d433b7a33950"
version = "1.1.0"
[[deps.AxisArrays]]
deps = ["Dates", "IntervalSets", "IterTools", "RangeArrays"]
git-tree-sha1 = "16351be62963a67ac4083f748fdb3cca58bfd52f"
uuid = "39de3d68-74b9-583c-8d2d-e117c070f3a9"
version = "0.4.7"
[[deps.Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[deps.BitFlags]]
git-tree-sha1 = "0691e34b3bb8be9307330f88d1a3c3f25466c24d"
uuid = "d1d4a3ce-64b1-5f1a-9ba4-7e7e69966f35"
version = "0.1.9"
[[deps.BitTwiddlingConvenienceFunctions]]
deps = ["Static"]
git-tree-sha1 = "f21cfd4950cb9f0587d5067e69405ad2acd27b87"
uuid = "62783981-4cbd-42fc-bca8-16325de8dc4b"
version = "0.1.6"
[[deps.Blosc_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Lz4_jll", "Zlib_jll", "Zstd_jll"]
git-tree-sha1 = "19b98ee7e3db3b4eff74c5c9c72bf32144e24f10"
uuid = "0b7ba130-8d10-5ba8-a3d6-c5182647fed9"
version = "1.21.5+0"
[[deps.Bzip2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "9e2a6b69137e6969bab0152632dcb3bc108c8bdd"
uuid = "6e34b625-4abd-537c-b88f-471c36dfa7a0"
version = "1.0.8+1"
[[deps.CEnum]]
git-tree-sha1 = "389ad5c84de1ae7cf0e28e381131c98ea87d54fc"
uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82"
version = "0.5.0"
[[deps.CFTime]]
deps = ["Dates", "Printf"]
git-tree-sha1 = "5afb5c5ba2688ca43a9ad2e5a91cbb93921ccfa1"
uuid = "179af706-886a-5703-950a-314cd64e0468"
version = "0.1.3"
[[deps.CPUSummary]]
deps = ["CpuId", "IfElse", "PrecompileTools", "Static"]
git-tree-sha1 = "5a97e67919535d6841172016c9530fd69494e5ec"
uuid = "2a0fbf3d-bb9c-48f3-b0a9-814d99fd7ab9"
version = "0.2.6"
[[deps.CRC32c]]
uuid = "8bf52ea8-c179-5cab-976a-9e18b702a9bc"
[[deps.CRlibm_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "e329286945d0cfc04456972ea732551869af1cfc"
uuid = "4e9b3aee-d8a1-5a3d-ad8b-7d824db253f0"
version = "1.0.1+0"
[[deps.CSV]]
deps = ["CodecZlib", "Dates", "FilePathsBase", "InlineStrings", "Mmap", "Parsers", "PooledArrays", "PrecompileTools", "SentinelArrays", "Tables", "Unicode", "WeakRefStrings", "WorkerUtilities"]
git-tree-sha1 = "6c834533dc1fabd820c1db03c839bf97e45a3fab"
uuid = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
version = "0.10.14"
[[deps.Cairo]]
deps = ["Cairo_jll", "Colors", "Glib_jll", "Graphics", "Libdl", "Pango_jll"]
git-tree-sha1 = "7b6ad8c35f4bc3bca8eb78127c8b99719506a5fb"
uuid = "159f3aea-2a34-519c-b102-8c37f9878175"
version = "1.1.0"
[[deps.CairoMakie]]
deps = ["CRC32c", "Cairo", "Cairo_jll", "Colors", "FileIO", "FreeType", "GeometryBasics", "LinearAlgebra", "Makie", "PrecompileTools"]
git-tree-sha1 = "361dec06290d76b6d70d0c7dc888038eec9df63a"
uuid = "13f3f980-e62b-5c42-98c6-ff1f3baf88f0"
version = "0.12.9"
[[deps.Cairo_jll]]
deps = ["Artifacts", "Bzip2_jll", "CompilerSupportLibraries_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "JLLWrappers", "LZO_jll", "Libdl", "Pixman_jll", "Xorg_libXext_jll", "Xorg_libXrender_jll", "Zlib_jll", "libpng_jll"]
git-tree-sha1 = "a2f1c8c668c8e3cb4cca4e57a8efdb09067bb3fd"
uuid = "83423d85-b0ee-5818-9007-b63ccbeb887a"
version = "1.18.0+2"
[[deps.CatViews]]
deps = ["Random", "Test"]
git-tree-sha1 = "23d1f1e10d4e24374112fcf800ac981d14a54b24"
uuid = "81a5f4ea-a946-549a-aa7e-2a7f63a27d31"
version = "1.0.0"
[[deps.ChainRulesCore]]
deps = ["Compat", "LinearAlgebra"]
git-tree-sha1 = "71acdbf594aab5bbb2cec89b208c41b4c411e49f"
uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
version = "1.24.0"
weakdeps = ["SparseArrays"]
[deps.ChainRulesCore.extensions]
ChainRulesCoreSparseArraysExt = "SparseArrays"
[[deps.Climatology]]
deps = ["CSV", "DataDeps", "DataFrames", "DataStructures", "Dataverse", "Dates", "Distributed", "Glob", "JLD2", "MeshArrays", "Pkg", "Printf", "RollingFunctions", "Scratch", "SharedArrays", "Statistics", "TOML", "URIs"]
git-tree-sha1 = "371a6c8522f3822b4c0042cf9529f472e4893a4f"
uuid = "9e9a4d37-2d2e-41e3-8b85-f7978328d9c7"
version = "0.5.9"
[deps.Climatology.extensions]
ClimatologyMITgcmExt = ["MITgcm"]
ClimatologyMakieExt = ["Makie"]
ClimatologyNCDatasetsExt = ["NCDatasets"]
[deps.Climatology.weakdeps]
MITgcm = "dce5fa8e-68ce-4431-a242-9469c69627a0"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
NCDatasets = "85f8d34a-cbdd-5861-8df4-14fed0d494ab"
[[deps.CloseOpenIntervals]]
deps = ["Static", "StaticArrayInterface"]
git-tree-sha1 = "05ba0d07cd4fd8b7a39541e31a7b0254704ea581"
uuid = "fb6a15b2-703c-40df-9091-08a04967cfa9"
version = "0.1.13"
[[deps.CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "bce6804e5e6044c6daab27bb533d1295e4a2e759"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.6"
[[deps.ColorBrewer]]
deps = ["Colors", "JSON", "Test"]
git-tree-sha1 = "61c5334f33d91e570e1d0c3eb5465835242582c4"
uuid = "a2cac450-b92f-5266-8821-25eda20663c8"
version = "0.4.0"
[[deps.ColorSchemes]]
deps = ["ColorTypes", "ColorVectorSpace", "Colors", "FixedPointNumbers", "PrecompileTools", "Random"]
git-tree-sha1 = "b5278586822443594ff615963b0c09755771b3e0"
uuid = "35d6a980-a343-548e-a6ea-1d62b119f2f4"
version = "3.26.0"
[[deps.ColorTypes]]
deps = ["FixedPointNumbers", "Random"]
git-tree-sha1 = "b10d0b65641d57b8b4d5e234446582de5047050d"
uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f"
version = "0.11.5"
[[deps.ColorVectorSpace]]
deps = ["ColorTypes", "FixedPointNumbers", "LinearAlgebra", "Requires", "Statistics", "TensorCore"]
git-tree-sha1 = "a1f44953f2382ebb937d60dafbe2deea4bd23249"
uuid = "c3611d14-8923-5661-9e6a-0046d554d3a4"
version = "0.10.0"
weakdeps = ["SpecialFunctions"]
[deps.ColorVectorSpace.extensions]
SpecialFunctionsExt = "SpecialFunctions"
[[deps.Colors]]
deps = ["ColorTypes", "FixedPointNumbers", "Reexport"]
git-tree-sha1 = "362a287c3aa50601b0bc359053d5c2468f0e7ce0"
uuid = "5ae59095-9a9b-59fe-a467-6f913c188581"
version = "0.12.11"
[[deps.CommonDataModel]]
deps = ["CFTime", "DataStructures", "Dates", "Preferences", "Printf", "Statistics"]
git-tree-sha1 = "d6fb5bf939a2753c74984b11434ea25d6c397a58"
uuid = "1fbeeb36-5f17-413c-809b-666fb144f157"
version = "0.3.6"
[[deps.CommonWorldInvalidations]]
git-tree-sha1 = "ae52d1c52048455e85a387fbee9be553ec2b68d0"
uuid = "f70d9fcc-98c5-4d4a-abd7-e4cdeebd8ca8"
version = "1.0.0"
[[deps.Compat]]
deps = ["TOML", "UUIDs"]
git-tree-sha1 = "8ae8d32e09f0dcf42a36b90d4e17f5dd2e4c4215"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "4.16.0"
weakdeps = ["Dates", "LinearAlgebra"]
[deps.Compat.extensions]
CompatLinearAlgebraExt = "LinearAlgebra"
[[deps.CompilerSupportLibraries_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
version = "1.1.1+0"
[[deps.ConcurrentUtilities]]
deps = ["Serialization", "Sockets"]
git-tree-sha1 = "ea32b83ca4fefa1768dc84e504cc0a94fb1ab8d1"
uuid = "f0e56b4a-5159-44fe-b623-3e5288b988bb"
version = "2.4.2"
[[deps.ConstructionBase]]
git-tree-sha1 = "76219f1ed5771adbb096743bff43fb5fdd4c1157"
uuid = "187b0558-2788-49d3-abe0-74a17ed4e7c9"
version = "1.5.8"
weakdeps = ["IntervalSets", "LinearAlgebra", "StaticArrays"]
[deps.ConstructionBase.extensions]
ConstructionBaseIntervalSetsExt = "IntervalSets"
ConstructionBaseLinearAlgebraExt = "LinearAlgebra"
ConstructionBaseStaticArraysExt = "StaticArrays"
[[deps.Contour]]
git-tree-sha1 = "439e35b0b36e2e5881738abc8857bd92ad6ff9a8"
uuid = "d38c429a-6771-53c6-b99e-75d170b6e991"
version = "0.6.3"
[[deps.CpuId]]
deps = ["Markdown"]
git-tree-sha1 = "fcbb72b032692610bfbdb15018ac16a36cf2e406"
uuid = "adafc99b-e345-5852-983c-f28acb93d879"
version = "0.3.1"
[[deps.Crayons]]
git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15"
uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
version = "4.1.1"
[[deps.DataAPI]]
git-tree-sha1 = "abe83f3a2f1b857aac70ef8b269080af17764bbe"
uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
version = "1.16.0"
[[deps.DataDeps]]
deps = ["HTTP", "Libdl", "Reexport", "SHA", "Scratch", "p7zip_jll"]
git-tree-sha1 = "8ae085b71c462c2cb1cfedcb10c3c877ec6cf03f"
uuid = "124859b0-ceae-595e-8997-d05f6a7a8dfe"
version = "0.7.13"
[[deps.DataFrames]]
deps = ["Compat", "DataAPI", "DataStructures", "Future", "InlineStrings", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrecompileTools", "PrettyTables", "Printf", "REPL", "Random", "Reexport", "SentinelArrays", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"]
git-tree-sha1 = "04c738083f29f86e62c8afc341f0967d8717bdb8"
uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
version = "1.6.1"
[[deps.DataStructures]]
deps = ["Compat", "InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "1d0a14036acb104d9e89698bd408f63ab58cdc82"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.18.20"
[[deps.DataValueInterfaces]]
git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
version = "1.0.0"
[[deps.Dataverse]]
deps = ["CSV", "CodecZlib", "DataFrames", "Downloads", "HTTP", "JSON", "Tar", "ZipFile"]
git-tree-sha1 = "9c24c5cf1552251d4bb44a76f9437b2d84e86302"
uuid = "9c0b9be8-e31e-490f-90fe-77697562404d"
version = "0.2.5"
[deps.Dataverse.extensions]
DataverseCondaExt = ["Conda"]
DataversePyCallExt = ["PyCall"]
[deps.Dataverse.weakdeps]
Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d"
PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0"
[[deps.Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[deps.DelaunayTriangulation]]
deps = ["AdaptivePredicates", "EnumX", "ExactPredicates", "Random"]
git-tree-sha1 = "9903123ab7fc5e55053292aff04ff5d7aff92633"
uuid = "927a84f5-c5f4-47a5-9785-b46e178433df"
version = "1.3.0"
[[deps.DiskArrays]]
deps = ["LRUCache", "OffsetArrays"]
git-tree-sha1 = "ef25c513cad08d7ebbed158c91768ae32f308336"
uuid = "3c3547ce-8d99-4f5e-a174-61eb10b00ae3"
version = "0.3.23"
[[deps.Distances]]
deps = ["LinearAlgebra", "Statistics", "StatsAPI"]
git-tree-sha1 = "66c4c81f259586e8f002eacebc177e1fb06363b0"
uuid = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
version = "0.10.11"
weakdeps = ["ChainRulesCore", "SparseArrays"]
[deps.Distances.extensions]
DistancesChainRulesCoreExt = "ChainRulesCore"
DistancesSparseArraysExt = "SparseArrays"
[[deps.Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[deps.Distributions]]
deps = ["AliasTables", "FillArrays", "LinearAlgebra", "PDMats", "Printf", "QuadGK", "Random", "SpecialFunctions", "Statistics", "StatsAPI", "StatsBase", "StatsFuns"]
git-tree-sha1 = "e6c693a0e4394f8fda0e51a5bdf5aef26f8235e9"
uuid = "31c24e10-a181-5473-b8eb-7969acd0382f"
version = "0.25.111"
[deps.Distributions.extensions]
DistributionsChainRulesCoreExt = "ChainRulesCore"
DistributionsDensityInterfaceExt = "DensityInterface"
DistributionsTestExt = "Test"
[deps.Distributions.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
DensityInterface = "b429d917-457f-4dbc-8f4c-0cc954292b1d"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[deps.DocStringExtensions]]
deps = ["LibGit2"]
git-tree-sha1 = "2fb1e02f2b635d0845df5d7c167fec4dd739b00d"
uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
version = "0.9.3"
[[deps.Downloads]]
deps = ["ArgTools", "FileWatching", "LibCURL", "NetworkOptions"]
uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
version = "1.6.0"
[[deps.EarCut_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "e3290f2d49e661fbd94046d7e3726ffcb2d41053"
uuid = "5ae413db-bbd1-5e63-b57d-d24a61df00f5"
version = "2.2.4+0"
[[deps.EnumX]]
git-tree-sha1 = "bdb1942cd4c45e3c678fd11569d5cccd80976237"
uuid = "4e289a0a-7415-4d19-859d-a7e5c4648b56"
version = "1.0.4"
[[deps.ExactPredicates]]
deps = ["IntervalArithmetic", "Random", "StaticArrays"]
git-tree-sha1 = "b3f2ff58735b5f024c392fde763f29b057e4b025"
uuid = "429591f6-91af-11e9-00e2-59fbe8cec110"
version = "2.2.8"
[[deps.ExceptionUnwrapping]]
deps = ["Test"]
git-tree-sha1 = "dcb08a0d93ec0b1cdc4af184b26b591e9695423a"
uuid = "460bff9d-24e4-43bc-9d9f-a8973cb893f4"
version = "0.1.10"
[[deps.Expat_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1c6317308b9dc757616f0b5cb379db10494443a7"
uuid = "2e619515-83b5-522b-bb60-26c02a35a201"
version = "2.6.2+0"
[[deps.Extents]]
git-tree-sha1 = "81023caa0021a41712685887db1fc03db26f41f5"
uuid = "411431e0-e8b7-467b-b5e0-f676ba4f2910"
version = "0.1.4"
[[deps.FFMPEG_jll]]
deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "JLLWrappers", "LAME_jll", "Libdl", "Ogg_jll", "OpenSSL_jll", "Opus_jll", "PCRE2_jll", "Zlib_jll", "libaom_jll", "libass_jll", "libfdk_aac_jll", "libvorbis_jll", "x264_jll", "x265_jll"]
git-tree-sha1 = "8cc47f299902e13f90405ddb5bf87e5d474c0d38"
uuid = "b22a6f82-2f65-5046-a5b2-351ab43fb4e5"
version = "6.1.2+0"
[[deps.FFTW]]
deps = ["AbstractFFTs", "FFTW_jll", "LinearAlgebra", "MKL_jll", "Preferences", "Reexport"]
git-tree-sha1 = "4820348781ae578893311153d69049a93d05f39d"
uuid = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341"
version = "1.8.0"
[[deps.FFTW_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "c6033cc3892d0ef5bb9cd29b7f2f0331ea5184ea"
uuid = "f5851436-0d7a-5f13-b9de-f02708fd171a"
version = "3.3.10+0"
[[deps.FastBroadcast]]
deps = ["ArrayInterface", "LinearAlgebra", "Polyester", "Static", "StaticArrayInterface", "StrideArraysCore"]
git-tree-sha1 = "ab1b34570bcdf272899062e1a56285a53ecaae08"
uuid = "7034ab61-46d4-4ed7-9d0f-46aef9175898"
version = "0.3.5"
[[deps.FileIO]]
deps = ["Pkg", "Requires", "UUIDs"]
git-tree-sha1 = "82d8afa92ecf4b52d78d869f038ebfb881267322"
uuid = "5789e2e9-d7fb-5bc7-8068-2c6fae9b9549"
version = "1.16.3"
[[deps.FilePaths]]
deps = ["FilePathsBase", "MacroTools", "Reexport", "Requires"]
git-tree-sha1 = "919d9412dbf53a2e6fe74af62a73ceed0bce0629"
uuid = "8fc22ac5-c921-52a6-82fd-178b2807b824"
version = "0.8.3"
[[deps.FilePathsBase]]
deps = ["Compat", "Dates"]
git-tree-sha1 = "7878ff7172a8e6beedd1dea14bd27c3c6340d361"
uuid = "48062228-2e41-5def-b9a4-89aafe57970f"
version = "0.9.22"
weakdeps = ["Mmap", "Test"]
[deps.FilePathsBase.extensions]
FilePathsBaseMmapExt = "Mmap"
FilePathsBaseTestExt = "Test"
[[deps.FileWatching]]
uuid = "7b1f6079-737a-58dc-b8bc-7a2ca5c1b5ee"
[[deps.FillArrays]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "6a70198746448456524cb442b8af316927ff3e1a"
uuid = "1a297f60-69ca-5386-bcde-b61e274b549b"
version = "1.13.0"
weakdeps = ["PDMats", "SparseArrays", "Statistics"]
[deps.FillArrays.extensions]
FillArraysPDMatsExt = "PDMats"
FillArraysSparseArraysExt = "SparseArrays"
FillArraysStatisticsExt = "Statistics"
[[deps.FixedPointNumbers]]
deps = ["Statistics"]
git-tree-sha1 = "05882d6995ae5c12bb5f36dd2ed3f61c98cbb172"
uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93"
version = "0.8.5"
[[deps.Fontconfig_jll]]
deps = ["Artifacts", "Bzip2_jll", "Expat_jll", "FreeType2_jll", "JLLWrappers", "Libdl", "Libuuid_jll", "Zlib_jll"]
git-tree-sha1 = "db16beca600632c95fc8aca29890d83788dd8b23"
uuid = "a3f928ae-7b40-5064-980b-68af3947d34b"
version = "2.13.96+0"
[[deps.Format]]
git-tree-sha1 = "9c68794ef81b08086aeb32eeaf33531668d5f5fc"
uuid = "1fa38f19-a742-5d3f-a2b9-30dd87b9d5f8"
version = "1.3.7"
[[deps.FreeType]]
deps = ["CEnum", "FreeType2_jll"]
git-tree-sha1 = "907369da0f8e80728ab49c1c7e09327bf0d6d999"
uuid = "b38be410-82b0-50bf-ab77-7b57e271db43"
version = "4.1.1"
[[deps.FreeType2_jll]]
deps = ["Artifacts", "Bzip2_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "5c1d8ae0efc6c2e7b1fc502cbe25def8f661b7bc"
uuid = "d7e528f0-a631-5988-bf34-fe36492bcfd7"
version = "2.13.2+0"
[[deps.FreeTypeAbstraction]]
deps = ["ColorVectorSpace", "Colors", "FreeType", "GeometryBasics"]
git-tree-sha1 = "2493cdfd0740015955a8e46de4ef28f49460d8bc"
uuid = "663a7486-cb36-511b-a19d-713bb74d65c9"
version = "0.10.3"
[[deps.FriBidi_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1ed150b39aebcc805c26b93a8d0122c940f64ce2"
uuid = "559328eb-81f9-559d-9380-de523a88c83c"
version = "1.0.14+0"
[[deps.Future]]
deps = ["Random"]
uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"
[[deps.GDAL]]
deps = ["CEnum", "GDAL_jll", "NetworkOptions", "PROJ_jll"]
git-tree-sha1 = "50bfa3f63b47ed1873c35f7fea19893a288f6785"
uuid = "add2ef01-049f-52c4-9ee2-e494f65e021a"
version = "1.7.1"
[[deps.GDAL_jll]]
deps = ["Arrow_jll", "Artifacts", "Expat_jll", "GEOS_jll", "HDF5_jll", "JLLWrappers", "LibCURL_jll", "LibPQ_jll", "Libdl", "Libtiff_jll", "NetCDF_jll", "OpenJpeg_jll", "PROJ_jll", "SQLite_jll", "Zlib_jll", "Zstd_jll", "libgeotiff_jll"]
git-tree-sha1 = "caa356adadf2b2d980414d494fb957d8f3521552"
uuid = "a7073274-a066-55f0-b90d-d619367d196c"
version = "301.800.400+0"
[[deps.GEOS_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "f561356a09a8f650b622f6697a30f2ad76fc29ce"
uuid = "d604d12d-fa86-5845-992e-78dc15976526"
version = "3.13.0+0"
[[deps.GMP_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "781609d7-10c4-51f6-84f2-b8444358ff6d"
version = "6.2.1+6"
[[deps.GeoFormatTypes]]
git-tree-sha1 = "59107c179a586f0fe667024c5eb7033e81333271"
uuid = "68eda718-8dee-11e9-39e7-89f7f65f511f"
version = "0.4.2"
[[deps.GeoInterface]]
deps = ["Extents", "GeoFormatTypes"]
git-tree-sha1 = "5921fc0704e40c024571eca551800c699f86ceb4"
uuid = "cf35fbd7-0cd7-5166-be24-54bfbe79505f"
version = "1.3.6"
[[deps.GeoInterfaceMakie]]
deps = ["GeoInterface", "GeometryBasics", "MakieCore"]
git-tree-sha1 = "3f87fd8414194dd25ea5d0371c3950985e3c8d86"
uuid = "0edc0954-3250-4c18-859d-ec71c1660c08"
version = "0.1.8"
[[deps.GeoInterfaceRecipes]]
deps = ["GeoInterface", "RecipesBase"]
git-tree-sha1 = "fb1156076f24f1dfee45b3feadb31d05730a49ac"
uuid = "0329782f-3d07-4b52-b9f6-d3137cf03c7a"
version = "1.0.2"
[[deps.GeometryBasics]]
deps = ["EarCut_jll", "Extents", "GeoInterface", "IterTools", "LinearAlgebra", "StaticArrays", "StructArrays", "Tables"]
git-tree-sha1 = "b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134"
uuid = "5c1252a2-5f33-56bf-86c9-59e7332b4326"
version = "0.4.11"
[[deps.Gettext_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Libiconv_jll", "Pkg", "XML2_jll"]
git-tree-sha1 = "9b02998aba7bf074d14de89f9d37ca24a1a0b046"
uuid = "78b55507-aeef-58d4-861c-77aaff3498b1"
version = "0.21.0+0"
[[deps.Glib_jll]]
deps = ["Artifacts", "Gettext_jll", "JLLWrappers", "Libdl", "Libffi_jll", "Libiconv_jll", "Libmount_jll", "PCRE2_jll", "Zlib_jll"]
git-tree-sha1 = "7c82e6a6cd34e9d935e9aa4051b66c6ff3af59ba"
uuid = "7746bdde-850d-59dc-9ae8-88ece973131d"
version = "2.80.2+0"
[[deps.Glob]]
git-tree-sha1 = "97285bbd5230dd766e9ef6749b80fc617126d496"
uuid = "c27321d9-0574-5035-807b-f59d2c89b15c"
version = "1.3.1"
[[deps.GnuTLS_jll]]
deps = ["Artifacts", "GMP_jll", "JLLWrappers", "Libdl", "Nettle_jll", "P11Kit_jll", "Zlib_jll"]
git-tree-sha1 = "383db7d3f900f4c1f47a8a04115b053c095e48d3"
uuid = "0951126a-58fd-58f1-b5b3-b08c7c4a876d"
version = "3.8.4+0"
[[deps.Graphics]]
deps = ["Colors", "LinearAlgebra", "NaNMath"]
git-tree-sha1 = "d61890399bc535850c4bf08e4e0d3a7ad0f21cbd"
uuid = "a2bd30eb-e257-5431-a919-1863eab51364"
version = "1.1.2"
[[deps.Graphite2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "344bf40dcab1073aca04aa0df4fb092f920e4011"
uuid = "3b182d85-2403-5c21-9c21-1e1f0cc25472"
version = "1.3.14+0"
[[deps.GridLayoutBase]]
deps = ["GeometryBasics", "InteractiveUtils", "Observables"]
git-tree-sha1 = "fc713f007cff99ff9e50accba6373624ddd33588"
uuid = "3955a311-db13-416c-9275-1d80ed98e5e9"
version = "0.11.0"
[[deps.Grisu]]
git-tree-sha1 = "53bb909d1151e57e2484c3d1b53e19552b887fb2"
uuid = "42e2da0e-8278-4e71-bc24-59509adca0fe"
version = "1.0.2"
[[deps.HDF5_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LLVMOpenMP_jll", "LazyArtifacts", "LibCURL_jll", "Libdl", "MPICH_jll", "MPIPreferences", "MPItrampoline_jll", "MicrosoftMPI_jll", "OpenMPI_jll", "OpenSSL_jll", "TOML", "Zlib_jll", "libaec_jll"]
git-tree-sha1 = "38c8874692d48d5440d5752d6c74b0c6b0b60739"
uuid = "0234f1f7-429e-5d53-9886-15a909be8d59"
version = "1.14.2+1"
[[deps.HTTP]]
deps = ["Base64", "CodecZlib", "ConcurrentUtilities", "Dates", "ExceptionUnwrapping", "Logging", "LoggingExtras", "MbedTLS", "NetworkOptions", "OpenSSL", "Random", "SimpleBufferStream", "Sockets", "URIs", "UUIDs"]
git-tree-sha1 = "d1d712be3164d61d1fb98e7ce9bcbc6cc06b45ed"
uuid = "cd3eb016-35fb-5094-929b-558a96fad6f3"
version = "1.10.8"
[[deps.HarfBuzz_jll]]
deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "Glib_jll", "Graphite2_jll", "JLLWrappers", "Libdl", "Libffi_jll"]
git-tree-sha1 = "401e4f3f30f43af2c8478fc008da50096ea5240f"
uuid = "2e76f6c2-a576-52d4-95c1-20adfe4de566"
version = "8.3.1+0"
[[deps.HostCPUFeatures]]
deps = ["BitTwiddlingConvenienceFunctions", "IfElse", "Libdl", "Static"]
git-tree-sha1 = "8e070b599339d622e9a081d17230d74a5c473293"
uuid = "3e5b6fbb-0976-4d2c-9146-d79de83f2fb0"
version = "0.1.17"
[[deps.Hwloc_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "5e19e1e4fa3e71b774ce746274364aef0234634e"
uuid = "e33a78d0-f292-5ffc-b300-72abe9b543c8"
version = "2.11.1+0"
[[deps.HypergeometricFunctions]]
deps = ["LinearAlgebra", "OpenLibm_jll", "SpecialFunctions"]
git-tree-sha1 = "7c4195be1649ae622304031ed46a2f4df989f1eb"
uuid = "34004b35-14d8-5ef3-9330-4cdb6864b03a"
version = "0.3.24"
[[deps.Hyperscript]]
deps = ["Test"]
git-tree-sha1 = "179267cfa5e712760cd43dcae385d7ea90cc25a4"
uuid = "47d2ed2b-36de-50cf-bf87-49c2cf4b8b91"
version = "0.0.5"
[[deps.HypertextLiteral]]
deps = ["Tricks"]
git-tree-sha1 = "7134810b1afce04bbc1045ca1985fbe81ce17653"
uuid = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2"
version = "0.9.5"
[[deps.ICU_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "20b6765a3016e1fca0c9c93c80d50061b94218b7"
uuid = "a51ab1cf-af8e-5615-a023-bc2c838bba6b"
version = "69.1.0+0"
[[deps.IOCapture]]
deps = ["Logging", "Random"]
git-tree-sha1 = "b6d6bfdd7ce25b0f9b2f6b3dd56b2673a66c8770"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.2.5"
[[deps.IfElse]]
git-tree-sha1 = "debdd00ffef04665ccbb3e150747a77560e8fad1"
uuid = "615f187c-cbe4-4ef1-ba3b-2fcf58d6d173"
version = "0.1.1"
[[deps.ImageAxes]]
deps = ["AxisArrays", "ImageBase", "ImageCore", "Reexport", "SimpleTraits"]
git-tree-sha1 = "2e4520d67b0cef90865b3ef727594d2a58e0e1f8"
uuid = "2803e5a7-5153-5ecf-9a86-9b4c37f5f5ac"
version = "0.6.11"
[[deps.ImageBase]]
deps = ["ImageCore", "Reexport"]
git-tree-sha1 = "eb49b82c172811fd2c86759fa0553a2221feb909"
uuid = "c817782e-172a-44cc-b673-b171935fbb9e"
version = "0.1.7"
[[deps.ImageCore]]
deps = ["ColorVectorSpace", "Colors", "FixedPointNumbers", "MappedArrays", "MosaicViews", "OffsetArrays", "PaddedViews", "PrecompileTools", "Reexport"]
git-tree-sha1 = "b2a7eaa169c13f5bcae8131a83bc30eff8f71be0"
uuid = "a09fc81d-aa75-5fe9-8630-4744c3626534"
version = "0.10.2"
[[deps.ImageIO]]
deps = ["FileIO", "IndirectArrays", "JpegTurbo", "LazyModules", "Netpbm", "OpenEXR", "PNGFiles", "QOI", "Sixel", "TiffImages", "UUIDs"]
git-tree-sha1 = "437abb322a41d527c197fa800455f79d414f0a3c"
uuid = "82e4d734-157c-48bb-816b-45c225c6df19"
version = "0.6.8"
[[deps.ImageMetadata]]
deps = ["AxisArrays", "ImageAxes", "ImageBase", "ImageCore"]
git-tree-sha1 = "355e2b974f2e3212a75dfb60519de21361ad3cb7"
uuid = "bc367c6b-8a6b-528e-b4bd-a4b897500b49"
version = "0.9.9"
[[deps.Imath_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "0936ba688c6d201805a83da835b55c61a180db52"
uuid = "905a6f67-0a94-5f89-b386-d35d92009cd1"
version = "3.1.11+0"
[[deps.IndirectArrays]]
git-tree-sha1 = "012e604e1c7458645cb8b436f8fba789a51b257f"
uuid = "9b13fd28-a010-5f03-acff-a1bbcff69959"
version = "1.0.0"
[[deps.Inflate]]
git-tree-sha1 = "d1b1b796e47d94588b3757fe84fbf65a5ec4a80d"
uuid = "d25df0c9-e2be-5dd7-82c8-3ad0b3e990b9"
version = "0.1.5"
[[deps.InlineStrings]]
git-tree-sha1 = "45521d31238e87ee9f9732561bfee12d4eebd52d"
uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48"
version = "1.4.2"
[deps.InlineStrings.extensions]
ArrowTypesExt = "ArrowTypes"
ParsersExt = "Parsers"
[deps.InlineStrings.weakdeps]
ArrowTypes = "31f734f8-188a-4ce0-8406-c8a06bd891cd"
Parsers = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
[[deps.IntelOpenMP_jll]]
deps = ["Artifacts", "JLLWrappers", "LazyArtifacts", "Libdl"]
git-tree-sha1 = "10bd689145d2c3b2a9844005d01087cc1194e79e"
uuid = "1d5cc7b8-4909-519e-a0f8-d0f5ad9712d0"
version = "2024.2.1+0"
[[deps.InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[deps.Interpolations]]
deps = ["Adapt", "AxisAlgorithms", "ChainRulesCore", "LinearAlgebra", "OffsetArrays", "Random", "Ratios", "Requires", "SharedArrays", "SparseArrays", "StaticArrays", "WoodburyMatrices"]
git-tree-sha1 = "88a101217d7cb38a7b481ccd50d21876e1d1b0e0"
uuid = "a98d9a8b-a2ab-59e6-89dd-64a1c18fca59"
version = "0.15.1"
weakdeps = ["Unitful"]
[deps.Interpolations.extensions]
InterpolationsUnitfulExt = "Unitful"
[[deps.IntervalArithmetic]]
deps = ["CRlibm_jll", "MacroTools", "RoundingEmulator"]
git-tree-sha1 = "fe30dec78e68f27fc416901629c6e24e9d5f057b"
uuid = "d1acc4aa-44c8-5952-acd4-ba5d80a2a253"
version = "0.22.16"
[deps.IntervalArithmetic.extensions]
IntervalArithmeticDiffRulesExt = "DiffRules"
IntervalArithmeticForwardDiffExt = "ForwardDiff"
IntervalArithmeticIntervalSetsExt = "IntervalSets"
IntervalArithmeticLinearAlgebraExt = "LinearAlgebra"
IntervalArithmeticRecipesBaseExt = "RecipesBase"
[deps.IntervalArithmetic.weakdeps]
DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
IntervalSets = "8197267c-284f-5f27-9208-e0e47529a953"
LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
RecipesBase = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
[[deps.IntervalSets]]
git-tree-sha1 = "dba9ddf07f77f60450fe5d2e2beb9854d9a49bd0"
uuid = "8197267c-284f-5f27-9208-e0e47529a953"
version = "0.7.10"
weakdeps = ["Random", "RecipesBase", "Statistics"]
[deps.IntervalSets.extensions]
IntervalSetsRandomExt = "Random"
IntervalSetsRecipesBaseExt = "RecipesBase"
IntervalSetsStatisticsExt = "Statistics"
[[deps.InvertedIndices]]
git-tree-sha1 = "0dc7b50b8d436461be01300fd8cd45aa0274b038"
uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f"
version = "1.3.0"
[[deps.IrrationalConstants]]
git-tree-sha1 = "630b497eafcc20001bba38a4651b327dcfc491d2"
uuid = "92d709cd-6900-40b7-9082-c6be49f344b6"
version = "0.2.2"
[[deps.Isoband]]
deps = ["isoband_jll"]
git-tree-sha1 = "f9b6d97355599074dc867318950adaa6f9946137"
uuid = "f1662d9f-8043-43de-a69a-05efc1cc6ff4"
version = "0.1.1"
[[deps.IterTools]]
git-tree-sha1 = "42d5f897009e7ff2cf88db414a389e5ed1bdd023"
uuid = "c8e1da08-722c-5040-9ed9-7db0dc04731e"
version = "1.10.0"
[[deps.IteratorInterfaceExtensions]]
git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
uuid = "82899510-4779-5014-852e-03e436cf321d"
version = "1.0.0"
[[deps.JLD2]]
deps = ["FileIO", "MacroTools", "Mmap", "OrderedCollections", "PrecompileTools", "Requires", "TranscodingStreams"]
git-tree-sha1 = "a0746c21bdc986d0dc293efa6b1faee112c37c28"
uuid = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
version = "0.4.53"
[[deps.JLLWrappers]]
deps = ["Artifacts", "Preferences"]
git-tree-sha1 = "f389674c99bfcde17dc57454011aa44d5a260a40"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.6.0"
[[deps.JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "31e996f0a15c7b280ba9f76636b3ff9e2ae58c9a"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.4"
[[deps.JpegTurbo]]
deps = ["CEnum", "FileIO", "ImageCore", "JpegTurbo_jll", "TOML"]
git-tree-sha1 = "fa6d0bcff8583bac20f1ffa708c3913ca605c611"
uuid = "b835a17e-a41a-41e7-81f0-2f016b05efe0"
version = "0.1.5"
[[deps.JpegTurbo_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "c84a835e1a09b289ffcd2271bf2a337bbdda6637"
uuid = "aacddb02-875f-59d6-b918-886e6ef4fbf8"
version = "3.0.3+0"
[[deps.KahanSummation]]
git-tree-sha1 = "6292e7878fe190651e74148edb11356dbbc2e194"
uuid = "8e2b3108-d4c1-50be-a7a2-16352aec75c3"
version = "0.3.1"
[[deps.Kerberos_krb5_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "60274b4ab38e8d1248216fe6b6ace75ae09b0502"
uuid = "b39eb1a6-c29a-53d7-8c32-632cd16f18da"
version = "1.19.3+0"
[[deps.KernelDensity]]
deps = ["Distributions", "DocStringExtensions", "FFTW", "Interpolations", "StatsBase"]
git-tree-sha1 = "7d703202e65efa1369de1279c162b915e245eed1"
uuid = "5ab0869b-81aa-558d-bb23-cbf5423bbe9b"
version = "0.6.9"
[[deps.LAME_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "170b660facf5df5de098d866564877e119141cbd"
uuid = "c1c5ebd0-6772-5130-a774-d5fcae4a789d"
version = "3.100.2+0"
[[deps.LERC_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "bf36f528eec6634efc60d7ec062008f171071434"
uuid = "88015f11-f218-50d7-93a8-a6af411a945d"
version = "3.0.0+1"
[[deps.LLVMOpenMP_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e16271d212accd09d52ee0ae98956b8a05c4b626"
uuid = "1d63c593-3942-5779-bab2-d838dc0a180e"
version = "17.0.6+0"
[[deps.LRUCache]]
git-tree-sha1 = "b3cc6698599b10e652832c2f23db3cab99d51b59"
uuid = "8ac3fa9e-de4c-5943-b1dc-09c6b5f20637"
version = "1.6.1"
weakdeps = ["Serialization"]
[deps.LRUCache.extensions]
SerializationExt = ["Serialization"]
[[deps.LZO_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "70c5da094887fd2cae843b8db33920bac4b6f07d"
uuid = "dd4b983a-f0e5-5f8d-a1b7-129d4a5fb1ac"
version = "2.10.2+0"
[[deps.LaTeXStrings]]
git-tree-sha1 = "50901ebc375ed41dbf8058da26f9de442febbbec"
uuid = "b964fa9f-0449-5b57-a5c2-d3ea65f4040f"
version = "1.3.1"
[[deps.LayoutPointers]]
deps = ["ArrayInterface", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface"]
git-tree-sha1 = "a9eaadb366f5493a5654e843864c13d8b107548c"
uuid = "10f19ff3-798f-405d-979b-55457f8fc047"
version = "0.1.17"
[[deps.LazyArtifacts]]
deps = ["Artifacts", "Pkg"]
uuid = "4af54fe1-eca0-43a8-85a7-787d91b784e3"
[[deps.LazyModules]]
git-tree-sha1 = "a560dd966b386ac9ae60bdd3a3d3a326062d3c3e"
uuid = "8cdb02fc-e678-4876-92c5-9defec4f444e"
version = "0.3.1"
[[deps.LibCURL]]
deps = ["LibCURL_jll", "MozillaCACerts_jll"]
uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
version = "0.6.4"
[[deps.LibCURL_jll]]
deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"]
uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
version = "8.4.0+0"
[[deps.LibGit2]]
deps = ["Base64", "LibGit2_jll", "NetworkOptions", "Printf", "SHA"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[deps.LibGit2_jll]]
deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll"]
uuid = "e37daf67-58a4-590a-8e99-b0245dd2ffc5"
version = "1.6.4+0"
[[deps.LibPQ_jll]]
deps = ["Artifacts", "ICU_jll", "JLLWrappers", "Kerberos_krb5_jll", "Libdl", "OpenSSL_jll", "Zstd_jll"]
git-tree-sha1 = "09163f837936c8cc44f4691cb41d805eb1769642"
uuid = "08be9ffa-1c94-5ee5-a977-46a84ec9b350"
version = "16.0.0+0"
[[deps.LibSSH2_jll]]
deps = ["Artifacts", "Libdl", "MbedTLS_jll"]
uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8"
version = "1.11.0+1"
[[deps.Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[deps.Libffi_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "0b4a5d71f3e5200a7dff793393e09dfc2d874290"
uuid = "e9f186c6-92d2-5b65-8a66-fee21dc1b490"
version = "3.2.2+1"
[[deps.Libgcrypt_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libgpg_error_jll"]
git-tree-sha1 = "9fd170c4bbfd8b935fdc5f8b7aa33532c991a673"
uuid = "d4300ac3-e22c-5743-9152-c294e39db1e4"
version = "1.8.11+0"
[[deps.Libgpg_error_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "fbb1f2bef882392312feb1ede3615ddc1e9b99ed"
uuid = "7add5ba3-2f88-524e-9cd5-f83b8a55f7b8"
version = "1.49.0+0"
[[deps.Libiconv_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "f9557a255370125b405568f9767d6d195822a175"
uuid = "94ce4f54-9a6c-5748-9c1c-f9c7231a4531"
version = "1.17.0+0"
[[deps.Libmount_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "0c4f9c4f1a50d8f35048fa0532dabbadf702f81e"
uuid = "4b2f31a3-9ecc-558c-b454-b3730dcb73e9"
version = "2.40.1+0"
[[deps.Libtiff_jll]]
deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "LERC_jll", "Libdl", "XZ_jll", "Zlib_jll", "Zstd_jll"]
git-tree-sha1 = "2da088d113af58221c52828a80378e16be7d037a"
uuid = "89763e89-9b03-5906-acba-b20f662cd828"
version = "4.5.1+1"
[[deps.Libuuid_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "5ee6203157c120d79034c748a2acba45b82b8807"
uuid = "38a345b3-de98-5d2b-a5d3-14cd9215e700"
version = "2.40.1+0"
[[deps.LinearAlgebra]]
deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[deps.LittleCMS_jll]]
deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll"]
git-tree-sha1 = "fa7fd067dca76cadd880f1ca937b4f387975a9f5"
uuid = "d3a379c0-f9a3-5b72-a4c0-6bf4d2e8af0f"
version = "2.16.0+0"
[[deps.LogExpFunctions]]
deps = ["DocStringExtensions", "IrrationalConstants", "LinearAlgebra"]
git-tree-sha1 = "a2d09619db4e765091ee5c6ffe8872849de0feea"
uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688"
version = "0.3.28"
[deps.LogExpFunctions.extensions]
LogExpFunctionsChainRulesCoreExt = "ChainRulesCore"
LogExpFunctionsChangesOfVariablesExt = "ChangesOfVariables"
LogExpFunctionsInverseFunctionsExt = "InverseFunctions"
[deps.LogExpFunctions.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
ChangesOfVariables = "9e997f8a-9a97-42d5-a9f1-ce6bfc15e2c0"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[deps.LoggingExtras]]
deps = ["Dates", "Logging"]
git-tree-sha1 = "c1dd6d7978c12545b4179fb6153b9250c96b0075"
uuid = "e6f89c97-d47a-5376-807f-9c37f3926c36"
version = "1.0.3"
[[deps.LoopVectorization]]
deps = ["ArrayInterface", "CPUSummary", "CloseOpenIntervals", "DocStringExtensions", "HostCPUFeatures", "IfElse", "LayoutPointers", "LinearAlgebra", "OffsetArrays", "PolyesterWeave", "PrecompileTools", "SIMDTypes", "SLEEFPirates", "Static", "StaticArrayInterface", "ThreadingUtilities", "UnPack", "VectorizationBase"]
git-tree-sha1 = "8084c25a250e00ae427a379a5b607e7aed96a2dd"
uuid = "bdcacae8-1622-11e9-2a5c-532679323890"
version = "0.12.171"
[deps.LoopVectorization.extensions]
ForwardDiffExt = ["ChainRulesCore", "ForwardDiff"]
SpecialFunctionsExt = "SpecialFunctions"
[deps.LoopVectorization.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b"
[[deps.Lz4_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "7f26c8fc5229e68484e0b3447312c98e16207d11"
uuid = "5ced341a-0733-55b8-9ab6-a4889d929147"
version = "1.10.0+0"
[[deps.MIMEs]]
git-tree-sha1 = "65f28ad4b594aebe22157d6fac869786a255b7eb"
uuid = "6c6e2e6c-3030-632d-7369-2d6c69616d65"
version = "0.1.4"
[[deps.MKL_jll]]
deps = ["Artifacts", "IntelOpenMP_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "oneTBB_jll"]
git-tree-sha1 = "f046ccd0c6db2832a9f639e2c669c6fe867e5f4f"
uuid = "856f044c-d86e-5d09-b602-aeab76dc8ba7"
version = "2024.2.0+0"
[[deps.MPICH_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "Hwloc_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "MPIPreferences", "TOML"]
git-tree-sha1 = "19d4bd098928a3263693991500d05d74dbdc2004"
uuid = "7cb0a576-ebde-5e09-9194-50597f1243b4"
version = "4.2.2+0"
[[deps.MPIPreferences]]
deps = ["Libdl", "Preferences"]
git-tree-sha1 = "c105fe467859e7f6e9a852cb15cb4301126fac07"
uuid = "3da0fdf6-3ccc-4f1b-acd9-58baa6c99267"
version = "0.1.11"
[[deps.MPItrampoline_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "MPIPreferences", "TOML"]
git-tree-sha1 = "8c35d5420193841b2f367e658540e8d9e0601ed0"
uuid = "f1f71cc9-e9ae-5b93-9b94-4fe0e1ad3748"
version = "5.4.0+0"
[[deps.MacroTools]]
deps = ["Markdown", "Random"]
git-tree-sha1 = "2fa9ee3e63fd3a4f7a9a4f4744a52f4856de82df"
uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09"
version = "0.5.13"
[[deps.Makie]]
deps = ["Animations", "Base64", "CRC32c", "ColorBrewer", "ColorSchemes", "ColorTypes", "Colors", "Contour", "Dates", "DelaunayTriangulation", "Distributions", "DocStringExtensions", "Downloads", "FFMPEG_jll", "FileIO", "FilePaths", "FixedPointNumbers", "Format", "FreeType", "FreeTypeAbstraction", "GeometryBasics", "GridLayoutBase", "ImageIO", "InteractiveUtils", "IntervalSets", "Isoband", "KernelDensity", "LaTeXStrings", "LinearAlgebra", "MacroTools", "MakieCore", "Markdown", "MathTeXEngine", "Observables", "OffsetArrays", "Packing", "PlotUtils", "PolygonOps", "PrecompileTools", "Printf", "REPL", "Random", "RelocatableFolders", "Scratch", "ShaderAbstractions", "Showoff", "SignedDistanceFields", "SparseArrays", "Statistics", "StatsBase", "StatsFuns", "StructArrays", "TriplotBase", "UnicodeFun", "Unitful"]
git-tree-sha1 = "204f06860af9008fa08b3a4842f48116e1209a2c"
uuid = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
version = "0.21.9"
[[deps.MakieCore]]
deps = ["ColorTypes", "GeometryBasics", "IntervalSets", "Observables"]
git-tree-sha1 = "b0e2e3473af351011e598f9219afb521121edd2b"
uuid = "20f20a25-4f0e-4fdf-b5d1-57303727442b"
version = "0.8.6"
[[deps.ManualMemory]]
git-tree-sha1 = "bcaef4fc7a0cfe2cba636d84cda54b5e4e4ca3cd"
uuid = "d125e4d3-2237-4719-b19c-fa641b8a4667"
version = "0.1.8"
[[deps.MappedArrays]]
git-tree-sha1 = "2dab0221fe2b0f2cb6754eaa743cc266339f527e"
uuid = "dbb5928d-eab1-5f90-85c2-b9b0edb7c900"
version = "0.4.2"
[[deps.Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[deps.MathTeXEngine]]
deps = ["AbstractTrees", "Automa", "DataStructures", "FreeTypeAbstraction", "GeometryBasics", "LaTeXStrings", "REPL", "RelocatableFolders", "UnicodeFun"]
git-tree-sha1 = "e1641f32ae592e415e3dbae7f4a188b5316d4b62"
uuid = "0a4f8689-d25c-4efe-a92b-7142dfc1aa53"
version = "0.6.1"
[[deps.MbedTLS]]
deps = ["Dates", "MbedTLS_jll", "MozillaCACerts_jll", "NetworkOptions", "Random", "Sockets"]
git-tree-sha1 = "c067a280ddc25f196b5e7df3877c6b226d390aaf"
uuid = "739be429-bea8-5141-9913-cc70e7f3736d"
version = "1.1.9"
[[deps.MbedTLS_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
version = "2.28.2+1"
[[deps.MeshArrays]]
deps = ["CatViews", "Dates", "LazyArtifacts", "NearestNeighbors", "Pkg", "Printf", "SparseArrays", "Statistics", "Unitful"]
git-tree-sha1 = "c1c5c8afde5293b643aea2d5ccc8554b1e593bf0"
uuid = "cb8c808f-1acf-59a3-9d2b-6e38d009f683"
version = "0.3.11"
[deps.MeshArrays.extensions]
MeshArraysDataDepsExt = ["DataDeps"]
MeshArraysGeoJSONExt = ["GeoJSON"]
MeshArraysJLD2Ext = ["JLD2"]
MeshArraysMakieExt = ["Makie"]
MeshArraysProjExt = ["Proj"]
MeshArraysShapefileExt = ["Shapefile"]
[deps.MeshArrays.weakdeps]
DataDeps = "124859b0-ceae-595e-8997-d05f6a7a8dfe"
GeoJSON = "61d90e0f-e114-555e-ac52-39dfb47a3ef9"
JLD2 = "033835bb-8acc-5ee8-8aae-3f567f8a3819"
Makie = "ee78f7c6-11fb-53f2-987a-cfe4a2b5a57a"
Proj = "c94c279d-25a6-4763-9509-64d165bea63e"
Shapefile = "8e980c4a-a4fe-5da2-b3a7-4b4b0353a2f4"
[[deps.MicrosoftMPI_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "f12a29c4400ba812841c6ace3f4efbb6dbb3ba01"
uuid = "9237b28f-5490-5468-be7b-bb81f5f5e6cf"
version = "10.1.4+2"
[[deps.Missings]]
deps = ["DataAPI"]
git-tree-sha1 = "ec4f7fbeab05d7747bdf98eb74d130a2a2ed298d"
uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28"
version = "1.2.0"
[[deps.Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[deps.MosaicViews]]
deps = ["MappedArrays", "OffsetArrays", "PaddedViews", "StackViews"]
git-tree-sha1 = "7b86a5d4d70a9f5cdf2dacb3cbe6d251d1a61dbe"
uuid = "e94cdb99-869f-56ef-bcf0-1ae2bcbe0389"
version = "0.3.4"
[[deps.MozillaCACerts_jll]]
uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
version = "2023.1.10"
[[deps.NCDatasets]]
deps = ["CFTime", "CommonDataModel", "DataStructures", "Dates", "DiskArrays", "NetCDF_jll", "NetworkOptions", "Printf"]
git-tree-sha1 = "77df6d3708ec0eb3441551e1f20f7503b37c2393"
uuid = "85f8d34a-cbdd-5861-8df4-14fed0d494ab"
version = "0.14.5"
[[deps.NaNMath]]
deps = ["OpenLibm_jll"]
git-tree-sha1 = "0877504529a3e5c3343c6f8b4c0381e57e4387e4"
uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3"
version = "1.0.2"
[[deps.NearestNeighbors]]
deps = ["Distances", "StaticArrays"]
git-tree-sha1 = "91a67b4d73842da90b526011fa85c5c4c9343fe0"
uuid = "b8a86587-4115-5ab1-83bc-aa920d37bbce"
version = "0.4.18"
[[deps.NetCDF_jll]]
deps = ["Artifacts", "Blosc_jll", "Bzip2_jll", "HDF5_jll", "JLLWrappers", "LibCURL_jll", "Libdl", "OpenMPI_jll", "XML2_jll", "Zlib_jll", "Zstd_jll", "libzip_jll"]
git-tree-sha1 = "a8af1798e4eb9ff768ce7fdefc0e957097793f15"
uuid = "7243133f-43d8-5620-bbf4-c2c921802cf3"
version = "400.902.209+0"
[[deps.Netpbm]]
deps = ["FileIO", "ImageCore", "ImageMetadata"]
git-tree-sha1 = "d92b107dbb887293622df7697a2223f9f8176fcd"
uuid = "f09324ee-3d7c-5217-9330-fc30815ba969"
version = "1.1.1"
[[deps.Nettle_jll]]
deps = ["Artifacts", "GMP_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "eca63e3847dad608cfa6a3329b95ef674c7160b4"
uuid = "4c82536e-c426-54e4-b420-14f461c4ed8b"
version = "3.7.2+0"
[[deps.NetworkOptions]]
uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
version = "1.2.0"
[[deps.Observables]]
git-tree-sha1 = "7438a59546cf62428fc9d1bc94729146d37a7225"
uuid = "510215fc-4207-5dde-b226-833fc4488ee2"
version = "0.5.5"
[[deps.OffsetArrays]]
git-tree-sha1 = "1a27764e945a152f7ca7efa04de513d473e9542e"
uuid = "6fe1bfb0-de20-5000-8ca7-80f57d26f881"
version = "1.14.1"
weakdeps = ["Adapt"]
[deps.OffsetArrays.extensions]
OffsetArraysAdaptExt = "Adapt"
[[deps.Ogg_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "887579a3eb005446d514ab7aeac5d1d027658b8f"
uuid = "e7412a2a-1a6e-54c0-be00-318e2571c051"
version = "1.3.5+1"
[[deps.OpenBLAS_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"]
uuid = "4536629a-c528-5b80-bd46-f80d51c5b363"
version = "0.3.23+4"
[[deps.OpenEXR]]
deps = ["Colors", "FileIO", "OpenEXR_jll"]
git-tree-sha1 = "327f53360fdb54df7ecd01e96ef1983536d1e633"
uuid = "52e1d378-f018-4a11-a4be-720524705ac7"
version = "0.3.2"
[[deps.OpenEXR_jll]]
deps = ["Artifacts", "Imath_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "8292dd5c8a38257111ada2174000a33745b06d4e"
uuid = "18a262bb-aa17-5467-a713-aee519bc75cb"
version = "3.2.4+0"
[[deps.OpenJpeg_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libtiff_jll", "LittleCMS_jll", "libpng_jll"]
git-tree-sha1 = "f4cb457ffac5f5cf695699f82c537073958a6a6c"
uuid = "643b3616-a352-519d-856d-80112ee9badc"
version = "2.5.2+0"
[[deps.OpenLibm_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "05823500-19ac-5b8b-9628-191a04bc5112"
version = "0.8.1+2"
[[deps.OpenMPI_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "Hwloc_jll", "JLLWrappers", "LazyArtifacts", "Libdl", "MPIPreferences", "TOML", "Zlib_jll"]
git-tree-sha1 = "bfce6d523861a6c562721b262c0d1aaeead2647f"
uuid = "fe0851c0-eecd-5654-98d4-656369965a5c"
version = "5.0.5+0"
[[deps.OpenSSL]]
deps = ["BitFlags", "Dates", "MozillaCACerts_jll", "OpenSSL_jll", "Sockets"]
git-tree-sha1 = "38cb508d080d21dc1128f7fb04f20387ed4c0af4"
uuid = "4d8831e6-92b7-49fb-bdf8-b643e874388c"
version = "1.4.3"
[[deps.OpenSSL_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1b35263570443fdd9e76c76b7062116e2f374ab8"
uuid = "458c3c95-2e84-50aa-8efc-19380b2a3a95"
version = "3.0.15+0"
[[deps.OpenSpecFun_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1"
uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e"
version = "0.5.5+0"
[[deps.Opus_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "6703a85cb3781bd5909d48730a67205f3f31a575"
uuid = "91d4177d-7536-5919-b921-800302f37372"
version = "1.3.3+0"
[[deps.OrderedCollections]]
git-tree-sha1 = "dfdf5519f235516220579f949664f1bf44e741c5"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.6.3"
[[deps.P11Kit_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "2cd396108e178f3ae8dedbd8e938a18726ab2fbf"
uuid = "c2071276-7c44-58a7-b746-946036e04d0a"
version = "0.24.1+0"
[[deps.PCRE2_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "efcefdf7-47ab-520b-bdef-62a2eaa19f15"
version = "10.42.0+1"
[[deps.PDMats]]
deps = ["LinearAlgebra", "SparseArrays", "SuiteSparse"]
git-tree-sha1 = "949347156c25054de2db3b166c52ac4728cbad65"
uuid = "90014a1f-27ba-587c-ab20-58faa44d9150"
version = "0.11.31"
[[deps.PNGFiles]]
deps = ["Base64", "CEnum", "ImageCore", "IndirectArrays", "OffsetArrays", "libpng_jll"]
git-tree-sha1 = "67186a2bc9a90f9f85ff3cc8277868961fb57cbd"
uuid = "f57f5aa1-a3ce-4bc8-8ab9-96f992907883"
version = "0.4.3"
[[deps.PROJ_jll]]
deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "Libtiff_jll", "SQLite_jll"]
git-tree-sha1 = "84aa844bd56f62282116b413fbefb45e370e54d6"
uuid = "58948b4f-47e0-5654-a9ad-f609743f8632"
version = "901.300.0+1"
[[deps.Packing]]
deps = ["GeometryBasics"]
git-tree-sha1 = "ec3edfe723df33528e085e632414499f26650501"
uuid = "19eb6ba3-879d-56ad-ad62-d5c202156566"
version = "0.5.0"
[[deps.PaddedViews]]
deps = ["OffsetArrays"]
git-tree-sha1 = "0fac6313486baae819364c52b4f483450a9d793f"
uuid = "5432bcbf-9aad-5242-b902-cca2824c8663"
version = "0.5.12"
[[deps.Pango_jll]]
deps = ["Artifacts", "Cairo_jll", "Fontconfig_jll", "FreeType2_jll", "FriBidi_jll", "Glib_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e127b609fb9ecba6f201ba7ab753d5a605d53801"
uuid = "36c8627f-9965-5494-a995-c6b170f724f3"
version = "1.54.1+0"
[[deps.Parsers]]
deps = ["Dates", "PrecompileTools", "UUIDs"]
git-tree-sha1 = "8489905bcdbcfac64d1daa51ca07c0d8f0283821"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "2.8.1"
[[deps.Pixman_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "LLVMOpenMP_jll", "Libdl"]
git-tree-sha1 = "35621f10a7531bc8fa58f74610b1bfb70a3cfc6b"
uuid = "30392449-352a-5448-841d-b1acce4e97dc"
version = "0.43.4+0"
[[deps.Pkg]]
deps = ["Artifacts", "Dates", "Downloads", "FileWatching", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
version = "1.10.0"
[[deps.PkgVersion]]
deps = ["Pkg"]
git-tree-sha1 = "f9501cc0430a26bc3d156ae1b5b0c1b47af4d6da"
uuid = "eebad327-c553-4316-9ea0-9fa01ccd7688"
version = "0.3.3"
[[deps.PlotUtils]]
deps = ["ColorSchemes", "Colors", "Dates", "PrecompileTools", "Printf", "Random", "Reexport", "Statistics"]
git-tree-sha1 = "7b1a9df27f072ac4c9c7cbe5efb198489258d1f5"
uuid = "995b91a9-d308-5afd-9ec6-746e21dbc043"
version = "1.4.1"
[[deps.PlutoUI]]
deps = ["AbstractPlutoDingetjes", "Base64", "ColorTypes", "Dates", "FixedPointNumbers", "Hyperscript", "HypertextLiteral", "IOCapture", "InteractiveUtils", "JSON", "Logging", "MIMEs", "Markdown", "Random", "Reexport", "URIs", "UUIDs"]
git-tree-sha1 = "eba4810d5e6a01f612b948c9fa94f905b49087b0"
uuid = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
version = "0.7.60"
[[deps.Polyester]]
deps = ["ArrayInterface", "BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "ManualMemory", "PolyesterWeave", "Static", "StaticArrayInterface", "StrideArraysCore", "ThreadingUtilities"]
git-tree-sha1 = "6d38fea02d983051776a856b7df75b30cf9a3c1f"
uuid = "f517fe37-dbe3-4b94-8317-1923a5111588"
version = "0.7.16"
[[deps.PolyesterWeave]]
deps = ["BitTwiddlingConvenienceFunctions", "CPUSummary", "IfElse", "Static", "ThreadingUtilities"]
git-tree-sha1 = "645bed98cd47f72f67316fd42fc47dee771aefcd"
uuid = "1d0040c9-8b98-4ee7-8388-3f51789ca0ad"
version = "0.2.2"
[[deps.PolygonOps]]
git-tree-sha1 = "77b3d3605fc1cd0b42d95eba87dfcd2bf67d5ff6"
uuid = "647866c9-e3ac-4575-94e7-e3d426903924"
version = "0.1.2"
[[deps.PooledArrays]]
deps = ["DataAPI", "Future"]
git-tree-sha1 = "36d8b4b899628fb92c2749eb488d884a926614d3"
uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
version = "1.4.3"
[[deps.PrecompileTools]]
deps = ["Preferences"]
git-tree-sha1 = "5aa36f7049a63a1528fe8f7c3f2113413ffd4e1f"
uuid = "aea7be01-6a6a-4083-8856-8a6e6704d82a"
version = "1.2.1"
[[deps.Preferences]]
deps = ["TOML"]
git-tree-sha1 = "9306f6085165d270f7e3db02af26a400d580f5c6"
uuid = "21216c6a-2e73-6563-6e65-726566657250"
version = "1.4.3"
[[deps.PrettyTables]]
deps = ["Crayons", "LaTeXStrings", "Markdown", "PrecompileTools", "Printf", "Reexport", "StringManipulation", "Tables"]
git-tree-sha1 = "66b20dd35966a748321d3b2537c4584cf40387c7"
uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
version = "2.3.2"
[[deps.Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[deps.ProgressMeter]]
deps = ["Distributed", "Printf"]
git-tree-sha1 = "8f6bc219586aef8baf0ff9a5fe16ee9c70cb65e4"
uuid = "92933f4c-e287-5a05-a399-4b506db050ca"
version = "1.10.2"
[[deps.PtrArrays]]
git-tree-sha1 = "77a42d78b6a92df47ab37e177b2deac405e1c88f"
uuid = "43287f4e-b6f4-7ad1-bb20-aadabca52c3d"
version = "1.2.1"
[[deps.QOI]]
deps = ["ColorTypes", "FileIO", "FixedPointNumbers"]
git-tree-sha1 = "18e8f4d1426e965c7b532ddd260599e1510d26ce"
uuid = "4b34888f-f399-49d4-9bb3-47ed5cae4e65"
version = "1.0.0"
[[deps.QuadGK]]
deps = ["DataStructures", "LinearAlgebra"]
git-tree-sha1 = "1d587203cf851a51bf1ea31ad7ff89eff8d625ea"
uuid = "1fd47b50-473d-5c70-9696-f719f8f3bcdc"
version = "2.11.0"
[deps.QuadGK.extensions]
QuadGKEnzymeExt = "Enzyme"
[deps.QuadGK.weakdeps]
Enzyme = "7da242da-08ed-463a-9acd-ee780be4f1d9"
[[deps.REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[deps.Random]]
deps = ["SHA"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[deps.RangeArrays]]
git-tree-sha1 = "b9039e93773ddcfc828f12aadf7115b4b4d225f5"
uuid = "b3c3ace0-ae52-54e7-9d0b-2c1406fd6b9d"
version = "0.3.2"
[[deps.Ratios]]
deps = ["Requires"]
git-tree-sha1 = "1342a47bf3260ee108163042310d26f2be5ec90b"
uuid = "c84ed2f1-dad5-54f0-aa8e-dbefe2724439"
version = "0.4.5"
weakdeps = ["FixedPointNumbers"]
[deps.Ratios.extensions]
RatiosFixedPointNumbersExt = "FixedPointNumbers"
[[deps.RecipesBase]]
deps = ["PrecompileTools"]
git-tree-sha1 = "5c3d09cc4f31f5fc6af001c250bf1278733100ff"
uuid = "3cdcf5f2-1ef4-517c-9805-6587b60abb01"
version = "1.3.4"
[[deps.Reexport]]
git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b"
uuid = "189a3867-3050-52da-a836-e630ba90ab69"
version = "1.2.2"
[[deps.RelocatableFolders]]
deps = ["SHA", "Scratch"]
git-tree-sha1 = "ffdaf70d81cf6ff22c2b6e733c900c3321cab864"
uuid = "05181044-ff0b-4ac5-8273-598c1e38db00"
version = "1.0.1"
[[deps.Requires]]
deps = ["UUIDs"]
git-tree-sha1 = "838a3a4188e2ded87a4f9f184b4b0d78a1e91cb7"
uuid = "ae029012-a4dd-5104-9daa-d747884805df"
version = "1.3.0"
[[deps.Rmath]]
deps = ["Random", "Rmath_jll"]
git-tree-sha1 = "f65dcb5fa46aee0cf9ed6274ccbd597adc49aa7b"
uuid = "79098fc4-a85e-5d69-aa6a-4863f24498fa"
version = "0.7.1"
[[deps.Rmath_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e60724fd3beea548353984dc61c943ecddb0e29a"
uuid = "f50d1b31-88e8-58de-be2c-1cc44531875f"
version = "0.4.3+0"
[[deps.RollingFunctions]]
deps = ["AccurateArithmetic", "FastBroadcast", "KahanSummation", "LinearAlgebra", "LoopVectorization", "Statistics", "StatsBase", "Tables"]
git-tree-sha1 = "4a54152985fea23b0b0e99a77566a87137221a0a"
uuid = "b0e4dd01-7b14-53d8-9b45-175a3e362653"
version = "0.8.0"
[[deps.RoundingEmulator]]
git-tree-sha1 = "40b9edad2e5287e05bd413a38f61a8ff55b9557b"
uuid = "5eaf0fd0-dfba-4ccb-bf02-d820a40db705"
version = "0.2.1"
[[deps.SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
version = "0.7.0"
[[deps.SIMD]]
deps = ["PrecompileTools"]
git-tree-sha1 = "2803cab51702db743f3fda07dd1745aadfbf43bd"
uuid = "fdea26ae-647d-5447-a871-4b548cad5224"
version = "3.5.0"
[[deps.SIMDTypes]]
git-tree-sha1 = "330289636fb8107c5f32088d2741e9fd7a061a5c"
uuid = "94e857df-77ce-4151-89e5-788b33177be4"
version = "0.1.0"
[[deps.SLEEFPirates]]
deps = ["IfElse", "Static", "VectorizationBase"]
git-tree-sha1 = "456f610ca2fbd1c14f5fcf31c6bfadc55e7d66e0"
uuid = "476501e8-09a2-5ece-8869-fb82de89a1fa"
version = "0.6.43"
[[deps.SQLite_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "004fffbe2711abdc7263a980bbb1af9620781dd9"
uuid = "76ed43ae-9a5d-5a62-8c75-30186b810ce8"
version = "3.45.3+0"
[[deps.Scratch]]
deps = ["Dates"]
git-tree-sha1 = "3bac05bc7e74a75fd9cba4295cde4045d9fe2386"
uuid = "6c6a2e73-6563-6170-7368-637461726353"
version = "1.2.1"
[[deps.SentinelArrays]]
deps = ["Dates", "Random"]
git-tree-sha1 = "ff11acffdb082493657550959d4feb4b6149e73a"
uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
version = "1.4.5"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[deps.ShaderAbstractions]]
deps = ["ColorTypes", "FixedPointNumbers", "GeometryBasics", "LinearAlgebra", "Observables", "StaticArrays", "StructArrays", "Tables"]
git-tree-sha1 = "79123bc60c5507f035e6d1d9e563bb2971954ec8"
uuid = "65257c39-d410-5151-9873-9b3e5be5013e"
version = "0.4.1"
[[deps.SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[deps.Showoff]]
deps = ["Dates", "Grisu"]
git-tree-sha1 = "91eddf657aca81df9ae6ceb20b959ae5653ad1de"
uuid = "992d4aef-0814-514b-bc4d-f2e9a6c4116f"
version = "1.0.3"
[[deps.SignedDistanceFields]]
deps = ["Random", "Statistics", "Test"]
git-tree-sha1 = "d263a08ec505853a5ff1c1ebde2070419e3f28e9"
uuid = "73760f76-fbc4-59ce-8f25-708e95d2df96"
version = "0.4.0"
[[deps.SimpleBufferStream]]
git-tree-sha1 = "874e8867b33a00e784c8a7e4b60afe9e037b74e1"
uuid = "777ac1f9-54b0-4bf8-805c-2214025038e7"
version = "1.1.0"
[[deps.SimpleTraits]]
deps = ["InteractiveUtils", "MacroTools"]
git-tree-sha1 = "5d7e3f4e11935503d3ecaf7186eac40602e7d231"
uuid = "699a6c99-e7fa-54fc-8d76-47d257e15c1d"
version = "0.9.4"
[[deps.Sixel]]
deps = ["Dates", "FileIO", "ImageCore", "IndirectArrays", "OffsetArrays", "REPL", "libsixel_jll"]
git-tree-sha1 = "2da10356e31327c7096832eb9cd86307a50b1eb6"
uuid = "45858cf5-a6b0-47a3-bbea-62219f50df47"
version = "0.1.3"
[[deps.Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[deps.SortingAlgorithms]]
deps = ["DataStructures"]
git-tree-sha1 = "66e0a8e672a0bdfca2c3f5937efb8538b9ddc085"
uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c"
version = "1.2.1"
[[deps.SparseArrays]]
deps = ["Libdl", "LinearAlgebra", "Random", "Serialization", "SuiteSparse_jll"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
version = "1.10.0"
[[deps.SpecialFunctions]]
deps = ["IrrationalConstants", "LogExpFunctions", "OpenLibm_jll", "OpenSpecFun_jll"]
git-tree-sha1 = "2f5d4697f21388cbe1ff299430dd169ef97d7e14"
uuid = "276daf66-3868-5448-9aa4-cd146d93841b"
version = "2.4.0"
weakdeps = ["ChainRulesCore"]
[deps.SpecialFunctions.extensions]
SpecialFunctionsChainRulesCoreExt = "ChainRulesCore"
[[deps.StackViews]]
deps = ["OffsetArrays"]
git-tree-sha1 = "46e589465204cd0c08b4bd97385e4fa79a0c770c"
uuid = "cae243ae-269e-4f55-b966-ac2d0dc13c15"
version = "0.1.1"
[[deps.Static]]
deps = ["CommonWorldInvalidations", "IfElse", "PrecompileTools"]
git-tree-sha1 = "87d51a3ee9a4b0d2fe054bdd3fc2436258db2603"
uuid = "aedffcd0-7271-4cad-89d0-dc628f76c6d3"
version = "1.1.1"
[[deps.StaticArrayInterface]]
deps = ["ArrayInterface", "Compat", "IfElse", "LinearAlgebra", "PrecompileTools", "Static"]
git-tree-sha1 = "96381d50f1ce85f2663584c8e886a6ca97e60554"
uuid = "0d7ed370-da01-4f52-bd93-41d350b8b718"
version = "1.8.0"
weakdeps = ["OffsetArrays", "StaticArrays"]
[deps.StaticArrayInterface.extensions]
StaticArrayInterfaceOffsetArraysExt = "OffsetArrays"
StaticArrayInterfaceStaticArraysExt = "StaticArrays"
[[deps.StaticArrays]]
deps = ["LinearAlgebra", "PrecompileTools", "Random", "StaticArraysCore"]
git-tree-sha1 = "eeafab08ae20c62c44c8399ccb9354a04b80db50"
uuid = "90137ffa-7385-5640-81b9-e52037218182"
version = "1.9.7"
weakdeps = ["ChainRulesCore", "Statistics"]
[deps.StaticArrays.extensions]
StaticArraysChainRulesCoreExt = "ChainRulesCore"
StaticArraysStatisticsExt = "Statistics"
[[deps.StaticArraysCore]]
git-tree-sha1 = "192954ef1208c7019899fbf8049e717f92959682"
uuid = "1e83bf80-4336-4d27-bf5d-d5a4f845583c"
version = "1.4.3"
[[deps.Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
version = "1.10.0"
[[deps.StatsAPI]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "1ff449ad350c9c4cbc756624d6f8a8c3ef56d3ed"
uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0"
version = "1.7.0"
[[deps.StatsBase]]
deps = ["DataAPI", "DataStructures", "LinearAlgebra", "LogExpFunctions", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"]
git-tree-sha1 = "5cf7606d6cef84b543b483848d4ae08ad9832b21"
uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
version = "0.34.3"
[[deps.StatsFuns]]
deps = ["HypergeometricFunctions", "IrrationalConstants", "LogExpFunctions", "Reexport", "Rmath", "SpecialFunctions"]
git-tree-sha1 = "cef0472124fab0695b58ca35a77c6fb942fdab8a"
uuid = "4c63d2b9-4356-54db-8cca-17b64c39e42c"
version = "1.3.1"
[deps.StatsFuns.extensions]
StatsFunsChainRulesCoreExt = "ChainRulesCore"
StatsFunsInverseFunctionsExt = "InverseFunctions"
[deps.StatsFuns.weakdeps]
ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.StrideArraysCore]]
deps = ["ArrayInterface", "CloseOpenIntervals", "IfElse", "LayoutPointers", "LinearAlgebra", "ManualMemory", "SIMDTypes", "Static", "StaticArrayInterface", "ThreadingUtilities"]
git-tree-sha1 = "f35f6ab602df8413a50c4a25ca14de821e8605fb"
uuid = "7792a7ef-975c-4747-a70f-980b88e8d1da"
version = "0.5.7"
[[deps.StringManipulation]]
deps = ["PrecompileTools"]
git-tree-sha1 = "a04cabe79c5f01f4d723cc6704070ada0b9d46d5"
uuid = "892a3eda-7b42-436c-8928-eab12a02cf0e"
version = "0.3.4"
[[deps.StructArrays]]
deps = ["ConstructionBase", "DataAPI", "Tables"]
git-tree-sha1 = "f4dc295e983502292c4c3f951dbb4e985e35b3be"
uuid = "09ab397b-f2b6-538f-b94a-2f83cf4a842a"
version = "0.6.18"
[deps.StructArrays.extensions]
StructArraysAdaptExt = "Adapt"
StructArraysGPUArraysCoreExt = "GPUArraysCore"
StructArraysSparseArraysExt = "SparseArrays"
StructArraysStaticArraysExt = "StaticArrays"
[deps.StructArrays.weakdeps]
Adapt = "79e6a3ab-5dfb-504d-930d-738a2a938a0e"
GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
StaticArrays = "90137ffa-7385-5640-81b9-e52037218182"
[[deps.SuiteSparse]]
deps = ["Libdl", "LinearAlgebra", "Serialization", "SparseArrays"]
uuid = "4607b0f0-06f3-5cda-b6b1-a6196a1729e9"
[[deps.SuiteSparse_jll]]
deps = ["Artifacts", "Libdl", "libblastrampoline_jll"]
uuid = "bea87d4a-7f5b-5778-9afe-8cc45184846c"
version = "7.2.1+1"
[[deps.TOML]]
deps = ["Dates"]
uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
version = "1.0.3"
[[deps.TableTraits]]
deps = ["IteratorInterfaceExtensions"]
git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39"
uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
version = "1.0.1"
[[deps.Tables]]
deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "OrderedCollections", "TableTraits"]
git-tree-sha1 = "598cd7c1f68d1e205689b1c2fe65a9f85846f297"
uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
version = "1.12.0"
[[deps.Tar]]
deps = ["ArgTools", "SHA"]
uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
version = "1.10.0"
[[deps.TensorCore]]
deps = ["LinearAlgebra"]
git-tree-sha1 = "1feb45f88d133a655e001435632f019a9a1bcdb6"
uuid = "62fd8b95-f654-4bbd-a8a5-9c27f68ccd50"
version = "0.1.1"
[[deps.Test]]
deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[deps.ThreadingUtilities]]
deps = ["ManualMemory"]
git-tree-sha1 = "eda08f7e9818eb53661b3deb74e3159460dfbc27"
uuid = "8290d209-cae3-49c0-8002-c8c24d57dab5"
version = "0.5.2"
[[deps.Thrift_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "boost_jll"]
git-tree-sha1 = "fd7da49fae680c18aa59f421f0ba468e658a2d7a"
uuid = "e0b8ae26-5307-5830-91fd-398402328850"
version = "0.16.0+0"
[[deps.TiffImages]]
deps = ["ColorTypes", "DataStructures", "DocStringExtensions", "FileIO", "FixedPointNumbers", "IndirectArrays", "Inflate", "Mmap", "OffsetArrays", "PkgVersion", "ProgressMeter", "SIMD", "UUIDs"]
git-tree-sha1 = "bc7fd5c91041f44636b2c134041f7e5263ce58ae"
uuid = "731e570b-9d59-4bfa-96dc-6df516fadf69"
version = "0.10.0"
[[deps.TranscodingStreams]]
git-tree-sha1 = "e84b3a11b9bece70d14cce63406bbc79ed3464d2"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.11.2"
[[deps.Tricks]]
git-tree-sha1 = "7822b97e99a1672bfb1b49b668a6d46d58d8cbcb"
uuid = "410a4b4d-49e4-4fbc-ab6d-cb71b17b3775"
version = "0.1.9"
[[deps.TriplotBase]]
git-tree-sha1 = "4d4ed7f294cda19382ff7de4c137d24d16adc89b"
uuid = "981d1d27-644d-49a2-9326-4793e63143c3"
version = "0.1.0"
[[deps.URIs]]
git-tree-sha1 = "67db6cc7b3821e19ebe75791a9dd19c9b1188f2b"
uuid = "5c2747f8-b7ea-4ff2-ba2e-563bfd36b1d4"
version = "1.5.1"
[[deps.UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[deps.UnPack]]
git-tree-sha1 = "387c1f73762231e86e0c9c5443ce3b4a0a9a0c2b"
uuid = "3a884ed6-31ef-47d7-9d2a-63182c4928ed"
version = "1.0.2"
[[deps.Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[deps.UnicodeFun]]
deps = ["REPL"]
git-tree-sha1 = "53915e50200959667e78a92a418594b428dffddf"
uuid = "1cfade01-22cf-5700-b092-accc4b62d6e1"
version = "0.4.1"
[[deps.Unitful]]
deps = ["Dates", "LinearAlgebra", "Random"]
git-tree-sha1 = "d95fe458f26209c66a187b1114df96fd70839efd"
uuid = "1986cc42-f94f-5a68-af5c-568840ba703d"
version = "1.21.0"
[deps.Unitful.extensions]
ConstructionBaseUnitfulExt = "ConstructionBase"
InverseFunctionsUnitfulExt = "InverseFunctions"
[deps.Unitful.weakdeps]
ConstructionBase = "187b0558-2788-49d3-abe0-74a17ed4e7c9"
InverseFunctions = "3587e190-3f89-42d0-90ee-14403ec27112"
[[deps.VectorizationBase]]
deps = ["ArrayInterface", "CPUSummary", "HostCPUFeatures", "IfElse", "LayoutPointers", "Libdl", "LinearAlgebra", "SIMDTypes", "Static", "StaticArrayInterface"]
git-tree-sha1 = "e7f5b81c65eb858bed630fe006837b935518aca5"
uuid = "3d5dd08c-fd9d-11e8-17fa-ed2836048c2f"
version = "0.21.70"
[[deps.WeakRefStrings]]
deps = ["DataAPI", "InlineStrings", "Parsers"]
git-tree-sha1 = "b1be2855ed9ed8eac54e5caff2afcdb442d52c23"
uuid = "ea10d353-3f73-51f8-a26c-33c1cb351aa5"
version = "1.4.2"
[[deps.WoodburyMatrices]]
deps = ["LinearAlgebra", "SparseArrays"]
git-tree-sha1 = "c1a7aa6219628fcd757dede0ca95e245c5cd9511"
uuid = "efce3f68-66dc-5838-9240-27a6d6f5f9b6"
version = "1.0.0"
[[deps.WorkerUtilities]]
git-tree-sha1 = "cd1659ba0d57b71a464a29e64dbc67cfe83d54e7"
uuid = "76eceee3-57b5-4d4a-8e66-0e911cebbf60"
version = "1.6.1"
[[deps.XML2_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libiconv_jll", "Zlib_jll"]
git-tree-sha1 = "1165b0443d0eca63ac1e32b8c0eb69ed2f4f8127"
uuid = "02c8fc9c-b97f-50b9-bbe4-9be30ff0a78a"
version = "2.13.3+0"
[[deps.XSLT_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Libgcrypt_jll", "Libgpg_error_jll", "Libiconv_jll", "XML2_jll", "Zlib_jll"]
git-tree-sha1 = "a54ee957f4c86b526460a720dbc882fa5edcbefc"
uuid = "aed1982a-8fda-507f-9586-7b0439959a61"
version = "1.1.41+0"
[[deps.XZ_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "ac88fb95ae6447c8dda6a5503f3bafd496ae8632"
uuid = "ffd25f8a-64ca-5728-b0f7-c24cf3aae800"
version = "5.4.6+0"
[[deps.Xorg_libX11_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libxcb_jll", "Xorg_xtrans_jll"]
git-tree-sha1 = "afead5aba5aa507ad5a3bf01f58f82c8d1403495"
uuid = "4f6342f7-b3d2-589e-9d20-edeb45f2b2bc"
version = "1.8.6+0"
[[deps.Xorg_libXau_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "6035850dcc70518ca32f012e46015b9beeda49d8"
uuid = "0c0b7dd1-d40b-584c-a123-a41640f87eec"
version = "1.0.11+0"
[[deps.Xorg_libXdmcp_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "34d526d318358a859d7de23da945578e8e8727b7"
uuid = "a3789734-cfe1-5b06-b2d0-1dd0d9d62d05"
version = "1.1.4+0"
[[deps.Xorg_libXext_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"]
git-tree-sha1 = "d2d1a5c49fae4ba39983f63de6afcbea47194e85"
uuid = "1082639a-0dae-5f34-9b06-72781eeb8cb3"
version = "1.3.6+0"
[[deps.Xorg_libXrender_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Xorg_libX11_jll"]
git-tree-sha1 = "47e45cd78224c53109495b3e324df0c37bb61fbe"
uuid = "ea2f1a96-1ddc-540d-b46f-429655e07cfa"
version = "0.9.11+0"
[[deps.Xorg_libpthread_stubs_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "8fdda4c692503d44d04a0603d9ac0982054635f9"
uuid = "14d82f49-176c-5ed1-bb49-ad3f5cbd8c74"
version = "0.1.1+0"
[[deps.Xorg_libxcb_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "XSLT_jll", "Xorg_libXau_jll", "Xorg_libXdmcp_jll", "Xorg_libpthread_stubs_jll"]
git-tree-sha1 = "bcd466676fef0878338c61e655629fa7bbc69d8e"
uuid = "c7cfdc94-dc32-55de-ac96-5a1b8d977c5b"
version = "1.17.0+0"
[[deps.Xorg_xtrans_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e92a1a012a10506618f10b7047e478403a046c77"
uuid = "c5fb5394-a638-5e4d-96e5-b29de1b5cf10"
version = "1.5.0+0"
[[deps.ZipFile]]
deps = ["Libdl", "Printf", "Zlib_jll"]
git-tree-sha1 = "f492b7fe1698e623024e873244f10d89c95c340a"
uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea"
version = "0.10.1"
[[deps.Zlib_jll]]
deps = ["Libdl"]
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
version = "1.2.13+1"
[[deps.Zstd_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "e678132f07ddb5bfa46857f0d7620fb9be675d3b"
uuid = "3161d3a3-bdf6-5164-811a-617609db77b4"
version = "1.5.6+0"
[[deps.boost_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"]
git-tree-sha1 = "7a89efe0137720ca82f99e8daa526d23120d0d37"
uuid = "28df3c45-c428-5900-9ff8-a3135698ca75"
version = "1.76.0+1"
[[deps.isoband_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg"]
git-tree-sha1 = "51b5eeb3f98367157a7a12a1fb0aa5328946c03c"
uuid = "9a68df92-36a6-505f-a73e-abb412b6bfb4"
version = "0.2.3+0"
[[deps.libaec_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "46bf7be2917b59b761247be3f317ddf75e50e997"
uuid = "477f73a3-ac25-53e9-8cc3-50b2fa2566f0"
version = "1.1.2+0"
[[deps.libaom_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "1827acba325fdcdf1d2647fc8d5301dd9ba43a9d"
uuid = "a4ae2306-e953-59d6-aa16-d00cac43593b"
version = "3.9.0+0"
[[deps.libass_jll]]
deps = ["Artifacts", "Bzip2_jll", "FreeType2_jll", "FriBidi_jll", "HarfBuzz_jll", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "e17c115d55c5fbb7e52ebedb427a0dca79d4484e"
uuid = "0ac62f75-1d6f-5e53-bd7c-93b484bb37c0"
version = "0.15.2+0"
[[deps.libblastrampoline_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "8e850b90-86db-534c-a0d3-1478176c7d93"
version = "5.8.0+1"
[[deps.libfdk_aac_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "8a22cf860a7d27e4f3498a0fe0811a7957badb38"
uuid = "f638f0a6-7fb0-5443-88ba-1cc74229b280"
version = "2.0.3+0"
[[deps.libgeotiff_jll]]
deps = ["Artifacts", "JLLWrappers", "LibCURL_jll", "Libdl", "Libtiff_jll", "PROJ_jll"]
git-tree-sha1 = "c48ca6e850d4190dcb8e0ccd220380c2bc678403"
uuid = "06c338fa-64ff-565b-ac2f-249532af990e"
version = "100.701.300+0"
[[deps.libpng_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Zlib_jll"]
git-tree-sha1 = "d7015d2e18a5fd9a4f47de711837e980519781a4"
uuid = "b53b4c65-9356-5827-b1ea-8c7a1a84506f"
version = "1.6.43+1"
[[deps.libsixel_jll]]
deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Pkg", "libpng_jll"]
git-tree-sha1 = "d4f63314c8aa1e48cd22aa0c17ed76cd1ae48c3c"
uuid = "075b6546-f08a-558a-be8f-8157d0f608a5"
version = "1.10.3+0"
[[deps.libvorbis_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Ogg_jll", "Pkg"]
git-tree-sha1 = "490376214c4721cdaca654041f635213c6165cb3"
uuid = "f27f6e37-5d2b-51aa-960f-b287f2bc3b7a"
version = "1.3.7+2"
[[deps.libzip_jll]]
deps = ["Artifacts", "Bzip2_jll", "GnuTLS_jll", "JLLWrappers", "Libdl", "XZ_jll", "Zlib_jll", "Zstd_jll"]
git-tree-sha1 = "3282b7d16ae7ac3e57ec2f3fa8fafb564d8f9f7f"
uuid = "337d8026-41b4-5cde-a456-74a10e5b31d1"
version = "1.10.1+0"
[[deps.nghttp2_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d"
version = "1.52.0+1"
[[deps.oneTBB_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "7d0ea0f4895ef2f5cb83645fa689e52cb55cf493"
uuid = "1317d2d5-d96f-522e-a858-c73665f53c3e"
version = "2021.12.0+0"
[[deps.p7zip_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
version = "17.4.0+2"
[[deps.snappy_jll]]
deps = ["Artifacts", "JLLWrappers", "LZO_jll", "Libdl", "Zlib_jll"]
git-tree-sha1 = "ab27636e7c8222f14b9318a983fcd89cf130d419"
uuid = "fe1e1685-f7be-5f59-ac9f-4ca204017dfd"
version = "1.1.10+0"
[[deps.x264_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "35976a1216d6c066ea32cba2150c4fa682b276fc"
uuid = "1270edf5-f2f9-52d2-97e9-ab00b5d0237a"
version = "10164.0.0+0"
[[deps.x265_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl"]
git-tree-sha1 = "dcc541bb19ed5b0ede95581fb2e41ecf179527d2"
uuid = "dfaa095f-4041-5dcd-9319-2fabd8486b76"
version = "3.6.0+0"
"""
# βββ‘ Cell order:
# ββ71e87ed3-5a9f-49aa-99af-cf144501c678
# ββa58cc4b4-7023-4dcf-a5f4-6366be8047a3
# ββa45bbdbd-3793-4e69-b042-39a4a1ac7ed7
# ββ62e0b8a9-0025-4ce7-9538-b6114d97b762
# ββc93dde18-e639-4edd-9192-f6c9eed0cb89
# ββ0c9fdfb0-bddc-4def-9954-526978491a84
# ββ8ff180a4-dd71-4a70-82ab-70bc80427abb
# ββff7dd5eb-5b1b-4314-9553-b8c05c4d7376
# ββ9b3c3856-9fe1-43ba-97a2-abcd5b385c1d
# ββ1cf2cdb9-3c09-4b39-81cf-49318c16f531
# ββa8e0b727-a416-4aad-b660-69e5470c7e9e
# ββeeb9d308-ef62-4dcc-ba90-a2a1912ef2bd
# ββ50b75406-e55f-433d-bd7b-089d975e5001
# ββaf68228e-710c-4a5a-be48-c716592f8f45
# ββaa82d9bd-8ba2-4e18-8f65-e71b0361f5cb
# ββ24a9fc25-b85b-4582-a36b-0a43e04ee799
# ββ5fec1029-34a1-4d43-9183-7e6095194a3a
# ββec8cbf44-82d9-11ed-0131-1bdea9285f79
# β β8fbd1b1d-affe-4e30-a3b2-f2584e459003
# ββ2d5611a9-b8ea-4d26-8ca3-edff9f2ebfdd
# ββ00000000-0000-0000-0000-000000000001
# ββ00000000-0000-0000-0000-000000000002
module ClimatologyMITgcmExt
import MITgcm
import Climatology: read_nctiles_alias, read_mdsio_alias
import MITgcm: read_nctiles, read_mdsio
read_nctiles_alias(args...;kwargs...)=read_nctiles(args...;kwargs...)
read_mdsio_alias(args...)=read_mdsio(args...)
end
module ClimatologyMakieExt
using Makie, Climatology
import Climatology: Statistics, RollingFunctions, plot_examples, load
import Climatology: ECCOdiag, SSTdiag, SeaLevelAnomaly
import Statistics: mean
import Makie: plot
import RollingFunctions: runmean
## 1. ECCO
function plot(x::ECCOdiag)
if !isempty(x.options)
o=x.options
if string(o.plot_type)=="ECCO_map"
map(ECCO_procs.map(o.nammap,o.P,o.statmap,o.timemap,x.path))
elseif string(o.plot_type)=="ECCO_TimeLat"
nam=split(x.name,"_")[1]
TimeLat(ECCO_procs.TimeLat(nam,x.path,o.year0,o.year1,o.cmap_fac,o.k,o.P); years_to_display=o.years_to_display)
elseif string(o.plot_type)=="ECCO_TimeLatAnom"
nam=split(x.name,"_")[1]
TimeLat(ECCO_procs.TimeLatAnom(nam,x.path,o.year0,o.year1,o.cmap_fac,o.k,o.l0,o.l1,o.P); years_to_display=o.years_to_display)
elseif string(o.plot_type)=="ECCO_DepthTime"
nam=split(x.name,"_")[1]
DepthTime(ECCO_procs.DepthTime(nam,x.path,o.facA,o.l,o.year0,o.year1,o.k0,o.k1,o.P); years_to_display=o.years_to_display)
elseif string(o.plot_type)=="ECCO_GlobalMean"
gl1=ECCO_procs.glo(x.path,x.name,o.k,o.year0,o.year1)
glo(gl1,o.year0,o.year1; years_to_display=o.years_to_display)
elseif x.name=="OHT"&&string(o.plot_type)=="ECCO_OHT1"
OHT(x.path)
elseif x.name=="overturn"&&string(o.plot_type)=="ECCO_Overturn1"
figov1(x.path,o.kk,o.low1,o.year0,o.year1; years_to_display=o.years_to_display)
elseif x.name=="overturn"&&string(o.plot_type)=="ECCO_Overturn2"
figov2(x.path,o.grid)
elseif x.name=="trsp"&&string(o.plot_type)=="ECCO_Transports"
transport(o.namtrs,o.ncols,x.path,o.list_trsp,o.year0,o.year1,years_to_display=o.years_to_display)
else
println("unknown option (b)")
end
else
println("unknown option (a)")
end
end
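# Hedged usage sketch for the dispatch above (a sketch, not a definitive recipe):
# `pth` is assumed to point at a folder of precomputed ECCO diagnostics, e.g. from
# `ECCOdiags_add`; option names follow the `plot_type` branches listed in this function.
#
#   using Climatology, CairoMakie
#   pth=ECCOdiags_add("OCCA2HR1")
#   plot(ECCOdiag(path=pth,name="OHT",options=(plot_type=:ECCO_OHT1,)))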
##
to_range!(DD,levs::Tuple) = to_range!(DD,range(levs[1],levs[2],length=10))
function to_range!(DD,levs)
DD[findall(DD.<=levs[1])].=levs[1]+(levs[2]-levs[1])/100
DD[findall(DD.>=levs[end])].=levs[end]-(levs[end]-levs[end-1])/100
end
# years_to_display=(1960,2023)
years_to_display=(1980,2024)
function axtr1(ax,namtr,pth_out,list_trsp,year0,year1;years_to_display=years_to_display)
itr=findall(list_trsp.==namtr)[1]
tmp=vec(load(ECCOdiag(path=pth_out,name="trsp")))[itr]
nt=size(tmp.val,2)
x=vec(0.5:nt)
txt=tmp.nam[1:end-5]
val=1e-6*vec(sum(tmp.val,dims=1)[:])
valsmo = runmean(val, 12)
x=vec(0.5:nt)
x=year0 .+ x./12.0
hm1=lines!(ax,x,val,label="ECCO estimate")
valsmo[1:5].=NaN
valsmo[end-4:end].=NaN
lines!(ax,x,valsmo,linewidth=4.0,color=:red)
xlims!(ax,years_to_display)
end
function transport(namtrs,ncols,pth_out,list_trsp,year0,year1;years_to_display=years_to_display)
if ncols > 1
fig1 = Figure(size = (2000,1000),markersize=0.1)
else
fig1 = Figure(size = (900,400),markersize=0.1)
end
for na in 1:length(namtrs)
txt=namtrs[na]
jj=div.(na,ncols,RoundUp)
kk=na-(jj.-1)*ncols
ax1 = Axis(fig1[jj,kk], title=" $txt (in Sv)",
xticks=(year0:4:year1),ylabel="transport, in Sv")
axtr1(ax1,namtrs[na],pth_out,list_trsp,year0,year1,years_to_display=years_to_display)
#ylims!(ax1,rng)
end
fig1
end
function figov1(pth_out,kk,low1,year0,year1;years_to_display=years_to_display)
tmp=-1e-6*load(ECCOdiag(path=pth_out,name="overturn"))
nt=size(tmp,3)
x=vec(0.5:nt)
x=year0 .+ x./12.0
lats=vec(-89.0:89.0)
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1],ylabel="Sv",
title="Global Overturning, in Sv, at kk=$(kk)",
xticks=(year0:4:year1))
for ll in 115:10:145
ov=tmp[ll,kk,:]
ov=runmean(ov, 12)
ov[1:5].=NaN
ov[end-4:end].=NaN
hm1=lines!(x,ov,label="$(lats[ll])N")
end
xlims!(ax1,years_to_display)
ylims!(ax1,(5,20))
low1!="auto" ? ylims!(ax1,(low1,20.0)) : nothing
fig1[1, 2] = Legend(fig1, ax1, "estimate", framevisible = false)
fig1
end
function figov2(pth_out,Ξ; ClipToRange=true)
tmp=-1e-6*load(ECCOdiag(path=pth_out,name="overturn"))
ovmean=dropdims(mean(tmp[:,:,1:240],dims=3),dims=3)
x=vec(-89.0:89.0); y=reverse(vec(Ξ.RF[1:end-1])); #coordinate variables
z=reverse(ovmean,dims=2); z[z.==0.0].=NaN
levs=(-40.0:5.0:40.0)
ClipToRange ? to_range!(z,levs) : nothing
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title="Meridional Overturning Streamfunction (in Sv, time mean)",
xlabel="latitude",ylabel="depth (in m)")
hm1=contourf!(ax1,x,y,z,levels=levs)
Colorbar(fig1[1,2], hm1, height = Relative(0.65))
fig1
end
function OHT(pth_out)
tmp=load(ECCOdiag(path=pth_out,name="MHT"))
MT=vec(mean(tmp[:,1:240],dims=2))
x=vec(-89.0:89.0)
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title="Northward Heat Transport (in PW, time mean)",
xticks=(-90.0:10.0:90.0),yticks=(-2.0:0.25:2.0),
xlabel="latitude",ylabel="Transport (in PW)")
hm1=lines!(x,MT)
ylims!(ax1,(-2.0,2.0))
fig1
end
function glo(gl1,year0,year1;years_to_display=years_to_display)
ttl="Global Mean $(gl1.txt)"
zlb=gl1.txt
rng=gl1.rng
if false
fac=4e6*1.335*10^9*10^9/1e21
ttl="Ocean Heat Uptake (Zetta-Joules)"
zlb="Zetta-Joules"
rng=(-100.0,300.0)
y=fac*(gl1.y.-gl1.y[1])
else
y=gl1.y
end
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title=ttl,
xticks=collect(year0:4:year1),ylabel=zlb)
hm1=lines!(ax1,gl1.x,y)
xlims!(ax1,years_to_display)
ylims!(ax1,rng)
fig1
end
function DepthTime(XYZ; ClipToRange=true, years_to_display=years_to_display)
ClipToRange ? to_range!(XYZ.z,XYZ.levels) : nothing
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title=XYZ.title,
xticks=collect(XYZ.year0:4:XYZ.year1))
hm1=contourf!(ax1,XYZ.x,XYZ.y,XYZ.z,levels=XYZ.levels,colormap=:turbo)
Colorbar(fig1[1,2], hm1, height = Relative(0.65))
haskey(XYZ,:years_to_display) ? xlims!(ax1,XYZ.years_to_display) : xlims!(ax1,years_to_display)
ylims!(ax1,XYZ.ylims)
fig1
end
function TimeLat(XYZ; ClipToRange=true, years_to_display=years_to_display)
ClipToRange ? to_range!(XYZ.z,XYZ.levels) : nothing
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title=XYZ.title,
xticks=collect(XYZ.year0:4:XYZ.year1),yticks=collect(-90.0:20.0:90.0),ylabel="latitude")
hm1=contourf!(ax1,XYZ.x,XYZ.y,XYZ.z,levels=XYZ.levels,colormap=:turbo)
Colorbar(fig1[1,2], hm1, height = Relative(0.65))
xlims!(ax1,years_to_display)
ylims!(ax1,XYZ.ylims...)
fig1
end
function map(X; ClipToRange=true)
ClipToRange ? to_range!(X.field,X.levels) : nothing
fig = Figure(size = (900,600), backgroundcolor = :grey95)
ax = Axis(fig[1,1], title=X.title,xlabel="longitude",ylabel="latitude")
hm1=contourf!(ax,X.Ξ».lon[:,1],X.Ξ».lat[1,:],X.field,levels=X.levels,colormap=:turbo)
Colorbar(fig[1,2], hm1, height = Relative(0.65))
fig
end
## 2. OISST
function plot(x::SSTdiag)
if !isempty(x.options)
o=x.options
if string(o.plot_type)=="map_base"
fig,ax,im=SST_plots.map_base()
fig
elseif string(o.plot_type)=="local_and_global"
SST_plots.local_and_global(o.ts,o.ts_global,o.kdf0)
elseif string(o.plot_type)=="by_year"
SST_plots.by_year(o.ts)
elseif string(o.plot_type)=="by_time"
SST_plots.by_time(o.ts,show_anom=o.show_anom,show_clim=o.show_clim)
elseif string(o.plot_type)=="TimeLat"
SST_plots.TimeLat(o.ts,o.zm,o.title)
elseif string(o.plot_type)=="MHW"
SST_plots.MHW(o.ts)
elseif string(o.plot_type)=="map"
SST_plots.plot_sst_map(o.to_map)
else
error("unknown plot_type")
end
else
error("unknown options")
end
end
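# Hedged usage sketch (assumes `ts` is an SST time-series NamedTuple, e.g. as produced
# by `SST_timeseries`; field names follow the `plot_type` branches above):
#
#   using Climatology, CairoMakie
#   plot(SSTdiag(options=(plot_type=:by_time,ts=ts,show_anom=true,show_clim=true)))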
module SST_plots
using Makie
import Climatology: load, Statistics, SSTdiag
import Climatology: MeshArrays, DataDeps
import Statistics: median
#
function by_time(ts; show_anom = true, show_clim=true)
tim=collect(1:length(ts.sst))/365.25 .+ 1982
f,a=lines(tim,ts.sst,label="SST",linewidth=4)
show_clim ? lines!(a,tim,ts.clim,color=:orange,label="seasonal climatology",linewidth=1) : nothing
show_anom ? lines!(a,tim,ts.anom,color=:red,label="SST - seasonal cycle") : nothing
a.title=ts.title
xlims!(1982,2024)
axislegend(a,position=:rb)
f
end
function by_year(ts)
f,a,l=lines(ts.sst[1:365],color=:gray)
[lines!(ts.sst[ (1:365) .+ 365*(y-1)] ,color=:gray) for y in 2:length(1982:2022)]
lines!(ts.sst[ 365*(2023-1982):365*(2024-1982)],color=:orange)
lines!(ts.sst[ 365*(2024-1982):end],color=:red,linewidth=2)
for y in 2021:2022
tt1=vec(1:365) .+(y-1982)*365; lines!(ts.sst[tt1],color=:blue)
end
a.title="SST year by year (red=2024, orange=2023, blue=2021:2022)"
f
end
#
function to_range!(DD,levs)
DD[findall(DD.<=levs[1])].=levs[1]+(levs[2]-levs[1])/100
DD[findall(DD.>=levs[end])].=levs[end]-(levs[end]-levs[end-1])/100
end
function TimeLat(list,zm,ttl;
ClipToRange=true, year0=1982, year1=2024, lat0=-90, lat1=90)
x=collect(1:length(list.year))/365.25 .+ 1982
dy=Int(180/size(zm,1))
y=collect(-90+dy/2:dy:90-dy/2)
z=permutedims(zm)
levs=(-2.0:0.25:2.0)/5.0
ClipToRange ? to_range!(z,levs) : nothing
fig1 = Figure(size = (900,400),markersize=0.1)
ax1 = Axis(fig1[1,1], title=ttl,
xticks=collect(year0:4:year1),yticks=collect(-90.0:20.0:90.0),ylabel="latitude")
hm1=contourf!(ax1,x[1:7:end],y,z[1:7:end,:],levels=levs,colormap=:curl)
Colorbar(fig1[1,2], hm1, height = Relative(0.65))
xlims!(ax1,year0,year1)
ylims!(ax1,lat0,lat1)
fig1
end
#
function lowres_scatter(kdf,fig=[],ax=[]; input=[])
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
(ii,jj)=(10*i.-5,10*j.-95)
if isa(fig,Array)
f,a=scatter(ii,jj,color=input,markersize=10)
c=(:blue,:red)
else
(f,a)=(ax,fig)
c=(:skyblue,:pink)
end
text!(a,ii.+1,jj,text=string.(i),fontsize=11,color=c[1])
text!(a,ii.+1,jj.-3,text=string.(j),fontsize=11,color=c[2])
f
end
#
function local_and_global(ts,ts_global,kdf0)
tim=collect(1:length(ts.anom))/365.25 .+ 1982
fig,ax,li=lines(tim,ts.anom .-median(ts.anom),label="local")
lines!(tim,ts_global.anom .-median(ts_global.anom),label="global")
ax.title="local and global SST anomalies"
xlims!(1982,2024)
ylims!(-2.5,2.5)
axislegend(ax,position = :rb)
fig
end
function map_base()
earth_jpg=joinpath(MeshArrays.mydatadep("basemap_jpg1"),
"Blue_Marble_Next_Generation_%2B_topography_%2B_bathymetry.jpg")
earth_img=load(earth_jpg)
earth_img=reverse(permutedims(earth_img),dims=2)
earth_img=circshift(earth_img,(1800,0))
#fig = Figure(resolution = (1200, 800)) #, backgroundcolor = :grey80)
fig=with_theme(Figure,theme_light())
ax = Axis(fig[1, 1])
# im=image!(ax, -0.05 .. 359.95, -89.95 .. 89.95, 0.5 .+0.5*Gray.(earth_img))
im=image!(ax, -0.05 .. 359.95, -89.95 .. 89.95, earth_img)
hidedecorations!(ax)
fig,ax,im
end
##
function MHW(ts,ttl="SST anomaly with extreme warm periods in red")
x=ts.sst-ts.clim
y=fill(:blue,size(x))
y[findall(x.>=ts.high)].=:red
tim=collect(1:length(ts.sst))/365.25 .+ 1982
fig,ax,li=lines(tim,x,color=y)
xlims!(1982,2024)
ax.title=ttl
fig
end
function plot_sst_map(to_map)
fig=plot(SSTdiag(options=(plot_type=:map_base,)))
ax=current_axis()
hm=heatmap!(ax,to_map.lon,to_map.lat,to_map.field,colormap=to_map.colormap,colorrange=to_map.colorrange)
to_map.showgrid ? lowres_scatter(ax) : nothing
scatter!(ax,to_map.lon1,to_map.lat1,marker=:circle,color=:blue,markersize=30)
scatter!(ax,to_map.lon1,to_map.lat1,marker=:x,color=:yellow,markersize=15)
Colorbar(fig[1, 2],hm)
ax.title=to_map.title
fig
end
end
## 3. SeaLevelAnomaly
function plot(x::SeaLevelAnomaly)
SLA_PLOTS.default_plot(x)
end
module SLA_PLOTS
using Makie
import Climatology: SeaLevelAnomaly, SLA_MAIN, Statistics
import Statistics: mean
## Satellite
"""
default_plot(b::SeaLevelAnomaly)
```
using Climatology
sla=make_plot(SeaLevelAnomaly(),:sla_podaac)
plot(sla)
```
"""
default_plot(b::SeaLevelAnomaly) = begin
fig,_,_=prep_movie(b.data[1]; b.options...)
fig
end
function prep_movie(ds; topo=[], colormap=:PRGn, color=:black,
time=1, dates=[], resolution = (600, 400))
lon=ds["lon"][:]
lat=ds["lat"][:]
store=ds["SLA"][:,:,:]
nt=size(store,3)
kk=findall((!isnan).(store[:,:,end]))
n=Observable(time)
SLA=@lift(store[:,:,$n])
SLA2=@lift($(SLA).-mean($(SLA)[kk]))
fig=Figure(size=resolution,fontsize=11)
ax=Axis(fig[1,1])
hm=heatmap!(lon,lat,SLA2,colorrange=0.25.*(-1.0,1.0),colormap=colormap)
if !isempty(topo)
lon[1]>0.0 ? lon_off=360.0 : lon_off=0.0
contour!(lon_off.+topo.lon,topo.lat,topo.z,levels=-300:100:300,color=color,linewidth=1)
contour!(lon_off.+topo.lon,topo.lat,topo.z,levels=-2500:500:-500,color=color,linewidth=0.25)
contour!(lon_off.+topo.lon,topo.lat,topo.z,levels=-6000:1000:-3000,color=color,linewidth=0.1)
end
lon0=minimum(lon)+(maximum(lon)-minimum(lon))/20.0
lat0=maximum(lat)-(maximum(lat)-minimum(lat))/10.0
if isempty(dates)
println("no date")
else
dtxt=@lift(string(dates[$n]))
text!(lon0,lat0,text=dtxt,color=:blue2,fontsize=14,font = :bold)
end
Colorbar(fig[1,2],hm)
fig,n,nt
end
function make_movie(ds,tt; framerate = 90, dates=[])
fig,n,nt=prep_movie(ds,dates=dates)
record(fig,tempname()*".mp4", tt; framerate = framerate) do t
n[] = t
end
end
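# Hedged usage sketch (assumes `ds` is an open dataset providing "lon", "lat", and "SLA",
# e.g. the first element of a `SeaLevelAnomaly` `data` field; `dates` is optional):
#
#   fig,n,nt=prep_movie(ds)
#   file_mp4=make_movie(ds,1:nt)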
end
##
end
module ClimatologyNCDatasetsExt
import Climatology: ECCO, load, read_Dataset, ECCOdiags_to_nc
import Climatology: write_SST_climatology, SST_demo_path, to_monthly_file
import Climatology: write_SLA_PODAAC, write_SLA_CMEMS
import MeshArrays, Printf
import MeshArrays: GridSpec, Tiles, GridLoadVar, GRID_LLC90
import NCDatasets: Dataset, defDim, defVar
read_Dataset(args...;kwargs...)=Dataset.(args...;kwargs...)
function ECCOdiags_to_nc(;path_in=".",file_out=tempname()*".nc",
year1=1960,nt=771,title="this is a test file")
nsec=23
nlatMT=179
nlatZM=90
ds = Dataset(file_out,"c")
## dimension definitions
defDim(ds,"x",30); defDim(ds,"y",30); defDim(ds,"tile",117)
#defDim(ds,"lon",360); defDim(ds,"lat",180)
defDim(ds,"latMT",nlatMT); defDim(ds,"latZM",nlatZM)
defDim(ds,"section",23); defDim(ds,"depth",50)
defDim(ds,"time",nt); defDim(ds,"date",nt)
defDim(ds,"month",12)
defDim(ds,"time_clim",14)
defDim(ds,"level_clim",6)
## dimension variables
latMT=-89.0:89.0
v = defVar(ds,"latMT",Float32,("latMT",)); v[:]=latMT
dlat=2.0; latZM=vec(-90+dlat/2:dlat:90-dlat/2)
v = defVar(ds,"latZM",Float32,("latZM",)); v[:]=latZM
v = defVar(ds,"time",Float32,("time",)); v[:]=year1-0.5/12 .+ (1:nt)/12
v = defVar(ds,"month",Float32,("month",)); v[:]=1:12
time_clim=string.([:J,:F,:M,:A,:M,:J,:J,:A,:S,:O,:N,:D, :annual, :std])
v = defVar(ds,"time_clim",String,("time_clim",)); v[:]=time_clim
level_clim=[1 10 20 29 38 44]
v = defVar(ds,"level_clim",Int,("level_clim",)); v[:]=level_clim
ds.attrib["title"] = title
## simple array data
list_in=ECCO.diagnostics_set1(path_in)
for k in 1:size(list_in,1)
# println(list_in[k,"name"])
tmp=load(list_in[k,"file"])["single_stored_object"]
v = defVar(ds,list_in[k,"name"],Float32,list_in[k,"dims"])
v.attrib["units"] = list_in[k,"units"]
if isa(tmp[1],NamedTuple)
[v[s,:,:] = tmp[s].val for s in 1:nsec]
w = defVar(ds,"transport_name",String,("section",))
[w[s] = split(tmp[s].nam,".")[1] for s in 1:nsec]
else
if (ndims(tmp)>1)&&(size(tmp,1)==nt)
v[:] = permutedims(tmp,ndims(tmp):-1:1)
else
v[:] = tmp
end
end
end
## MeshArrays data
Ξ³=GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
Ο=Tiles(Ξ³,30,30)
list_dims=("x","y","tile")
lon_clim = Tiles(Ο,GridLoadVar("XC",Ξ³))
v = defVar(ds,"lon_clim",Float64,list_dims)
[v[:,:,tile]=lon_clim[tile] for tile in 1:117]
lat_clim = Tiles(Ο,GridLoadVar("YC",Ξ³))
v = defVar(ds,"lat_clim",Float64,list_dims)
[v[:,:,tile]=lat_clim[tile] for tile in 1:117]
list_in=ECCO.diagnostics_set2(path_in)
for k in 1:size(list_in,1)
# println(list_in[k,"name"])
tmp=load(list_in[k,"file"])
list_dims=("x","y","tile","time_clim")
v = defVar(ds,list_in[k,"name"],Float64,list_dims)
for time in 1:12
tmp1=Tiles(Ο,tmp["mon"][:,time]);
[v[:,:,tile,time]=tmp1[tile] for tile in 1:117]
end
tmp1=Tiles(Ο,tmp["mean"]); [v[:,:,tile,13]=tmp1[tile] for tile in 1:117]
tmp1=Tiles(Ο,tmp["std"]); [v[:,:,tile,14]=tmp1[tile] for tile in 1:117]
end
list_in=ECCO.diagnostics_set3(path_in)
for k in 1:size(list_in,1)
#println(list_in[k,"name"])
list_dims=("x","y","tile","level_clim","time_clim")
v = defVar(ds,list_in[k,"name"],Float64,list_dims)
for level in 1:length(level_clim)
suff=Printf.@sprintf("%02d.jld2",level_clim[level])
file=list_in[k,"file"][1:end-7]*suff
tmp=load(file)
#println("file = "*file)
for time in 1:12
tmp1=Tiles(Ο,tmp["mon"][:,time]);
[v[:,:,tile,level,time]=tmp1[tile] for tile in 1:117]
end
tmp1=Tiles(Ο,tmp["mean"])
[v[:,:,tile,level,13]=tmp1[tile] for tile in 1:117]
tmp1=Tiles(Ο,tmp["std"])
[v[:,:,tile,level,14]=tmp1[tile] for tile in 1:117]
end
end
close(ds)
file_out
end
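# Hedged usage sketch (assumes `path_in` holds the standard diagnostics listed by
# `ECCO.diagnostics_set1/2/3`; output file name and title are illustrative):
#
#   using Climatology, NCDatasets
#   file_out=ECCOdiags_to_nc(path_in=path_in,file_out=tempname()*".nc",title="ECCO diagnostics")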
"""
write_SST_climatology(output_path,year0,year1,lon,lat)
Consolidate monthly fields into one file with
- 12 months
- both sst and anom
- coordinate variables
- some metadata
"""
function write_SST_climatology(output_path,year0,year1,lo,la)
arr=zeros(1440,720,12,2)
for m in 1:12
arr[:,:,m,1].=Dataset(joinpath(output_path,"sst_month$(m).nc"))["sst"][:,:]
arr[:,:,m,2].=Dataset(joinpath(output_path,"anom_month$(m).nc"))["anom"][:,:]
end
fi=joinpath(output_path,"OISST_mean_monthly_$(year0)_$(year1).nc")
#
ds = Dataset(fi,"c")
ds.attrib["title"] = "OISST climatology for $(year0) to $(year1)"
ds.attrib["author"] = "Gael Forget"
defDim(ds,"lon",1440); defDim(ds,"lat",720); defDim(ds,"month",12);
#
lon = defVar(ds,"lon",Float32,("lon",))
lat = defVar(ds,"lat",Float32,("lat",))
mon = defVar(ds,"month",Float32,("month",))
sst = defVar(ds,"sst",Float32,("lon","lat","month"))
anom = defVar(ds,"anom",Float32,("lon","lat","month"))
#
lon[:] = lo[:]
lat[:] = la[:]
mon[:] = 1:12
sst[:,:,:] = arr[:,:,:,1]
anom[:,:,:] = arr[:,:,:,2]
#
close(ds)
fi
end
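# Hedged usage sketch (assumes `output_path` already holds `sst_month1.nc` ... `sst_month12.nc`
# and `anom_month1.nc` ... `anom_month12.nc`, e.g. written via `to_monthly_file`, and that
# `lon`, `lat` are the 1440x720 OISST grid coordinates):
#
#   fil=write_SST_climatology(output_path,1992,2011,lon,lat)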
function to_monthly_file(arr,m; varname="sst",output_path=SST_demo_path)
fil=joinpath(output_path,"$(varname)_month$(m).nc")
ds = read_Dataset(fil,"c")
defDim(ds,"i",size(arr,1))
defDim(ds,"j",size(arr,2))
v = defVar(ds,varname,Float32,("i","j"))
arr[ismissing.(arr)].=NaN
v[:,:] = arr
close(ds)
return fil
end
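# Hedged usage sketch (assumes `arr` is a 1440x720 monthly mean field for month 1):
#
#   fil=to_monthly_file(arr,1,varname="sst",output_path=SST_demo_path)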
##
import Climatology: read_IAP, file_IAP, write_H_to_T
import NCDatasets, MeshArrays
using DataStructures: OrderedDict
using MeshArrays: gridmask, Integration
"""
file_IAP(path,y,m)
"""
file_IAP(path,y,m)=begin
mm=(m<10 ? "0$m" : "$m")
joinpath(path,"IAPv4_Temp_monthly_1_6000m_year_$(y)_month_$(mm).nc")
end
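# Hedged example of the file naming implemented above (the folder name is illustrative):
#
#   file_IAP("monthly",2023,3)
#   # -> "monthly/IAPv4_Temp_monthly_1_6000m_year_2023_month_03.nc"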
"""
read_IAP(F,var,tim,tmp=[])
```
using Climatology, NCDatasets, MeshArrays
p0="IAPv4_IAP_Temperature_gridded_1month_netcdf/monthly/"
fil=Climatology.file_IAP(p0,"2023","12")
depth=Dataset(fil)["depth_std"][:]
temp=Climatology.read_IAP(fil,"temp",1,[])
mask=1.0*(!ismissing).(temp)
G=MeshArrays.Grids_simple.GridLoad_lonlatdep(depth,mask)
tmp=zeros(G.XC.grid)*ones(length(depth))
Climatology.read_IAP(fil,"temp",1,tmp)
```
"""
function read_IAP(F,var,tim,tmp=[])
fil=F
ds=Dataset(fil)
temp=permutedims(ds[var][:,:,:],(2,3,1))
close(ds)
temp[findall(ismissing.(temp))].=0
temp[findall(isnan.(temp))].=0
if !isempty(tmp)
tmp.=read(Float32.(temp),tmp)
tmp
else
temp
end
end
"""
write_H_to_T(file::String,M::gridmask,G::NamedTuple,H::Array,V::Array)
Write `H / Integration.volumes(M,G)` to file.
```
using Climatology, NCDatasets, MeshArrays
G=MeshArrays.Grids_simple.GridLoad_lonlatdep(depth,mask)
M=Integration.define_sums(grid=G,regions=(10,5))
H=ones(length(M.names),length(M.depths),3)
V=Integration.volumes(M,G)
Climatology.write_H_to_T(tempname()*".nc",M,G,H,V)
```
"""
function write_H_to_T(file::String,M::gridmask,G::NamedTuple,H::Array,V::Array)
nb,nz,nt=size(H)
inv_vol=1.0./V
#inv_vol[V.==0].=0
pos=gridpos(M,(10,5))
arr2d=zeros(36,32)
arr3d=zeros(36,32,nz)
arr4d=zeros(36,32,nz,nt)
println(nz)
ds = Dataset(file,"c")
defDim(ds,"lon",36); defDim(ds,"lat",32);
defDim(ds,"dep",size(H,2)); defDim(ds,"tim",size(H,3));
ds.attrib["title"] = "this is a test file"
dlo=10; dla=5;
lons=collect(-180:dlo:180); lons=0.5*(lons[1:end-1]+lons[2:end])
lats=[-90 ; -75:dla:75 ; 90]; lats=0.5*(lats[1:end-1]+lats[2:end])
vlo=defVar(ds,"lon",lons,("lon",), attrib = OrderedDict("units" => "degree", "long_name" => "Longitude"))
vla=defVar(ds,"lat",lats,("lat",), attrib = OrderedDict("units" => "degree", "long_name" => "Latitude"))
v1 = defVar(ds,"volume",Float32,("lon","lat","dep"), attrib = OrderedDict("units" => "m^3",))
arr3d.=0
for ii in 1:nb
i,j=pos[ii]
[arr3d[i,j,k]=V[ii,k] for k in 1:nz]
end
v1[:,:,:] = arr3d
v = defVar(ds,"temperature",Float32,("lon","lat","dep","tim"), attrib = OrderedDict("units" => "degree Celsius",))
v.attrib["comments"] = "this is a string attribute with Unicode Ξ© β β β« f(x) dx"
arr4d.=0
for t in 1:nt
for ii in 1:nb
i,j=pos[ii]
[arr4d[i,j,k,t]=H[ii,k,t]*inv_vol[ii,k] for k in 1:nz]
end
end
v[:,:,:,:] = arr4d
close(ds)
file
end
"""
gridpos(M::gridmask,res::Tuple)
```
gridpos(M,(10,5))
```
"""
gridpos(M::gridmask,res::Tuple)=begin
n=length(M.names)
allpos=fill((0,0),n)
for i in 1:n
t1=split(M.names[i],"Nto")
t2=split(t1[2],"N_")
t3=split(t2[2],"Eto")
t4=split(t3[2],"E")
tt=[t1[1] t2[1] t3[1] t4[1]]
tt=parse.(Ref(Int),tt)
dlo=res[1]; dla=res[2]
lons=collect(-180:dlo:180)
lats=[-90 ; -75:dla:75 ; 90]
thispos=(findall(lons.==tt[3])[1],findall(lats.==tt[1])[1])
allpos[i]=thispos
end
allpos
end
##
function write_SLA_PODAAC(gr,data)
fil=joinpath(tempdir(),"podaac_sla_dev.nc")
Dataset(fil,"c",attrib = OrderedDict("title" => "Azores Regional Subset")) do ds
defVar(ds,"SLA",data,("lon","lat","time"), attrib = OrderedDict(
"units" => "m", "long_name" => "Sea Level Anomaly",
"comments" => "source is https://sealevel.nasa.gov/data/dataset/?identifier=SLCP_SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205_2205")),
defVar(ds,"lon",gr.lon[gr.ii],("lon",), attrib = OrderedDict(
"units" => "degree", "long_name" => "Longitude"))
defVar(ds,"lat",gr.lat[gr.jj],("lat",), attrib = OrderedDict(
"units" => "degree", "long_name" => "Latitude"))
end
println("File name :")
fil
end
function write_SLA_CMEMS(lon,lat,data)
fil=joinpath(tempdir(),"cmems_sla_dev.nc")
read_Dataset(fil,"c",attrib = OrderedDict("title" => "Azores Regional Subset")) do ds
defVar(ds,"SLA",data,("lon","lat","time"), attrib = OrderedDict(
"units" => "m", "long_name" => "Sea Level Anomaly",
"comments" => "source is https://my.cmems-du.eu")),
defVar(ds,"lon",lon,("lon",), attrib = OrderedDict(
"units" => "degree", "long_name" => "Longitude"))
defVar(ds,"lat",lat,("lat",), attrib = OrderedDict(
"units" => "degree", "long_name" => "Latitude"))
end
println("File name :")
fil
end
end
module Climatology
pkg_pth=dirname(pathof(Climatology))
## functions that extensions define more specifically
#e.g. read_Dataset : Placeholder to allow NCDatasets extension, which is activated by `using NCDatasets`.
#e.g. read_nctiles_alias : Placeholder to allow the MITgcm extension, which is activated by `using MITgcm`.
"""
read_Dataset
Alias for `NCDatasets.Dataset`; the method is defined by the NCDatasets.jl package extension (activated by `using NCDatasets`).
"""
function read_Dataset end
function ECCOdiags_to_nc end
function write_SST_climatology end
function to_monthly_file end
function plot_examples end
function read_nctiles_alias end
function read_mdsio_alias end
function file_IAP end
function read_IAP end
function write_H_to_T end
function write_SLA_PODAAC end
function write_SLA_CMEMS end
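# Hedged sketch of the extension mechanism noted above: the placeholder functions get
# their methods from package extensions once the corresponding package is loaded
# (`fil` below is a placeholder NetCDF file path).
#
#   using Climatology
#   using NCDatasets        # activates ClimatologyNCDatasetsExt
#   ds=read_Dataset(fil)    # now forwards to NCDatasets.Dataset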
## packages that extensions import from Climatology
import Glob, RollingFunctions, JLD2, Statistics, MeshArrays, Printf, Dates, DataStructures, STAC
import Dataverse.downloads: Downloads
## main set of functions provided by this package
include("types.jl")
include("downloads.jl")
include("OISST.jl")
include("SSH.jl")
include("ECCO.jl")
import Climatology.downloads: get_ecco_files, get_ecco_variable_if_needed, get_ecco_velocity_if_needed
import Climatology.downloads: get_occa_variable_if_needed, get_occa_velocity_if_needed
import Climatology.downloads: ECCOdiags_add, CBIOMESclim_download, MITPROFclim_download
import DataDeps; import DataDeps: @datadep_str
"""
examples()
List of examples provided in Climatology.jl (full paths)
"""
function examples()
nb=joinpath(abspath("/"),split(pathof(Climatology),"/")[2:end-2]...,"examples")
# ex=glob("*/*.jl",nb)
ex_known=("CBIOMES_climatology_plot.jl","ECCO_standard_plots.jl",
"HadIOD_viz.jl","NSLCT_notebook.jl","OptimalTransport_demo.jl")
ex=[glob("*/"*e,nb)[1] for e in ex_known]
end
## export functionalities
export ECCOdiag, SSTdiag, SeaLevelAnomaly
export @datadep_str, ECCOdiags_add
export ECCOdiags_to_nc, write_SST_climatology
export get_ecco_variable_if_needed, get_ecco_velocity_if_needed
export get_occa_variable_if_needed, get_occa_velocity_if_needed
export ECCO, ECCO_helpers, ECCO_io, ECCO_diagnostics, ECCO_procs
export SST_FILES, SST_coarse_grain, SST_processing, SST_timeseries, SST_scenarios
export ScratchSpaces, read_Dataset, plot_examples
export SLA_PODAAC, SLA_CMEMS, SLA_MAIN
## initialize data deps
__init__() = begin
ScratchSpaces.__init__scratch()
downloads.__init__standard_diags()
end
end # module
module ECCO
using Pkg, DataFrames
import Climatology: pkg_pth
"""
ECCO.standard_analysis_setup(pth0::String)
Create temporary run folder `pth` where data folder `pth0` will be linked.
Data folder `pth0` should be the path to ECCO data.
For example:
```
using Climatology, Pkg
pth=ECCO.standard_analysis_setup(ScratchSpaces.ECCO)
```
The `Project.toml` file found in `pth` provides an environment ready for `ECCO` analyses.
This environment can be activated and instantiated:
```
Pkg.activate(pth)
Pkg.instantiate()
```
"""
function standard_analysis_setup(pth0="",sol0="")
#1. setup run folder and create link to ECCO data folder
pth=joinpath(tempdir(),"ECCO_diags_dev");
!isdir(pth) ? mkdir(pth) : nothing
if in(sol0,["r2","r3","r4","r5"])
pth1=joinpath(pth,"ECCOv4"*sol0)
else
pth1=joinpath(pth,sol0)
end
!isdir(pth1) ? mkdir(pth1) : nothing
link0=joinpath(pth1,"diags")
!isfile(link0)&& !islink(link0)&& !isempty(pth0) ? symlink(pth0,link0) : nothing
#2. copy Project.toml to run folder
tmp0=pkg_pth
tmp1=joinpath(tmp0,"..","examples","ECCO","ECCO_standard_Project.toml")
tmp2=joinpath(pth,"Project.toml")
!isfile(tmp2) ? cp(tmp1,tmp2) : nothing
return pth1
end
add_diag!(list,file=tempname(),name="variable",units="unknown",dims=("time",)) = begin
append!(list,DataFrame("file"=>file,"name"=>name,"units"=>units,"dims"=>dims))
end
#time series
function diagnostics_set1(path_in=".")
list=DataFrame("file"=>String[],"name"=>String[],"units"=>String[],"dims"=>Tuple[])
add_diag!(list,joinpath(path_in,"THETA_glo3d","glo3d.jld2"),"temperature_global","degreeC",("time",))
add_diag!(list,joinpath(path_in,"THETA_glo2d","glo2d.jld2"),"temperature_global_level","degreeC",("depth","time"))
add_diag!(list,joinpath(path_in,"SALT_glo3d","glo3d.jld2"),"salinity_global","PSS",("time",))
add_diag!(list,joinpath(path_in,"SALT_glo2d","glo2d.jld2"),"salinity_global_level","PSS",("depth","time"))
add_diag!(list,joinpath(path_in,"trsp","trsp.jld2"),"volume_transport","m3/s",("section","depth","time"))
add_diag!(list,joinpath(path_in,"MHT","MHT.jld2"),"meridional_heat_transport","PW",("latMT","time"))
add_diag!(list,joinpath(path_in,"THETA_zonmean","zonmean.jld2"),"temperature_zonal_level","degreeC",("latZM","depth","time"))
add_diag!(list,joinpath(path_in,"SALT_zonmean","zonmean.jld2"),"salinity_zonal_level","PSS",("latZM","depth","time"))
add_diag!(list,joinpath(path_in,"MXLDEPTH_zonmean2d","zonmean2d.jld2"),"MLD_zonal","m",("latZM","time"))
add_diag!(list,joinpath(path_in,"SSH_zonmean2d","zonmean2d.jld2"),"SSH_zonal","m",("latZM","time"))
add_diag!(list,joinpath(path_in,"SIarea_zonmean2d","zonmean2d.jld2"),"SIarea_zonal","nondimensional",("latZM","time"))
add_diag!(list,joinpath(path_in,"overturn","overturn.jld2"),"overturn","m3/s",("latMT","depth","time"))
list
end
#2d climatologies on ECCO's LLC90 grid
function diagnostics_set2(path_in=".")
list=DataFrame("file"=>String[],"name"=>String[],"units"=>String[],"dims"=>Tuple[])
add_diag!(list,joinpath(path_in,"BSF_clim","BSF.jld2"),"BSF_clim","m3/s",("time",))
add_diag!(list,joinpath(path_in,"MXLDEPTH_clim","MXLDEPTH.jld2"),"MXLDEPTH_clim","m",("time",))
add_diag!(list,joinpath(path_in,"SIarea_clim","SIarea.jld2"),"SIarea_clim","nondimensional",("time",))
add_diag!(list,joinpath(path_in,"SSH_clim","SSH.jld2"),"SSH_clim","m",("time",))
list
end
#3d climatologies on ECCO's LLC90 grid
function diagnostics_set3(path_in=".")
list=DataFrame("file"=>String[],"name"=>String[],"units"=>String[],"dims"=>Tuple[])
add_diag!(list,joinpath(path_in,"THETA_clim","THETA_k01.jld2"),"THETA_clim","degreeC",("time",))
add_diag!(list,joinpath(path_in,"SALT_clim","SALT_k01.jld2"),"SALT_clim","PSS",("time",))
list
end
end
##
module ECCO_helpers
using MeshArrays, TOML, JLD2, Glob
import Climatology: read_Dataset
"""
parameters(P0,params)
Prepare parameter NamedTuple for use in `ECCO_diagnostics.driver`.
`P1=parameters(P0,p)`
is faster than e.g. `parameters(pth,"r2",p)` because the grid and other static fields are copied from `P0` to `P1` rather than reloaded.
"""
function parameters(P,params)
calc=params.calc
nam=params.nam
kk=params.lev
pth_out=dirname(P.pth_out)
if sum(calc.==("overturn","MHT","trsp"))==0
pth_out=joinpath(pth_out,nam*"_"*calc)
else
pth_out=joinpath(pth_out,calc)
end
return (pth_in=P.pth_in,pth_out=pth_out,list_steps=P.list_steps,nt=P.nt,
calc=calc,nam=nam,kk=kk,sol=P.sol,Ξ³=P.Ξ³,Ξ=P.Ξ,LC=P.LC)
end
"""
parameters(pth0::String,sol0::String,params)
Prepare parameter NamedTuple for use in `ECCO_diagnostics.driver`.
For example, to compute zonal mean temperatures at level 5:
```
p=(calc = "zonmean", nam = "THETA", lev = 5)
pth=ECCO.standard_analysis_setup(ScratchSpaces.ECCO)
P0=ECCO_helpers.parameters(pth,"r2",p)
```
or, from a predefined list:
```
list0=ECCO_helpers.standard_list_toml("")
pth=ECCO.standard_analysis_setup(ScratchSpaces.ECCO)
P1=ECCO_helpers.parameters(pth,"r2",list0[1])
```
"""
function parameters(pth0::String,sol0::String,params)
calc=params.calc
nam=params.nam
kk=params.lev
if in(sol0,["r2","r3","r4","r5"])
sol="ECCOv4"*sol0*"_analysis"
pth_in=joinpath(pth0,"ECCOv4"*sol0,"diags")
else
sol=sol0*"_analysis"
pth_in=joinpath(pth0,sol0,"diags")
end
!ispath(pth_in) ? pth_in=joinpath(pth0,"diags") : nothing
list_steps=list_time_steps(pth_in)
if sol0=="r1"||sol0=="r2"
fil=joinpath(pth_in,"THETA","THETA.0001.nc")
if isfile(fil)
nt=read_Dataset(fil) do ds
data = length(ds["tim"][:])
end
else
nt=12
end
elseif sol0=="r3"
nt=288
elseif sol0=="r4"
nt=312
elseif sol0=="r5"
nt=336
else
nt=length(list_steps)
end
pth_out=joinpath(pth0,sol)
if sum(calc.==("overturn","MHT","trsp"))==0
pth_out=joinpath(pth_out,nam*"_"*calc)
else
pth_out=joinpath(pth_out,calc)
end
Ξ³,Ξ,LC=GridLoad_Plus()
P=(pth_in=pth_in,pth_out=pth_out,list_steps=list_steps,nt=nt,
calc=calc,nam=nam,kk=kk,sol=sol,Ξ³=Ξ³,Ξ=Ξ,LC=LC)
end
#STATE/state_3d_set1.0000241020.meta
# 'THETA ' 'SALT ' 'DRHODR '
#TRSP/trsp_3d_set1.0000241020.meta
# 'UVELMASS' 'VVELMASS' 'WVELMASS' 'GM_PsiX ' 'GM_PsiY '
#TRSP/trsp_3d_set3.0000241020.meta
# 'DFxE_TH ' 'DFyE_TH ' 'ADVx_TH ' 'ADVy_TH ' 'DFxE_SLT' 'DFyE_SLT' 'ADVx_SLT' 'ADVy_SLT'
function list_time_steps(pth_in)
println(pth_in)
if !isempty(glob("STATE/state_3d_set1*.data",pth_in))
list=basename.(glob("STATE/state_3d_set1*.data",pth_in))
elseif !isempty(glob("state_3d_set1*.data",pth_in))
list=basename.(glob("state_3d_set1*.data",pth_in))
else
list=[]
end
return list
end
nansum(x) = sum(filter(!isnan,x))
nansum(x,y) = mapslices(nansum,x,dims=y)
function GridLoad_Plus()
G=GridLoad(ID=:LLC90,option=:light)
Ξ³=G.XC.grid
nr=length(G.RC)
hFacC=GridLoadVar("hFacC",Ξ³)
hFacW=GridLoadVar("hFacW",Ξ³)
hFacS=GridLoadVar("hFacS",Ξ³)
mskC=hFacC./hFacC
tmp=[nansum(mskC[i,j].*G.RAC[i]) for j in 1:nr, i in eachindex(G.RAC)]
tot_RAC=nansum(tmp,2)
tmp=[nansum(hFacC[i,j].*G.RAC[i].*G.DRF[j]) for j in 1:nr, i in eachindex(G.RAC)]
tot_VOL=nansum(tmp,2)
G=merge(G,(hFacC=hFacC,hFacW=hFacW,hFacS=hFacS,mskC=mskC,tot_RAC=tot_RAC,tot_VOL=tot_VOL))
LC=LatitudeCircles(-89.0:89.0,G)
return Ξ³,G,LC
end
import Base:push!
function push!(allcalc::Vector{String},allnam::Vector{String},allkk::Vector{Int};
calc="unknown",nam="unknown",kk=1)
push!(allcalc,calc)
push!(allnam,nam)
push!(allkk,kk)
end
function standard_list_toml(fil)
allcalc=String[]
allnam=String[]
allkk=Int[]
push!(allcalc,allnam,allkk;calc="trsp")
push!(allcalc,allnam,allkk;calc="MHT")
push!(allcalc,allnam,allkk;calc="zonmean2d",nam="SIarea")
push!(allcalc,allnam,allkk;calc="zonmean2d",nam="MXLDEPTH")
push!(allcalc,allnam,allkk;calc="zonmean2d",nam="SSH")
push!(allcalc,allnam,allkk;calc="zonmean",nam="THETA")
push!(allcalc,allnam,allkk;calc="glo2d",nam="THETA")
push!(allcalc,allnam,allkk;calc="glo3d",nam="THETA")
push!(allcalc,allnam,allkk;calc="zonmean",nam="SALT")
push!(allcalc,allnam,allkk;calc="glo2d",nam="SALT")
push!(allcalc,allnam,allkk;calc="glo3d",nam="SALT")
push!(allcalc,allnam,allkk;calc="overturn")
[push!(allcalc,allnam,allkk;calc="clim",nam="THETA",kk=kk) for kk in [1 10 20 29 38 44]]
[push!(allcalc,allnam,allkk;calc="clim",nam="SALT",kk=kk) for kk in [1 10 20 29 38 44]]
push!(allcalc,allnam,allkk;calc="clim",nam="SSH")
push!(allcalc,allnam,allkk;calc="clim",nam="MXLDEPTH")
push!(allcalc,allnam,allkk;calc="clim",nam="SIarea")
push!(allcalc,allnam,allkk;calc="clim",nam="BSF")
tmp1=Dict("calc"=>allcalc,"nam"=>allnam,"kk"=>allkk)
if !isempty(fil)
open(fil, "w") do io
TOML.print(io, tmp1)
end
end
out=[(calc=allcalc[i],nam=allnam[i],lev=allkk[i]) for i in 1:length(allcalc)]
return out
end
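# Hedged usage sketch: write the standard task list to a TOML file and inspect the first
# entry (output file name is illustrative; entries follow the `push!` calls above).
#
#   list0=standard_list_toml(joinpath(tempdir(),"ECCO_standard_list.toml"))
#   list0[1]   # (calc = "trsp", nam = "unknown", lev = 1)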
##
function transport_lines()
lonPairs=[]
latPairs=[]
namPairs=[]
push!(lonPairs,[-173 -164]); push!(latPairs,[65.5 65.5]); push!(namPairs,"Bering Strait");
push!(lonPairs,[-5 -5]); push!(latPairs,[34 40]); push!(namPairs,"Gibraltar");
push!(lonPairs,[-81 -77]); push!(latPairs,[28 26]); push!(namPairs,"Florida Strait");
push!(lonPairs,[-81 -79]); push!(latPairs,[28 22]); push!(namPairs,"Florida Strait W1");
push!(lonPairs,[-76 -76]); push!(latPairs,[21 8]); push!(namPairs,"Florida Strait S1");
push!(lonPairs,[-77 -77]); push!(latPairs,[26 24]); push!(namPairs,"Florida Strait E1");
push!(lonPairs,[-77 -77]); push!(latPairs,[24 22]); push!(namPairs,"Florida Strait E2");
push!(lonPairs,[-65 -50]); push!(latPairs,[66 66]); push!(namPairs,"Davis Strait");
push!(lonPairs,[-35 -20]); push!(latPairs,[67 65]); push!(namPairs,"Denmark Strait");
push!(lonPairs,[-16 -7]); push!(latPairs,[65 62.5]); push!(namPairs,"Iceland Faroe");
push!(lonPairs,[-6.5 -4]); push!(latPairs,[62.5 57]); push!(namPairs,"Faroe Scotland");
push!(lonPairs,[-4 8]); push!(latPairs,[57 62]); push!(namPairs,"Scotland Norway");
push!(lonPairs,[-68 -63]); push!(latPairs,[-54 -66]); push!(namPairs,"Drake Passage");
push!(lonPairs,[103 103]); push!(latPairs,[4 -1]); push!(namPairs,"Indonesia W1");
push!(lonPairs,[104 109]); push!(latPairs,[-3 -8]); push!(namPairs,"Indonesia W2");
push!(lonPairs,[113 118]); push!(latPairs,[-8.5 -8.5]); push!(namPairs,"Indonesia W3");
push!(lonPairs,[118 127 ]); push!(latPairs,[-8.5 -15]); push!(namPairs,"Indonesia W4");
push!(lonPairs,[127 127]); push!(latPairs,[-25 -68]); push!(namPairs,"Australia Antarctica");
push!(lonPairs,[38 46]); push!(latPairs,[-10 -22]); push!(namPairs,"Madagascar Channel");
push!(lonPairs,[46 46]); push!(latPairs,[-22 -69]); push!(namPairs,"Madagascar Antarctica");
push!(lonPairs,[20 20]); push!(latPairs,[-30 -69.5]); push!(namPairs,"South Africa Antarctica");
push!(lonPairs,[-76 -72]); push!(latPairs,[21 18.5]); push!(namPairs,"Florida Strait E3");
push!(lonPairs,[-72 -72]); push!(latPairs,[18.5 10]); push!(namPairs,"Florida Strait E4");
lonPairs,latPairs,namPairs
end
function transport_lines(Ξ,pth_trsp)
mkdir(pth_trsp)
lonPairs,latPairs,namPairs=transport_lines()
for ii in 1:length(lonPairs)
lons=Float64.(lonPairs[ii])
lats=Float64.(latPairs[ii])
name=namPairs[ii]
Trsct=Transect(name,lons,lats,Ξ,format=:NamedTuple)
jldsave(joinpath(pth_trsp,"$(Trsct.name).jld2"),
tabC=Trsct.tabC,tabW=Trsct.tabW,tabS=Trsct.tabS);
end
return true
end
function reload_transport_lines(pth_trsp)
list_trsp=readdir(pth_trsp)
ntr=length(list_trsp)
TR=[load(joinpath(pth_trsp,list_trsp[itr])) for itr in 1:ntr]
return list_trsp,MeshArrays.Dict_to_NamedTuple.(TR),ntr
end
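# Hedged workflow sketch for the transport-line helpers above (folder name is illustrative):
#
#   Ξ³,Ξ,LC=GridLoad_Plus()
#   pth_trsp=joinpath(tempdir(),"ECCO_transport_lines")
#   transport_lines(Ξ,pth_trsp)
#   list_trsp,msk_trsp,ntr=reload_transport_lines(pth_trsp)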
end #module ECCO_helpers
## generic read function
module ECCO_io
using MeshArrays
import Climatology: read_nctiles_alias, read_Dataset, read_mdsio_alias
"""
read_monthly(P,nam,t)
Read record `t` for variable `nam` from file locations specified via parameters `P`.
The method used to read `nam` is selected based on `nam`'s value. Methods include:
- `read_monthly_default`
- `read_monthly_SSH`
- `read_monthly_MHT`
- `read_monthly_BSF`
"""
function read_monthly(P,nam,t)
if nam=="SSH"
read_monthly_SSH(P,t)
elseif nam=="MHT"
read_monthly_MHT(P,t)
elseif nam=="BSF"
read_monthly_BSF(P,t)
else
read_monthly_default(P,nam,t)
end
end
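# Hedged usage sketch (assumes `P` is a parameter NamedTuple from `ECCO_helpers.parameters`
# and the month index is within 1:P.nt):
#
#   SSH=read_monthly(P,"SSH",1)
#   T=read_monthly(P,"THETA",1)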
function read_monthly_SSH(P,t)
(; Ξ) = P
ETAN=read_monthly_default(P,"ETAN",t)
sIceLoad=read_monthly_default(P,"sIceLoad",t)
(ETAN+sIceLoad/1029.0)*Ξ.mskC[:,1]
end
function read_monthly_MHT(P,t)
(; Ξ) = P
U=read_monthly_default(P,"ADVx_TH",t)
V=read_monthly_default(P,"ADVy_TH",t)
U=U+read_monthly_default(P,"DFxE_TH",t)
V=V+read_monthly_default(P,"DFyE_TH",t)
[U[i][findall(isnan.(U[i]))].=0.0 for i in eachindex(U)]
[V[i][findall(isnan.(V[i]))].=0.0 for i in eachindex(V)]
nr=size(U,2) # number of vertical levels
Tx=0.0*U[:,1]
Ty=0.0*V[:,1]
[Tx=Tx+U[:,z] for z=1:nr]
[Ty=Ty+V[:,z] for z=1:nr]
return Tx,Ty
end
function read_monthly_BSF(P,t)
(; Ξ) = P
U=read_monthly_default(P,"UVELMASS",t)
V=read_monthly_default(P,"VVELMASS",t)
MeshArrays.UVtoTransport!(U,V,Ξ)
nz=size(Ξ.hFacC,2)
ΞΌ=Ξ.mskC[:,1]
Tx=0.0*U[:,1]
Ty=0.0*V[:,1]
for z=1:nz
Tx=Tx+U[:,z]
Ty=Ty+V[:,z]
end
#convergence & land mask
TrspCon=ΞΌ.*convergence(Tx,Ty)
#scalar potential
TrspPot=ScalarPotential(TrspCon)
#Divergent transport component
(TxD,TyD)=gradient(TrspPot,Ξ)
TxD=TxD.*Ξ.DXC
TyD=TyD.*Ξ.DYC
#Rotational transport component
TxR = Tx-TxD
TyR = Ty-TyD
#vector Potential
TrspPsi=VectorPotential(TxR,TyR,Ξ)
GC.gc()
return TrspPsi
end
function read_monthly_default(P,nam,t)
(; pth_in, sol, list_steps, Ξ³) = P
var_list3d=("THETA","SALT","UVELMASS","VVELMASS",
"ADVx_TH","ADVy_TH","DFxE_TH","DFyE_TH")
if ispath(joinpath(pth_in,"STATE"))
mdsio_list3d=("STATE/state_3d_set1","STATE/state_3d_set1",
"TRSP/trsp_3d_set1","TRSP/trsp_3d_set1","TRSP/trsp_3d_set2",
"TRSP/trsp_3d_set2","TRSP/trsp_3d_set2","TRSP/trsp_3d_set2")
else
mdsio_list3d=("state_3d_set1","state_3d_set1",
"trsp_3d_set1","trsp_3d_set1","trsp_3d_set2",
"trsp_3d_set2","trsp_3d_set2","trsp_3d_set2")
end
var_list2d=("MXLDEPTH","SIarea","sIceLoad","ETAN")
if ispath(joinpath(pth_in,"STATE"))
mdsio_list2d=("STATE/state_2d_set1","STATE/state_2d_set1",
"STATE/state_2d_set1","STATE/state_2d_set1")
else
mdsio_list2d=("state_2d_set1","state_2d_set1","state_2d_set1","state_2d_set1")
end
if (sol=="ECCOv4r1_analysis")||(sol=="ECCOv4r2_analysis")||(sol=="ECCOv4r3_analysis")
nct_path=joinpath(pth_in,nam)
try
if sum(var_list3d.==nam)==1
tmp=read_nctiles_alias(nct_path,nam,Ξ³,I=(:,:,:,t))
else
tmp=read_nctiles_alias(nct_path,nam,Ξ³,I=(:,:,t))
end
catch
error("failed: call to `read_nctiles`
This method is provided by `MITgcm.jl`
and now activated by `using MITgcm` ")
end
elseif (sol=="ECCOv4r4_analysis")
y0=Int(floor((t-1)/12))+1992
m0=mod1(t,12)
nct_path=joinpath(pth_in,nam,string(y0))
m0<10 ? fil=nam*"_$(y0)_0$(m0).nc" : fil=nam*"_$(y0)_$(m0).nc"
tmp0=read_Dataset(joinpath(nct_path,fil))[nam]
til0=Tiles(Ξ³,90,90)
if sum(var_list3d.==nam)==1
nr=size(tmp0,4) # number of vertical levels
tmp=MeshArray(Ξ³,Ξ³.ioPrec,nr)
for i in 1:13, k in 1:50
ff=til0[i].face
ii=collect(til0[i].i)
jj=collect(til0[i].j)
tmp[ff,k][ii,jj]=tmp0[:,:,i,k,1]
end
tmp
else
tmp=MeshArray(Ξ³,Ξ³.ioPrec)
for i in 1:13
ff=til0[i].face
ii=collect(til0[i].i)
jj=collect(til0[i].j)
tmp[ff][ii,jj]=tmp0[:,:,i,1]
end
tmp
end
else
if !isempty(findall(var_list3d.==nam))
fil=mdsio_list3d[ findall(var_list3d.==nam)[1] ]
fil1=joinpath(pth_in,fil*list_steps[t][14:end])
tmp=read_mdsio_alias(fil1,Symbol(nam))
tmp=P.Ξ.mskC*read(tmp,Ξ³)
else
fil=mdsio_list2d[ findall(var_list2d.==nam)[1] ]
tmp=read_mdsio_alias(joinpath(pth_in,fil*list_steps[t][14:end]),Symbol(nam))
tmp=P.Ξ.mskC[:,1]*read(tmp,P.Ξ.XC)
end
end
end
end #module ECCO_io
##
module ECCO_diagnostics
using SharedArrays, Distributed, Printf, JLD2, MeshArrays
import Climatology: ECCO_io, ECCO_helpers
"""
List of variables derived in this module:
- climatologies
- global means
- zonal means
- geographic maps
- transect transports
- MOC, MHT
Sample workflow:
```
## Setup Computation Parameters
@everywhere sol0="r2"
@everywhere nam="THETA"
@everywhere calc="clim"
@everywhere kk=1
## Preliminary Steps
@everywhere include("ECCO_pkg_grid_etc.jl")
@everywhere pth_in,pth_out,pth_tmp,sol,nt,list_steps=ECCO_path_etc(sol0,calc,nam)
!isdir(pth_out) ? mkdir(pth_out) : nothing
!isdir(pth_tmp) ? mkdir(pth_tmp) : nothing
## Main Computation
include("ECCO_standard_analysis.jl")
```
"""
## climatological mean
function comp_clim(P,tmp_m,tmp_s1,tmp_s2,m)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
nm=length(m:12:nt)
tmp_m[:,:,m].=0.0
tmp_s1[:,:,m].=0.0
tmp_s2[:,:,m].=0.0
for t in m:12:nt
tmp=ECCO_io.read_monthly(P,nam,t)
ndims(tmp)>1 ? tmp=tmp[:,kk] : nothing
tmp_m[:,:,m]=tmp_m[:,:,m]+1.0/nm*Ξ³.write(tmp)
tmp_s1[:,:,m]=tmp_s1[:,:,m]+Ξ³.write(tmp)
tmp_s2[:,:,m]=tmp_s2[:,:,m]+Ξ³.write(tmp).^2
end
end
function main_clim(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
tmp_s1 = SharedArray{Float64}(Ξ³.ioSize...,12)
tmp_s2 = SharedArray{Float64}(Ξ³.ioSize...,12)
tmp_m = SharedArray{Float64}(Ξ³.ioSize...,12)
tmp=ECCO_io.read_monthly(P,nam,1)
ndims(tmp)>1 ? nz=size(tmp,2) : nz=1
nz==1 ? kk=1 : nothing
nz>1 ? suff=Printf.@sprintf("_k%02d",kk) : suff=""
@sync @distributed for m in 1:12
comp_clim(P,tmp_m,tmp_s1,tmp_s2,m)
GC.gc()
end
tmp0=read(tmp_m[:],Ξ³)
tmp=1.0/nt*sum(tmp_s1,dims=3)
tmp1=read(tmp[:],Ξ.XC)
tmp=1/nt*sum(tmp_s2,dims=3)-tmp.^2
tmp[findall(tmp.<0.0)].=0.0
tmp=sqrt.(nt/(nt-1)*tmp)
tmp2=read(tmp[:],Ξ.XC)
fil_out=joinpath(pth_out,nam*suff*".jld2")
save(fil_out,"mean",tmp1,"std",tmp2,"mon",tmp0)
return true
end
##
nansum(x) = sum(filter(!isnan,x))
nansum(x,y) = mapslices(nansum,x,dims=y)
## global mean
function comp_glo(P,glo,t)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ) = P
nr=length(Ξ.DRF)
tmp=ECCO_io.read_monthly(P,nam,t)
if calc=="glo2d"
tmp=[nansum(tmp[i,j].*Ξ.RAC[i]) for j in 1:nr, i in eachindex(Ξ.RAC)]
else
tmp=[nansum(tmp[i,j].*Ξ.hFacC[i,j].*Ξ.RAC[i]*Ξ.DRF[j]) for j in 1:nr, i in eachindex(Ξ.RAC)]
end
glo[:,t]=nansum(tmp,2)
end
function main_glo(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ) = P
nr=length(Ξ.DRF)
glo = SharedArray{Float64}(nr,nt)
@sync @distributed for t in 1:nt
comp_glo(P,glo,t)
GC.gc()
end
if calc=="glo2d"
tmp=[glo[r,t]/Ξ.tot_RAC[r] for t in 1:nt, r in 1:nr]
else
tmp=[nansum(glo[:,t])/nansum(Ξ.tot_VOL) for t in 1:nt]
end
save_object(joinpath(pth_out,calc*".jld2"),collect(tmp))
end
##
function comp_msk0(P,msk0,zm0,l)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
nr=length(Ξ.DRF)
lats=load(joinpath(pth_out,calc*"_lats.jld2"),"single_stored_object")
dlat=lats[2]-lats[1]
la0=lats[l]-dlat/2
la1=lats[l]+dlat/2
if la1<0.0
msk=1.0*(Ξ.YC.>=la0)*(Ξ.YC.<la1)
elseif la0>0.0
msk=1.0*(Ξ.YC.>la0)*(Ξ.YC.<=la1)
else
msk=1.0*(Ξ.YC.>=la0)*(Ξ.YC.<=la1)
end
msk[findall(msk.==0.0)].=NaN;
msk0[:,:,l]=write(msk*Ξ.RAC)
tmp2=[nansum(Ξ.mskC[i,j].*msk[i].*Ξ.RAC[i]) for j in 1:nr, i in eachindex(Ξ.RAC)]
zm0[l,:]=1.0 ./nansum(tmp2,2)
end
function zmsum!(tmp1,tmp,msk,idx)
tmp1.=0.0
for j in 1:length(tmp1)
for i in 1:length(idx)
tmp1[j]+=tmp[idx[i],j]*msk[idx[i]]
end
end
end
function comp_zonmean(P,zm,t,msk0,zm0)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
nl=size(msk0,3)
idx0=[findall(msk0[:,:,l].>0) for l in 1:nl]
comp_zonmean(P,zm,t,msk0,zm0,idx0)
end
function comp_zonmean(P,zm,t,msk0,zm0,idx0)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
nr=length(Ξ.DRF)
lats=load(joinpath(pth_out,calc*"_lats.jld2"),"single_stored_object")
nl=length(lats)
tmp=write(ECCO_io.read_monthly(P,nam,t))
tmp[findall(isnan.(tmp))].=0.0
tmp1=zeros(nr)
for l in 1:nl
zmsum!(tmp1,tmp,msk0[:,:,l],idx0[l])
zm[l,:,t]=tmp1.*zm0[l,:]
end
end
function comp_zonmean2d(P,zm,t,msk0,zm0)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
lats=load(joinpath(pth_out,calc*"_lats.jld2"),"single_stored_object")
nl=length(lats)
tmp=ECCO_io.read_monthly(P,nam,t)
for l in 1:nl
mskrac=read(msk0[:,:,l],Ξ³)
tmp1=[nansum(tmp[i].*mskrac[i]) for i in eachindex(Ξ.RAC)]
zm[l,t]=nansum(tmp1)*zm0[l,1]
end
end
function main_zonmean(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ³, Ξ) = P
nr=length(Ξ.DRF)
dlat=2.0
lats=(-90+dlat/2:dlat:90-dlat/2)
save_object(joinpath(pth_out,calc*"_lats.jld2"),collect(lats))
nl=length(lats)
msk0 = SharedArray{Float64}(Ξ³.ioSize...,nl)
zm0 = SharedArray{Float64}(nl,nr)
@sync @distributed for l in 1:nl
comp_msk0(P,msk0,zm0,l)
end
save_object(joinpath(pth_out,calc*"_zm0.jld2"),collect(zm0))
save_object(joinpath(pth_out,calc*"_msk0.jld2"),collect(msk0))
#to speed up main loop, reuse:
#- precomputed msk*RAC once and for all
#- precomputed 1.0./nansum(tmp2,2)
msk0=load(joinpath(pth_out,calc*"_msk0.jld2"),"single_stored_object")
zm0=load(joinpath(pth_out,calc*"_zm0.jld2"),"single_stored_object")
idx0=[findall(msk0[:,:,l].>0) for l in 1:nl]
if (calc=="zonmean")
zm = SharedArray{Float64}(nl,nr,nt)
@sync @distributed for t in 1:nt
comp_zonmean(P,zm,t,msk0,zm0,idx0)
GC.gc()
end
else
zm = SharedArray{Float64}(nl,nt)
@sync @distributed for t in 1:nt
comp_zonmean2d(P,zm,t,msk0,zm0)
GC.gc()
end
end
save_object(joinpath(pth_out,calc*".jld2"),collect(zm))
return true
end
##
function comp_overturn(P,ov,t)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, LC, Ξ) = P
nr=length(Ξ.DRF)
nl=length(LC)
U=ECCO_io.read_monthly(P,"UVELMASS",t)
V=ECCO_io.read_monthly(P,"VVELMASS",t)
MeshArrays.UVtoTransport!(U,V,Ξ)
UV=Dict("U"=>0*U[:,1],"V"=>0*V[:,1],"dimensions"=>["x","y"])
#integrate across latitude circles
for z=1:nr
UV["U"].=U[:,z]
UV["V"].=V[:,z]
[ov[l,z,t]=ThroughFlow(UV,LC[l],Ξ) for l=1:nl]
end
#integrate from bottom
ov[:,:,t]=reverse(cumsum(reverse(ov[:,:,t],dims=2),dims=2),dims=2)
#
true
end
function main_overturn(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, LC, Ξ) = P
nr=length(Ξ.DRF)
nl=length(LC)
ov = SharedArray{Float64}(nl,nr,nt)
@sync @distributed for t in 1:nt
comp_overturn(P,ov,t)
GC.gc()
end
save_object(joinpath(pth_out,calc*".jld2"),collect(ov))
"Done with overturning"
end
##
function comp_MHT(P,MHT,t)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, LC, Ξ) = P
nr=length(Ξ.DRF)
nl=length(LC)
U=ECCO_io.read_monthly(P,"ADVx_TH",t)+ECCO_io.read_monthly(P,"DFxE_TH",t)
V=ECCO_io.read_monthly(P,"ADVy_TH",t)+ECCO_io.read_monthly(P,"DFyE_TH",t)
[U[i][findall(isnan.(U[i]))].=0.0 for i in eachindex(U)]
[V[i][findall(isnan.(V[i]))].=0.0 for i in eachindex(V)]
Tx=0.0*U[:,1]
Ty=0.0*V[:,1]
[Tx=Tx+U[:,z] for z=1:nr]
[Ty=Ty+V[:,z] for z=1:nr]
UV=Dict("U"=>Tx,"V"=>Ty,"dimensions"=>["x","y"])
[MHT[l,t]=1e-15*4e6*ThroughFlow(UV,LC[l],Ξ) for l=1:nl]
end
function main_MHT(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, LC) = P
nl=length(LC)
MHT = SharedArray{Float64}(nl,nt)
@sync @distributed for t in 1:nt
comp_MHT(P,MHT,t)
GC.gc()
end
save_object(joinpath(pth_out,calc*".jld2"),collect(MHT))
"Done with MHT"
end
##
function comp_trsp(P,trsp,t)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol, Ξ) = P
U=ECCO_io.read_monthly(P,"UVELMASS",t)
V=ECCO_io.read_monthly(P,"VVELMASS",t)
MeshArrays.UVtoTransport!(U,V,Ξ)
UV=Dict("U"=>0*U[:,1],"V"=>0*V[:,1],"dimensions"=>["x","y"])
pth_trsp=joinpath(pth_out,"..","ECCO_transport_lines")
list_trsp,msk_trsp,ntr=ECCO_helpers.reload_transport_lines(pth_trsp)
#integrate across transport lines
for z=1:length(Ξ.DRF)
UV["U"].=U[:,z]
UV["V"].=V[:,z]
[trsp[itr,z,t]=ThroughFlow(UV,msk_trsp[itr],Ξ) for itr=1:ntr]
end
end
function main_trsp(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol) = P
list_trsp=readdir(joinpath(pth_out,"..","ECCO_transport_lines"))
ntr=length(list_trsp)
nr=length(P.Ξ.DRF)
trsp = SharedArray{Float64}(ntr,nr,nt)
@sync @distributed for t in 1:nt
comp_trsp(P,trsp,t)
GC.gc()
end
trsp=[(nam=list_trsp[itr],val=trsp[itr,:,:]) for itr=1:ntr]
save_object(joinpath(pth_out,calc*".jld2"),collect(trsp))
"Done with transports"
end
"""
driver(P)
Call the main computation loop selected by the `calc` field of parameters `P`. Methods include:
- `main_clim`
- `main_glo`
- `main_zonmean`
- `main_overturn`
- `main_MHT`
- `main_trsp`
"""
function driver(P)
(; pth_in, pth_out, list_steps, nt, calc, nam, kk, sol) = P
if calc=="clim"
main_clim(P)
elseif (calc=="glo2d")||(calc=="glo3d")
main_glo(P)
elseif (calc=="zonmean")||(calc=="zonmean2d")
main_zonmean(P)
elseif (calc=="overturn")
main_overturn(P)
elseif (calc=="MHT")
main_MHT(P)
elseif (calc=="trsp")
main_trsp(P)
else
println("unknown calc")
end
end
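# Hedged workflow sketch (path and solution name are illustrative; see also the module
# docstring above):
#
#   P=ECCO_helpers.parameters(pth,"r2",(calc="MHT",nam="THETA",lev=1))
#   driver(P)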
end #module ECCO_diagnostics
##
module ECCO_procs
using JLD2, MeshArrays, DataDeps, Statistics, Climatology, TOML
import Climatology: ECCOdiag
function longname(n)
if occursin("_k",n)
ln=split(n,"_k")[1]*" at level "*split(n,"_k")[2]
else
ln=n
end
occursin("BSF",ln) ? ln=replace(ln, "BSF" => "Horizontal Streamfunction (m3/s)") : nothing
occursin("MXLDEPTH",ln) ? ln=replace(ln, "MXLDEPTH" => "Mixed Layer Depth (m)") : nothing
occursin("SIarea",ln) ? ln=replace(ln, "SIarea" => "Ice Concentration (0 to 1)") : nothing
occursin("SSH",ln) ? ln=replace(ln, "SSH" => "Free Surface Height (m)") : nothing
occursin("THETA",ln) ? ln=replace(ln, "THETA" => "Potential Temperature (degree C)") : nothing
occursin("SALT",ln) ? ln=replace(ln, "SALT" => "Salinity (psu)") : nothing
return ln
end
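# Hedged examples of the renaming above:
#
#   longname("THETA_k01")   # -> "Potential Temperature (degree C) at level 01"
#   longname("MXLDEPTH")    # -> "Mixed Layer Depth (m)"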
function climatology_files(pth_out)
list_clim=readdir(pth_out)
kk=findall(occursin.(Ref("clim"),list_clim))
list_clim=list_clim[kk]
clim_files=[]
for ii in 1:length(list_clim)
tmp=joinpath.(Ref(list_clim[ii]),readdir(joinpath(pth_out,list_clim[ii])))
[push!(clim_files,i) for i in tmp]
end
clim_files
end
##
function years_min_max(sol)
year0=1992
year1=2011
if occursin("ECCOv4r3",sol)
year1=2015
elseif occursin("ECCOv4r4",sol)
year1=2017
elseif occursin("ECCOv4r5",sol)
year1=2019
elseif occursin("OCCA2HR1",sol)
year0=1980
year1=2024
elseif occursin("OCCA2HR2",sol)
year0=1960
year1=2024
end
return year0,year1
end
##
function parameters()
pth=MeshArrays.GRID_LLC90
γ=GridSpec("LatLonCap",pth)
Γ=GridLoad(γ;option="full")
#LC=LatitudeCircles(-89.0:89.0,Γ)
μ = land_mask(Γ)
λ = interpolation_setup()
path0=ECCOdiags_add("OCCA2HR1")
tmp=load(ECCOdiag(path=path0,name="trsp"))
ntr=length(tmp)
list_trsp=[vec(tmp)[i].nam for i in 1:ntr]
list_trsp=[i[1:end-5] for i in list_trsp]
pth_colors=joinpath(dirname(pathof(Climatology)),"..","examples","ECCO")
clim_colors1=TOML.parsefile(joinpath(pth_colors,"clim_colors1.toml"))
clim_colors2=TOML.parsefile(joinpath(pth_colors,"clim_colors2.toml"))
clim_files=climatology_files(path0)
clim_name=[split(basename(f),'.')[1] for f in clim_files]
clim_longname=longname.(clim_name)
#"Done with listing solutions, file names, color codes"
(Ξ³=Ξ³,Ξ=Ξ,Ξ»=Ξ»,ΞΌ=ΞΌ,list_trsp=list_trsp,
clim_colors1=clim_colors1,clim_colors2=clim_colors2,
clim_files=clim_files,clim_name=clim_name,clim_longname=clim_longname)
end
##
function glo(pth_out,nam,k,year0,year1)
nam_full=nam*(k>0 ? "_glo2d" : "_glo3d")
tmp=load(ECCOdiag(path=pth_out,name=nam_full))
occursin("THETA",nam) ? ln=longname("THETA") : ln=longname("SALT")
if k>0
nt=Int(length(tmp[:])./50.0)
tmp=reshape(tmp,(nt,50))
tmp=tmp[:,k]
occursin("THETA",fil) ? rng=[18.0,19.0] : rng=[34.65,34.80]
txt=ln*" -- level $(k)"
k>1 ? rng=[extrema(tmp)...] : nothing
else
nt=length(tmp[:])
occursin("THETA",nam) ? rng=[3.5,3.65] : rng=[34.724,34.728]
txt=ln
end
x=vec(0.5:nt)
x=year0 .+ x./12.0
(y=tmp,txt=txt,rng=rng,x=x)
end
function map(nammap,P,statmap,timemap,pth_out)
ii=findall(P.clim_longname.==nammap)[1]
nam=P.clim_name[ii]; file=nam*".jld2"
nam_full=split(nam,"_")[1]*"_clim"
tmp=load(ECCOdiag(path=pth_out,name=nam_full),file=file,variable=statmap)
tmp=(statmap!=="mon" ? tmp : tmp[:,timemap])
DD=Interpolate(P.μ*tmp,P.λ.f,P.λ.i,P.λ.j,P.λ.w)
DD=reshape(DD,size(P.λ.lon))
#DD[findall(DD.==0.0)].=NaN
statmap=="std" ? rng=P.clim_colors2[nam] : rng=P.clim_colors1[nam]
levs=rng[1] .+collect(0.0:0.05:1.0)*(rng[2]-rng[1])
ttl=P.clim_longname[ii]
(λ=P.λ,field=DD,levels=levs,title=ttl)
end
function TimeLat(namzm,pth_out,year0,year1,cmap_fac,k_zm,P)
fn(x)=transpose(x);
if namzm=="MXLDEPTH"
levs=(0.0:50.0:400.0); cm=:turbo
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nam=namzm*"_zonmean2d"
elseif namzm=="SIarea"
levs=(0.0:0.1:1.0); cm=:turbo
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nam=namzm*"_zonmean2d"
elseif namzm=="THETA"
levs=(-2.0:2.0:34.0); cm=:turbo
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nam=namzm*"_zonmean"
elseif namzm=="SALT"
levs=(32.6:0.2:36.2); cm=:turbo
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nam=namzm*"_zonmean"
elseif (namzm=="ETAN")||(namzm=="SSH")
levs=10*(-0.15:0.02:0.15); cm=:turbo
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nam=namzm*"_zonmean2d"
else
levs=missing
nam="missing"
end
tmp=load(ECCOdiag(path=pth_out,name=nam))
if length(size(tmp))==3
z=fn(tmp[:,k_zm,:])
x=vec(0.5:size(tmp,3))
addon1=" at $(Int(round(P.Ξ.RC[k_zm])))m "
else
z=fn(tmp[:,:])
x=vec(0.5:size(tmp,2))
addon1=""
end
x=year0 .+ x./12.0
ttl="$(longname(namzm)) : Zonal Mean $(addon1)"
(x=x,y=y,z=z,levels=cmap_fac*levs,title=ttl,ylims=(-90.0,90.0),year0=year0,year1=year1)
end
function TimeLatAnom(namzmanom2d,pth_out,year0,year1,cmap_fac,k_zm2d,l0,l1,P)
namzm=namzmanom2d
if namzm=="MXLDEPTH"
levs=(-100.0:25.0:100.0)/2.0; fn=transpose; cm=:turbo
nam=namzm*"_zonmean2d"
elseif namzm=="SIarea"
levs=(-0.5:0.1:0.5)/5.0; fn=transpose; cm=:turbo
nam=namzm*"_zonmean2d"
elseif namzm=="THETA"
levs=(-2.0:0.25:2.0)/5.0; fn=transpose; cm=:turbo
nam=namzm*"_zonmean"
elseif namzm=="SALT"
levs=(-0.5:0.1:0.5)/5.0; fn=transpose; cm=:turbo
nam=namzm*"_zonmean"
elseif (namzm=="ETAN")||(namzm=="SSH")
levs=(-0.5:0.1:0.5)/2.0; fn=transpose; cm=:turbo
nam=namzm*"_zonmean2d"
else
fn=transpose
levs=missing
nam="missing"
end
tmp=load(ECCOdiag(path=pth_out,name=nam))
if length(size(tmp))==3
z=fn(tmp[:,k_zm2d,:])
x=vec(0.5:size(tmp,3));
addon1=" -- at $(Int(round(P.Ξ.RC[k_zm2d])))m "
else
z=fn(tmp[:,:])
x=vec(0.5:size(tmp,2));
addon1=""
end
dlat=2.0; y=vec(-90+dlat/2:dlat:90-dlat/2)
nt=size(z,1)
m0=(1992-year0)*12
if true
#a. subtract monthly mean
ref1="1992-2011 monthy mean"
for m in 1:12
zmean=vec(mean(z[m0+m:12:m0+240,:],dims=1))
[z[t,:]=z[t,:]-zmean for t in m:12:nt]
end
else
#b. subtract time mean
ref1="1992-2011 annual mean"
zmean=vec(mean(z[m0+1:m0+240,:],dims=1))
[z[t,:]=z[t,:]-zmean for t in 1:nt]
end
x=1992.0-m0/12.0 .+ x./12.0
ttl="$(longname(namzm)) -- minus $(ref1) $(addon1)"
(x=x,y=y,z=z,levels=cmap_fac*levs,title=ttl,ylims=(y[l0],y[l1]),year0=year0,year1=year1)
end
fn_DepthTime(x)=transpose(x)
function DepthTime(namzmanom,pth_out,facA,l_Tzm,year0,year1,k0,k1,P)
if namzmanom=="THETA"
levs=(-3.0:0.4:3.0)/8.0; cm=:turbo
elseif namzmanom=="SALT"
levs=(-0.5:0.1:0.5)/10.0;cm=:turbo
else
levs=missing;
end
nam_full=namzmanom*"_zonmean"
tmp=load(ECCOdiag(path=pth_out,name=nam_full))
dlat=2.0
lats=(-90+dlat/2:dlat:90-dlat/2)
z=fn_DepthTime(tmp[l_Tzm,:,:])
addon1=" -- at $(lats[l_Tzm])N "
x=vec(0.5:size(tmp,3));
y=vec(P.Γ.RC)
nt=size(tmp,3)
#a. subtract monthly mean
ref1="1992-2011 monthy mean"
m0=(1992-year0)*12
for m in 1:12
zmean=vec(mean(z[m0+m:12:m0+240,:],dims=1))
[z[t,:]=z[t,:]-zmean for t in m:12:nt]
end
#b. subtract time mean
#ref1="1992-2011 annual mean"
#zmean=vec(mean(z[1:240,:],dims=1))
#[z[t,:]=z[t,:]-zmean for t in 1:nt]
x=year0 .+ x./12.0
ttl="$(longname(namzmanom)) -- minus $(ref1) $(addon1)"
(x=x,y=y,z=z,levels=facA*levs,title=ttl,ylims=(P.Γ.RC[k1],P.Γ.RC[k0]),year0=year0,year1=year1)
end
end #module ECCO_procs
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 15634 | SST_demo_path=joinpath(tempdir(),"demo_OISST")
##
module SST_FILES
using Printf, DataFrames, CSV, Dates, Glob
import Climatology: read_Dataset, SST_demo_path
read_files_list(;path=SST_demo_path,file="oisst_whole_file_list.csv",add_ymd=true) = begin
if add_ymd
add_to_table(CSV.read(joinpath(path,file),DataFrame))
else
CSV.read(joinpath(path,file),DataFrame)
end
end
function add_to_table(list)
ymd!(list)
list.t=collect(1:length(list.day))
list
end
"""
file_lists(path="")
Create file lists and output to csv.
- `whole_file_list.csv` : all files through today's date
- `to_get_file_list.csv` : files that remain to download
Sample file names :
```
url="https://www.ncei.noaa.gov/thredds/dodsC/OisstBase/NetCDF/V2.1/AVHRR/198201/oisst-avhrr-v02r01.19820101.nc"
url="https://www.ncei.noaa.gov/thredds/fileServer/OisstBase/NetCDF/V2.1/AVHRR/198201/oisst-avhrr-v02r01.19820101.nc"
```
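
A usage sketch (illustrative; the demo folder name is just an example):

```julia
path = joinpath(tempdir(),"demo_OISST")
fil1, fil2 = SST_FILES.file_lists(path=path)   # writes the two csv files under `path`
list = SST_FILES.read_files_list(path=path)    # reads oisst_whole_file_list.csv back in
```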
"""
function file_lists(;path=tempname())
#url0="https://www.ncei.noaa.gov/thredds/fileServer/OisstBase/NetCDF/V2.1/AVHRR/"
url0="https://noaa-cdr-sea-surface-temp-optimum-interpolation-pds.s3.amazonaws.com/data/v2.1/avhrr/"
!ispath(path) ? mkdir(path) : nothing
ndays=( today()-Date(1982,1,1) ).value
file_list=DataFrame(fil=String[],url=String[],todo=Bool[])
for t in 1:ndays
dd=Date(1982,1,1)+Dates.Day(t-1)
y=year(dd)
m=month(dd)
d=day(dd)
url=@sprintf "%s%04i%02i%s%04i%02i%02i.nc" url0 y m "/oisst-avhrr-v02r01." y m d
fil=@sprintf "%s/%04i%02i%s%04i%02i%02i.nc" path y m "/oisst-avhrr-v02r01." y m d
push!(file_list,(fil=fil,url=url,todo=!isfile(fil)))
end
fil1=joinpath(path,"oisst_whole_file_list.csv")
CSV.write(fil1,file_list)
fil2=joinpath(path,"oisst_to_get_file_list.csv")
CSV.write(fil2,file_list[file_list.todo,:])
return fil1,fil2
end
function ersst_file_lists(;path=SST_demo_path)
url0="https://www.ncei.noaa.gov/pub/data/cmb/ersst/v5/netcdf/"
nmonths=(2023-1854)*12+7
file_list=DataFrame(fil=String[],url=String[],todo=Bool[])
for t in 1:nmonths
dd=Date(1854,1,1)+Dates.Month(t-1)
y=year(dd)
m=month(dd)
d=day(dd)
url=@sprintf "%s%s%04i%02i.nc" url0 "ersst.v5." y m
fil=@sprintf "files_ersst/ersst.v5.%04i%02i.nc" y m
push!(file_list,(fil=fil,url=url,todo=!isfile(fil)))
end
fil1=joinpath(path,"ersst_whole_file_list.csv")
CSV.write(fil1,file_list)
fil2=joinpath(path,"ersst_to_get_file_list.csv")
CSV.write(fil2,file_list[file_list.todo,:])
return fil1,fil2
end
"""
test_files(list,ii=[])
Test whether all downloaded files are valid.
```
list=CSV.read("oisst_whole_file_list.csv",DataFrame)
list_pb=sst_files.test_files(list)
[Downloads.download(r.url,r.fil) for r in eachrow(list[list_pb,:])]
```
"""
function test_files(list,ii=[]; print_fails=false)
test=zeros(1,length(list.fil))
isempty(ii) ? jj=collect(1:length(list.fil)) : jj=ii
for f in jj
try
ds=read_Dataset(list.fil[f])
close(ds)
catch e
print_fails ? println(basename(list.fil[f])) : nothing
test[f]=1
end
end
return [i[2] for i in findall(test.==1)]
end
function ymd(f)
tmp=split(f,".")[end-1]
parse.(Int,[tmp[1:4] tmp[5:6] tmp[7:8]])
end
function ymd!(d::DataFrame)
tmp=ymd.(d.fil)
d[!, :year]=[a[1] for a in tmp]
d[!, :month]=[a[2] for a in tmp]
d[!, :day]=[a[3] for a in tmp]
d
end
function monthlymean(gdf,m;path0=pwd(),varname="sst")
list=joinpath.(path0,gdf[m].fil)
ds=read_Dataset(list[1])
tmp=0*ds[varname][:,:,1,1]
[tmp.+=read_Dataset(f)[varname][:,:,1,1] for f in list]
tmp./length(list)
end
###
read_lon_lat(fil) = begin
lon=read_Dataset(fil)["lon"][:]
lat=read_Dataset(fil)["lat"][:]
lon,lat
end
###
"""
read_map(;variable="anom",file="",file_climatology="")
variable can be "sst", "anom", or "anom_recompute"
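
A sketch of typical use (illustrative; `fil` is a daily OISST file and `fil_clim` a
monthly climatology file such as `OISST_mean_monthly_1992_2011.nc`):

```julia
anom = read_map(variable="anom_recompute", file=fil, file_climatology=fil_clim)
```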
"""
function read_map(;variable="anom",file="",file_climatology="")
(year_sst,mon_sst,day_sst)=ymd(file)
isfile(file) ? fil_sst1=file : fil_sst1=file[1:end-3]*"_preliminary.nc"
ds= read_Dataset(fil_sst1)
sst=ds["sst"][:,:,1,1]
anom = ds["anom"][:,:,1,1]
close(ds)
x = if variable=="anom_recompute"
sst_clim = read_Dataset(file_climatology)["sst"][:,:,mon_sst]
sst-sst_clim
elseif variable=="anom"
anom
else
sst
end
x
end
end
##
module SST_coarse_grain
using Statistics, DataFrames, CSV, Glob
import Climatology: read_Dataset, SST_demo_path
@inline areamean(arr,ii,jj,dnl) =
mean(skipmissing(
arr[(ii-1)*dnl.+collect(1:dnl),(jj-1)*dnl.+collect(1:dnl)]
))
function indices(list,dlon=10.0)
dnl=Int(dlon/0.25)
nnl=Int(720/dnl)
fil=(isfile(list.fil[1]) ? list.fil[1] : list.fil[1][1:end-3]*"_preliminary.nc")
println(fil)
arr=read_Dataset(fil)["sst"][:,:]
ii=[ii for ii in 1:nnl*2, jj in 1:nnl]
jj=[jj for ii in 1:nnl*2, jj in 1:nnl]
tmp=[areamean(arr,ii,jj,dnl) for ii in 1:nnl*2, jj in 1:nnl]
kk=findall((!isnan).(tmp))
(i=ii[kk],j=jj[kk],k=kk)
end
"""
grid(fil)
Return `(lon=lon,lat=lat,msk=msk,area=area)` based on `fil`.
"""
function grid(fil)
fil=(isfile(fil) ? fil : fil[1:end-3]*"_preliminary.nc")
ds=read_Dataset(fil)
lon=ds["lon"][:]
lat=ds["lat"][:]
msk=ds["sst"][:,:]
msk[ismissing.(msk)].=NaN
msk=1 .+ 0*msk[:,:]
area=[cellarea(lon0,lon0+0.25,lat0,lat0+0.25) for lon0 in 0:0.25:360-0.25, lat0 in -90:0.25:90-0.25]
close(ds)
(lon=lon,lat=lat,msk=msk,area=area)
end
"""
cellarea(lon0,lon1,lat0,lat1)
[source](https://gis.stackexchange.com/questions/29734/how-to-calculate-area-of-1-x-1-degree-cells-in-a-raster)
As a consequence of a theorem of Archimedes, the area of a cell spanning longitudes l0 to l1 (l1 > l0) and latitudes f0 to f1 (f1 > f0) is
```(sin(f1) - sin(f0)) * (l1 - l0) * R^2```
where
- l0 and l1 are expressed in radians (not degrees or whatever).
- l1 - l0 is calculated modulo 2*pi (e.g., -179 - 181 = 2 degrees, not -362 degrees).
- R is the authalic Earth radius, almost exactly 6371 km.
!!! note
As a quick check, the entire globe area can be computed by letting `l1 - l0 = 2pi`, `f1 = pi/2`, `f0 = -pi/2`. The result is `4 * Pi * R^2`.
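
A small sketch of how the output scales with latitude (values in m^2; the ratio is
approximately `cosd(60.5)/cosd(0.5)`):

```julia
a_eq = cellarea(0.0, 1.0, 0.0, 1.0)    # 1x1 degree cell at the equator, ~1.24e10 m^2
a_60 = cellarea(0.0, 1.0, 60.0, 61.0)  # same-size cell at 60N
a_60 / a_eq                            # ~0.49
```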
"""
function cellarea(lon0,lon1,lat0,lat1)
EarthRadius = 6371.0
#f0=20; f1=21; l0=349; l1=350;
#f0=-90; f1=90; l0=0; l1=360;
1e6 * (sind(lat1) - sind(lat0)) * mod1(deg2rad(lon1 - lon0),2pi) * EarthRadius^2
end
@inline nansum(x) = sum(filter(!isnan,x))
@inline nansum(x,y) = mapslices(nansum,x,dims=y)
@inline areaintegral(arr,i::Int,j::Int,G::NamedTuple,dnl) = begin
ii=(i-1)*dnl.+collect(1:dnl)
jj=(j-1)*dnl.+collect(1:dnl)
nansum(arr[ii,jj].*G.msk[ii,jj].*G.area[ii,jj])
end
function calc_zm(G::NamedTuple,df;dlon=10.0)
 dnl=Int(dlon/0.25) #width of a coarse-grained cell, in 1/4 degree grid points
 gdf_tim=groupby(df, :t)
 arr=NaN*zeros(maximum(df.j),length(gdf_tim))
 for k in minimum(df.j):maximum(df.j)
  area_tmp=[areaintegral(G.msk,x.i,x.j,G,dnl) for x in eachrow(gdf_tim[1])]
area_tmp[gdf_tim[1].j.!==k].=0
tmp1=[sum(tmp1.sst[:].*area_tmp)/sum(area_tmp) for tmp1 in gdf_tim]
arr[k,:].=tmp1
end
return arr
end
"""
    merge_files(;path=SST_demo_path,variable="sst",dlon=10.0)
Merge all files found in chosen path.
"""
function merge_files(;path=SST_demo_path,variable="sst",dlon=10.0)
path0=dirname(file_root(path=path,variable=variable))
file_list=glob("$(variable)_lowres*csv",path0)
df=DataFrame(i=Int[],j=Int[],t=Int[],sst=Float32[])
[lowres_append!(df,f) for f in file_list]
CSV.write(joinpath(path,"lowres_oisst_$(variable)_$(dlon).csv"),df)
end
function lowres_append!(df,f)
tmp=CSV.read(f,DataFrame)
tmp.t.=parse(Int,split(basename(f),"_")[end][1:8])
append!(df,tmp)
return tmp
end
file_root(;path=SST_demo_path,variable="sst") = joinpath(path,"$(variable)_lowres_files","$(variable)_lowres_")
"""
lowres_read(;path=SST_demo_path,fil="lowres_oisst_sst_10.0.csv")
Read the coarse-grained SST table (e.g. `lowres_oisst_sst_10.0.csv`) and group it by grid cell (`i`,`j`).
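
A usage sketch (assumes the precomputed OISST statistics have been downloaded):

```julia
path_stats = Climatology.downloads.OISST_stats_download()
(df, gdf, kdf) = lowres_read(path=path_stats, fil="lowres_oisst_sst_10.0.csv")
kdf0 = kdf[lowres_index(205, 25, kdf)]   # grid cell closest to 205E, 25N
```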
"""
function lowres_read(;path=SST_demo_path,fil="lowres_oisst_sst_10.0.csv")
fil=joinpath(path,fil)
df=CSV.read(fil,DataFrame)
gdf=groupby(df, [:i, :j])
kdf=keys(gdf)
return (df,gdf,kdf)
end
function lowres_index(lon0,lat0,kdf)
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
dx=Int(360/maximum(i))
(ii,jj)=(dx*i.-dx/2,dx*j.-dx/2 .-90)
d=(ii .-lon0).^2 .+ (jj .-lat0).^2
findall(d.==minimum(d))[1]
end
lowres_position(ii,jj,kdf) = begin
(i,j)=([x.i for x in kdf],[x.j for x in kdf])
dx=Int(360/maximum(i))
(dx*ii.-dx/2,dx*jj.-dx/2 .-90)
end
end
##
module SST_processing
using Distributed, Dataverse, DataFrames
import Dataverse.downloads: Downloads
import Climatology: SST_FILES, SST_coarse_grain, read_Dataset
import Climatology: SST_demo_path, to_monthly_file, write_SST_climatology
function download_files(;path=SST_demo_path,short_demo=false,verbose=false)
!ispath(path) ? mkdir(path) : nothing
fil,_=SST_FILES.file_lists(path=path)
list=SST_FILES.read_files_list(path=path)
list=(short_demo ? list[end-29:end,:] : list)
n_per_workwer=Int(ceil(length(list.fil)/nworkers()))
if !isempty(list.fil)
@sync @distributed for m in 1:nworkers()
n0=n_per_workwer*(m-1)+1
n1=min(n_per_workwer*m,length(list.fil))
verbose ? println("$(n0),$(n1)") : nothing
for r in eachrow(list[n0:n1,:])
!isdir(dirname(r.fil)) ? mkdir(dirname(r.fil)) : nothing
if !isfile(r.fil)
verbose ? println(r.fil) : nothing
try
Downloads.download(r.url,r.fil)
catch
try
Downloads.download(r.url[1:end-3]*"_preliminary.nc",r.fil[1:end-3]*"_preliminary.nc")
catch
verbose ? println("file not found online : "*r.fil[1:end-3]) : nothing
end
end
end
end
end
else
verbose ? println("no more files to process") : nothing
end
nl=length(list.fil)
tst=fill("",nl)
for ll in 1:nl
if isfile(list.fil[ll])
tst[ll]=list.fil[ll]
elseif isfile(list.fil[ll][1:end-3]*"_preliminary.nc")
 tst[ll]=list.fil[ll][1:end-3]*"_preliminary.nc"
else
tst[ll]=""
end
end
tst[findall((!isempty).(tst))]
end
##
function coarse_grain(;datname="oisst",varname="sst",dlon=10.0,
path=SST_demo_path,short_demo=false)
## setup
list=SST_FILES.read_files_list(file="$(datname)_whole_file_list.csv",path=path,add_ymd=false)
list=(short_demo ? list[end-9:end,:] : list)
ind=SST_coarse_grain.indices(list,dlon)
nt=length(list.fil)
n_per_workwer=Int(ceil(nt/nworkers()))
file_root=SST_coarse_grain.file_root(variable=varname,path=path)
isdir(dirname(file_root)) ? mv(dirname(file_root),tempname()) : nothing
mkdir(dirname(file_root))
## distributed computation
@sync @distributed for m in 1:nworkers()
n0=n_per_workwer*(m-1)+1
n1=min(n_per_workwer*m,length(list.fil))
dnl=Int(dlon/0.25)
nnl=Int(720/dnl)
println("$(n0),$(n1)")
for n in n0:n1
r=list[n,:]
fil=(isfile(r.fil) ? r.fil : r.fil[1:end-3]*"_preliminary.nc")
if isfile(fil)
#calculate
ds=read_Dataset(fil)
tmp=ds[varname][:,:]
sst=[SST_coarse_grain.areamean(tmp,ii,jj,dnl) for ii in 1:nnl*2, jj in 1:nnl]
#save to csv
df=SST_FILES.DataFrame(i=ind.i,j=ind.j,sst=Float32.(sst[ind.k]))
tmp=split(basename(r.fil),".")[2]
SST_FILES.CSV.write(file_root*tmp*".csv",df)
end
end
end
## write to final file
SST_coarse_grain.merge_files(variable=varname,path=path,dlon=dlon)
end
##
function monthly_climatology(;datname="oisst",varname="sst",path=SST_demo_path)
year0=1992; year1=2011
list=SST_FILES.read_files_list(file="$(datname)_whole_file_list.csv",path=path,add_ymd=true)
lon,lat=SST_FILES.read_lon_lat(list.fil[1])
sel=findall([(f.year>=year0 && f.year<=year1) for f in eachrow(list)])
suf="$(year0)_$(year1)_"
gdf=groupby(list[sel,:],:month)
output_path=tempname(); mkdir(output_path)
println("output path="*output_path)
n_per_workwer=Int(ceil(12/nworkers()))
n_per_workwer*nworkers()!==12 ? println("need nworkers to divide 12") : nothing
for varname in ("sst","anom")
@sync @distributed for m in 1:nworkers()
for mm in 1:n_per_workwer
month=(m-1)*n_per_workwer+mm
tmp=SST_FILES.monthlymean(gdf,month,varname=varname)
to_monthly_file(tmp,month,varname=varname,output_path=output_path)
end
end
end
output_file=write_SST_climatology(output_path,year0,year1,lon,lat)
end
end
##
module SST_timeseries
using DataFrames, Statistics, Dates
function calc(input,list; title="", gdf=nothing)
if isa(input,DataFrames.GroupKey)
sst1=gdf[input].sst[:]
else
sst1=input[:]
end
nt=size(sst1,1)
sst2=repeatclim(sst1,list[1:nt,:])
sst3=anom(sst1,list[1:nt,:])
ttl="SST time series"
#isa(input,DataFrames.GroupKey) ? ttl=ttl*"for i="*string(input.i)*", j="*string(input.j) : nothing
!isempty(title) ? ttl=title : nothing
ts=(sst=sst1,clim=sst2,anom=sst3,title=ttl,
year=list.year[1:nt],month=list.month[1:nt],day=list.day[1:nt])
tmp1=calc_quantile(ts)
merge(ts,tmp1)
end
function gdf_clim(list)
sel=findall([(f.year>=1992 && f.year<=2011) for f in eachrow(list)])
groupby(list[sel,:],[:month,:day])
end
@inline clim(sst,list) = [mean(sst[a.t[:]]) for a in gdf_clim(list)]
@inline function anom(sst,list)
c=clim(sst,list)
a=0*sst
for t in 1:length(list.t)
(y,m,d)=(list.year[t],list.month[t],list.day[t])
tt=min(1+(Date(y,m,d)-Date(y,1,1)).value,365)
a[t]=sst[t]-c[tt]
end
a.+median(c)
end
@inline function repeatclim(sst,list)
c=clim(sst,list)
a=0*sst
for t in 1:length(list.t)
(y,m,d)=(list.year[t],list.month[t],list.day[t])
tt=min(1+(Date(y,m,d)-Date(y,1,1)).value,365)
a[t]=c[tt]
end
a
end
##
@inline function calc_quantile(x,msk,yearday,yd)
d0=yearday[yd]
d1=[sum(mod1.( d0 .+ (-2:2),365) .==dd)==1 for dd in yearday]
sel=findall(msk .&& d1)
quantile(x[sel], [0.1, 0.9])
end
@inline function calc_quantile(ts)
x=ts.sst-ts.clim
msk=(ts.year.>=1992 .&& ts.year.<=2011)
yearday=Date.(ts.year,ts.month,ts.day)-Date.(ts.year,1,1)
yearday=min.(1 .+ [yd.value for yd in yearday],365)
ts_low=zeros(365)
ts_high=zeros(365)
for yd in 1:365
ts_low[yd],ts_high[yd]=calc_quantile(x,msk,yearday,yd)
end
(low=ts_low[yearday],high=ts_high[yearday])
end
end
##
module SST_scenarios
function read_temp(fil)
log=readlines(fil)
ii=findall([occursin("tas=",i) for i in log])
nt=length(ii)
tas=zeros(nt)
year=zeros(nt)
for i in 1:nt
tmp=split(log[ii[i]],"=")[2]
tas[i]=parse(Float64,split(tmp,"degC")[1])
year[i]=parse(Float64,split(tmp,"in")[2])
end
year,tas
end
function calc_offset(year_sst,ny,scenario=245)
year1=year_sst+ny
hector_fil="hector_scenarios/temperature_ssp$(scenario).log"
hector_year,hector_tas=read_temp(hector_fil)
y0=findall(hector_year.==year_sst)[1]
y1=findall(hector_year.==year1)[1]
hector_tas[y1]-hector_tas[y0]
end
end | Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 5072 |
module SLA_MAIN
using Dataverse
import Climatology: SeaLevelAnomaly, read_Dataset, Dates, write_SLA_PODAAC, write_SLA_CMEMS
import Base: read
#fil=["sla_podaac.nc","sla_cmems.nc"]
function read(x::SeaLevelAnomaly)
ID=x.name
path=x.path
fil=string(ID)*".nc"
sla_file=joinpath(path,fil)
!isdir(path) ? mkdir(path) : nothing
if !isfile(sla_file)
DOI="doi:10.7910/DVN/OYBLGK"
lst=Dataverse.file_list(DOI)
Dataverse.file_download(lst,fil,path)
end
ds=read_Dataset(sla_file)
op=(dates=sla_dates(sla_file),)
SeaLevelAnomaly(name=x.name,path=path,data=[ds],options=op)
end
podaac_date(n)=Dates.Date("1992-10-05")+Dates.Day(5*n)
podaac_sample_dates=podaac_date.(18:73:2190)
cmems_date(n)=Dates.Date("1993-01-01")+Dates.Day(1*n)
podaac_all_dates=podaac_date.(1:2190)
cmems_all_dates=cmems_date.(1:10632)
sla_dates(fil) = ( basename(fil)=="sla_podaac.nc" ? podaac_all_dates : cmems_all_dates)
end
##
module SLA_PODAAC
using Dates, DataStructures
import Climatology: Downloads, read_Dataset, write_SLA_PODAAC
#note : this needs up-to-date credentials in ~/.netrc and ~/.ncrc
url0="https://opendap.earthdata.nasa.gov/collections/C2270392799-POCLOUD/granules/"
##url0="https://podaac-tools.jpl.nasa.gov/drive/files/allData/merged_alt/L4/cdr_grid/"
path0=joinpath(pwd(),"SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205")*"/"
#url1="https://opendap.earthdata.nasa.gov/collections/C2102959417-POCLOUD/granules/"
#url=url1*"oscar_currents_interim_20230101.nc"
#path1=joinpath(pwd(),"OSCAR_L4_OC_INTERIM_V2.0")*"/"
"""
get_grid(;url=url0,file="",range_lon=360.0.+(-35.0,-22),range_lat=(34.0,45))
"""
function get_grid(;url=url0,file="",range_lon=360.0.+(-35.0,-22),range_lat=(34.0,45))
if !isempty(file)
fil=file
ds=read_Dataset(fil)
lon=ds["lon"][:]
lat=ds["lat"][:]
else
url=url*"ssh_grids_v2205_1992101012.dap.nc"
# fil=joinpath(tempdir(),"ssh_grids_v2205_1992101012.dap.nc")
fil=Downloads.download(url)
ds=read_Dataset(fil)
lon=Float64.(ds["Longitude"][:])
lat=Float64.(ds["Latitude"][:])
end
ii=findall( (lon.>range_lon[1]) .& (lon.<range_lon[2]) )
jj=findall( (lat.>range_lat[1]) .& (lat.<range_lat[2]) )
(lon=lon,lat=lat,ii=ii,jj=jj,nt=2190,file=fil)
end
function file_name(n)
d0=Date("1992-10-05")
d=d0+Dates.Day(n*5)
dtxt=string(d)
"ssh_grids_v2205_"*dtxt[1:4]*dtxt[6:7]*dtxt[9:10]*"12.nc" #".dap.nc"
end
function read_slice(url,gr)
#fil=Downloads.download(url)
#ds=read_Dataset(fil)
ds=read_Dataset(url)
SLA=ds["SLA"][gr.ii,gr.jj,1]
SLA[ismissing.(SLA)].=NaN
Float64.(SLA)
end
"""
SLA_PODAAC.subset()
For download directions, see [this site](https://podaac.jpl.nasa.gov/dataset/SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205)
```
SLA=read(SeaLevelAnomaly(name="sla_podaac"))
SLA_PODAAC.subset(read_from_file=joinpath(SLA.path,SLA.name*".nc"))
```
"""
function subset(;
path0="SEA_SURFACE_HEIGHT_ALT_GRIDS_L4_2SATS_5DAY_6THDEG_V_JPL2205/",
username="unknown",
password="unknown",
range_lon=360.0.+(-35.0,-22),
range_lat=(34.0,45),
read_from_file="",
save_to_file=false,
)
if !isempty(read_from_file)
gr=SLA_PODAAC.get_grid(file=read_from_file)
ds=read_Dataset(read_from_file)
i0=1; i1=gr.nt
data=ds["SLA"][:,:,:]
else
gr=get_grid(range_lon=range_lon,range_lat=range_lat)
i0=1; i1=gr.nt
data=zeros(length(gr.ii),length(gr.jj),i1-i0+1)
for n=i0:i1
mod(n,100)==0 ? println(n) : nothing
data[:,:,n-i0+1]=read_slice(path0*file_name(n),gr)
end
end
#show(gr)
save_to_file ? write_SLA_PODAAC(gr,data) : data
end
end #module SLA_PODAAC
module SLA_CMEMS
using URIs, DataStructures
import Climatology: Downloads, read_Dataset, write_SLA_CMEMS
"""
SLA_CMEMS.subset()
For download directions, see [this site](https://marine.copernicus.eu)
For data documentation, see [this page](https://data.marine.copernicus.eu/product/SEALEVEL_GLO_PHY_L4_MY_008_047/description)
```
SLA_CMEMS.subset(username=username,password=password)
```
"""
function subset(;
var="cmems_obs-sl_glo_phy-ssh_my_allsat-l4-duacs-0.25deg_P1D",
username="unknown",
password="unknown",
range_lon=(-35.0,-22),
range_lat=(34.0,45),
read_from_file="",
save_to_file=false,
)
if !isempty(read_from_file)
ds=read_Dataset(read_from_file)
SSH=ds["SLA"]
lon=ds["lon"][:]
lat=ds["lat"][:]
else
url="https://my.cmems-du.eu/thredds/dodsC/"*var
url2 = string(URI(URI(url),userinfo = string(username,":",password)))
ds = read_Dataset(url2)
SSH=ds["sla"]
lon=ds["longitude"][:]
lat=ds["latitude"][:]
end
ii=findall( (lon.>range_lon[1]) .& (lon.<range_lon[2]) )
jj=findall( (lat.>range_lat[1]) .& (lat.<range_lat[2]) )
data = SSH[ii,jj,:]
#show(gr)
save_to_file ? write_SLA_CMEMS(lon[ii],lat[jj],data) : data
end
end #module SLA_CMEMS
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 7483 | module ScratchSpaces
using Dataverse, Scratch
using Dataverse.downloads.Downloads
# These will be filled in inside `__init__scratch()`
ECCO = ""
OCCA = ""
CBIOMES = ""
MITprof = ""
# Downloads a resource, stores it within path
function download_dataset(url,path)
fname = joinpath(path, basename(url))
if !isfile(fname)
Downloads.download(url, fname)
end
return fname
end
function __init__scratch()
global ECCO = @get_scratch!("ECCO")
global OCCA = @get_scratch!("OCCA")
global CBIOMES = @get_scratch!("CBIOMES")
global MITprof = @get_scratch!("MITprof")
end
end
##
module downloads
import Climatology: pkg_pth
import Climatology: ScratchSpaces
import Climatology: read_nctiles_alias
using Statistics, MeshArrays
using Dataverse, DataDeps, Glob
## Dataverse Donwloads
"""
    get_ecco_files(γ::gcmgrid,v::String,t=1)
```
using MeshArrays, Climatology, MITgcm
γ=GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
Climatology.get_ecco_variable_if_needed("oceQnet")
tmp=read_nctiles(joinpath(ScratchSpaces.ECCO,"oceQnet/oceQnet"),"oceQnet",γ,I=(:,:,1))
```
"""
function get_ecco_files(γ::gcmgrid,v::String,t=1)
get_ecco_variable_if_needed(v)
try
read_nctiles_alias(joinpath(ScratchSpaces.ECCO,"$v/$v"),"$v",γ,I=(:,:,t))
catch
error("failed: call to `read_nctiles`
This method is provided by `MITgcm.jl`
and now activated by `using MITgcm` ")
end
end
"""
get_ecco_variable_if_needed(v::String)
Download ECCO output for variable `v` to scratch space if needed
"""
function get_ecco_variable_if_needed(v::String)
lst=Dataverse.file_list("doi:10.7910/DVN/3HPRZI")
fil=joinpath(ScratchSpaces.ECCO,v,v*".0001.nc")
if !isfile(fil)
pth1=joinpath(ScratchSpaces.ECCO,v)
lst1=findall([v==n[1:end-8] for n in lst.filename])
!isdir(pth1) ? mkdir(pth1) : nothing
[Dataverse.file_download(lst,v,pth1) for v in lst.filename[lst1]]
end
end
"""
get_ecco_velocity_if_needed()
Download ECCO output for `u,v,w` to scratch space if needed
"""
function get_ecco_velocity_if_needed()
get_ecco_variable_if_needed("UVELMASS")
get_ecco_variable_if_needed("VVELMASS")
get_ecco_variable_if_needed("WVELMASS")
end
"""
get_occa_variable_if_needed(v::String)
Download OCCA output for variable `v` to scratch space if needed
"""
function get_occa_variable_if_needed(v::String)
lst=Dataverse.file_list("doi:10.7910/DVN/RNXA2A")
fil=joinpath(ScratchSpaces.OCCA,v*".0406clim.nc")
!isfile(fil) ? Dataverse.file_download(lst,v,ScratchSpaces.OCCA) : nothing
end
"""
get_occa_velocity_if_needed()
Download OCCA output for `u,v,w` to scratch space if needed
"""
function get_occa_velocity_if_needed()
nams = ("DDuvel","DDvvel","DDwvel","DDtheta","DDsalt")
[get_occa_variable_if_needed(nam) for nam in nams]
"done"
end
## zenodo.org and other downloads
st_d_md(txt="ECCO version 4 release 2") =
"""
Dataset: standard analysis for the $(txt) ocean state estimate.
Authors: Gaël Forget
"""
"""
unpackDV(filepath)
Like DataDeps's `:unpack` but using `Dataverse.untargz`, and removing the `.tar.gz` file afterwards.
"""
function unpackDV(filepath)
tmp_path=Dataverse.untargz(filepath)
tmp_path2=joinpath(tmp_path,basename(filepath)[1:end-7])
tmp_path=(ispath(tmp_path2) ? tmp_path2 : tmp_path)
if isdir(tmp_path)
[mv(p,joinpath(dirname(filepath),basename(p))) for p in glob("*",tmp_path)]
[println(joinpath(dirname(filepath),basename(p))) for p in glob("*",tmp_path)]
rm(filepath)
else
rm(filepath)
mv(tmp_path,joinpath(dirname(filepath),basename(tmp_path)))
end
println("done with unpackDV for "*filepath)
end
"""
__init__standard_diags()
Register data dependency with DataDep.
"""
function __init__standard_diags()
register(DataDep("ECCO4R1-stdiags",st_d_md("ECCO4 release 1"),
["https://zenodo.org/record/6123262/files/ECCOv4r1_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("ECCO4R2-stdiags",st_d_md("ECCO4 release 2"),
["https://zenodo.org/record/6123272/files/ECCOv4r2_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("ECCO4R3-stdiags",st_d_md("ECCO4 release 3"),
["https://zenodo.org/record/6123288/files/ECCOv4r3_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("ECCO4R4-stdiags",st_d_md("ECCO4 release 4"),
["https://zenodo.org/record/6123127/files/ECCOv4r4_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("ECCO4R5-stdiags",st_d_md("ECCO4 release 5"),
["https://zenodo.org/record/7869067/files/ECCOv4r5_rc2_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("OCCA2HR1-stdiags",st_d_md("OCCA2 historical run 1"),
["https://zenodo.org/records/11062685/files/OCCA2HR1_analysis.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("CBIOMES-clim1","CBIOMES global model climatology",
["https://zenodo.org/record/5598417/files/CBIOMES-global-alpha-climatology.nc.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("CBIOMES-PML1","CBIOMES global model climatology",
["https://rsg.pml.ac.uk/shared_files/brj/CBIOMES_ecoregions/ver_0_2_6/gridded_darwin_montly_clim_360_720_ver_0_2_6.nc"]))
register(DataDep("MITprof-clim1","MITprof gridded climatologies",
["https://zenodo.org/record/5101243/files/gcmfaces_climatologies.tar.gz"],
post_fetch_method=unpackDV))
register(DataDep("OISST-stats1","SST climatology and time series",
["https://zenodo.org/records/13736355/files/OISST_stats.tar.gz"],
post_fetch_method=unpackDV))
end
"""
ECCOdiags_add(nam::String)
Add data to the scratch space folder. Known options for `nam` include
"release1", "release2", "release3", "release4", "release5", and "OCCA2HR1".
Under the hood this is the same as:
```
using Climatology
datadep"ECCO4R1-stdiags"
datadep"ECCO4R2-stdiags"
datadep"ECCO4R3-stdiags"
datadep"ECCO4R4-stdiags"
datadep"ECCO4R5-stdiags"
datadep"OCCA2HR1-stdiags"
```
"""
function ECCOdiags_add(nam::String)
withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
if nam=="release1"||nam=="ECCO4R1"
datadep"ECCO4R1-stdiags"
elseif nam=="release2"||nam=="ECCO4R2"
datadep"ECCO4R2-stdiags"
elseif nam=="release3"||nam=="ECCO4R3"
datadep"ECCO4R3-stdiags"
elseif nam=="release4"||nam=="ECCO4R4"
datadep"ECCO4R4-stdiags"
elseif nam=="release5"||nam=="ECCO4R5"
datadep"ECCO4R5-stdiags"
elseif nam=="OCCA2HR1"
datadep"OCCA2HR1-stdiags"
else
println("unknown solution")
end
end
end
"""
MITPROFclim_download()
Download lazy artifact to scratch space.
"""
MITPROFclim_download() = withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
datadep"MITprof-clim1"
end
"""
CBIOMESclim_download()
Download lazy artifact to scratch space.
"""
CBIOMESclim_download(nam="clim1") = withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
if nam=="clim1"
datadep"CBIOMES-clim1"
elseif nam=="PML1"
datadep"CBIOMES-PML1"
else
println("unknown data set")
end
end
"""
    OISST_stats_download()
Download lazy artifact to scratch space.
"""
OISST_stats_download() = withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
datadep"OISST-stats1"
end
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 1268 |
##
abstract type AbstractClimateDiagnostic <: Any end
##
Base.@kwdef struct ECCOdiag <: AbstractClimateDiagnostic
path :: String = tempdir()
name :: String = "unknown"
options :: NamedTuple = NamedTuple()
data :: AbstractArray = []
end
import JLD2: load
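# `load(x::ECCOdiag)` resolves the JLD2 file that holds the requested diagnostic:
# names containing `zonmean` read `name/zonmean.jld2` (falling back to `zonmean2d.jld2`),
# `_glo2d`/`_glo3d` names read `name/glo2d.jld2` (falling back to `glo3d.jld2`), and any
# other name reads `name/name.jld2`, unless `file` is given explicitly.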
load(x::ECCOdiag; file="",variable="single_stored_object") = begin
if occursin("zonmean",x.name)
fil=joinpath(x.path,x.name,"zonmean.jld2")
fil=(ispath(fil) ? fil : joinpath(x.path,x.name,"zonmean2d.jld2"))
elseif occursin("_glo2d",x.name)||occursin("_glo3d",x.name)
fil=joinpath(x.path,x.name,"glo2d.jld2")
fil=(ispath(fil) ? fil : joinpath(x.path,x.name,"glo3d.jld2"))
elseif !isempty(file)
fil=joinpath(x.path,x.name,file)
else
fil=joinpath(x.path,x.name,x.name*".jld2")
end
load(fil,variable)
end
export load
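# Example (sketch; assumes the precomputed OCCA2HR1 diagnostics have been downloaded):
# path0 = ECCOdiags_add("OCCA2HR1")
# trsp = load(ECCOdiag(path=path0, name="trsp"))          # path0/trsp/trsp.jld2
# zm = load(ECCOdiag(path=path0, name="THETA_zonmean"))   # path0/THETA_zonmean/zonmean.jld2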
##
Base.@kwdef struct SSTdiag <: AbstractClimateDiagnostic
path :: String = "unknown"
name :: String = "unknown"
options :: NamedTuple = NamedTuple()
data :: AbstractArray = []
end
##
Base.@kwdef struct SeaLevelAnomaly <: AbstractClimateDiagnostic
path :: String = tempdir()
name :: String = "unknown"
options :: NamedTuple = NamedTuple()
data :: AbstractArray = []
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | code | 7733 | using Test, Climatology, Statistics, MITgcm, CairoMakie, Suppressor
import NCDatasets, NetCDF, MeshArrays
ENV["DATADEPS_ALWAYS_ACCEPT"]=true
p=dirname(pathof(Climatology))
@testset "NCDatasetsExt" begin
G=MeshArrays.Grids_simple.GridLoad_lonlatdep(collect(1000:1000:5000),ones(360,180,5))
M=MeshArrays.Integration.define_sums(grid=G,regions=(10,5))
H=ones(length(M.names),length(M.depths),3)
V=MeshArrays.Integration.volumes(M,G)
Climatology.write_H_to_T(tempname()*".nc",M,G,H,V)
end
@testset "Climatology.jl" begin
## 1. SST
input_path=tempname()
list_downloaded=SST_processing.download_files(path=input_path,short_demo=true)
@test ispath(input_path)
output_path=SST_processing.coarse_grain(path=input_path,short_demo=true)
@test isfile(output_path)
#@everywhere using Climatology, NCDatasets
#output_path=SST_processing.monthly_climatology(path=input_path)
#mv(output_file,joinpath(input_path,basename(output_file)))
(fil1,fil2)=SST_FILES.file_lists(path=input_path)
whole_list=SST_FILES.CSV.read(fil1,SST_FILES.DataFrame)
fil=list_downloaded[end]
lon,lat=SST_FILES.read_lon_lat(fil)
@test isa(lon,Vector)
gr=SST_coarse_grain.grid(fil)
@test isa(gr,NamedTuple)
list_pb=SST_FILES.test_files(whole_list)
@test isa(list_pb,Vector)
(fil1,fil2)=SST_FILES.ersst_file_lists(path=input_path)
@test isfile(fil1)
(df,gdf,kdf)=SST_coarse_grain.lowres_read(path=input_path)
kdf0=kdf[SST_coarse_grain.lowres_index(205,25,kdf)]
(lon1,lat1)=SST_coarse_grain.lowres_position(kdf0.i,kdf0.j,kdf)
#ts=SST_timeseries.calc(kdf0,whole_list,gdf=gdf)
@test isa(df,SST_FILES.DataFrame)
###
path_OISST_stats=Climatology.downloads.OISST_stats_download()
dlon=10.0
dnl=Int(dlon/0.25)
(df,gdf,kdf)=SST_coarse_grain.lowres_read(fil="lowres_oisst_sst_$(dlon).csv",path=path_OISST_stats)
lon0=205; lat0=25
list=SST_FILES.read_files_list(path=input_path)[1:length(unique(df.t)),:]
kdf0=kdf[SST_coarse_grain.lowres_index(lon0,lat0,kdf)]
(lon1,lat1)=SST_coarse_grain.lowres_position(kdf0.i,kdf0.j,kdf)
ts=SST_timeseries.calc(kdf0,list,gdf=gdf)
plot(SSTdiag(options=(plot_type=:by_year,ts=ts)))
options=(plot_type=:by_time,ts=ts,show_anom=false,show_clim=false)
plot(SSTdiag(options=options))
plot(SSTdiag(options=(plot_type=:MHW,ts=ts)))
gdf1=SST_FILES.groupby(df, :t)
tmp1=gdf1[end]
area_tmp=[SST_coarse_grain.areaintegral(gr.msk,x.i,x.j,gr,dnl) for x in eachrow(tmp1)]
glmsst=[sum(tmp1.sst[:].*area_tmp)/sum(area_tmp) for tmp1 in gdf1]
ts_global=SST_timeseries.calc(glmsst,list,title="Global Mean SST")
x=SSTdiag(options=(plot_type=:local_and_global,ts=ts,ts_global=ts_global,kdf0=kdf0))
f=plot(x)
@test isa(f,Figure)
##
path_OISST_stats=Climatology.downloads.OISST_stats_download()
file_climatology=joinpath(path_OISST_stats,"OISST_mean_monthly_1992_2011.nc")
to_map=(field=SST_FILES.read_map(variable="anom",file=fil,file_climatology=file_climatology),
title="test",colorrange=4 .*(-1.0,1.0),colormap=:thermal,
lon=gr.lon,lat=gr.lat,lon1=lon1,lat1=lat1,showgrid=false)
f7=plot(SSTdiag(options=(plot_type=:map,to_map=to_map)))
@test isa(f7,Figure)
##
# zm=SST_coarse_grain.calc_zm(gr,df)
# f5=plot(SSTdiag(options=(plot_type=:TimeLat,zm=zm,title="OISST anomaly")))
# @test isa(f5,Figure)
## 2. ECCO
γ=MeshArrays.GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
Climatology.get_ecco_files(γ,"oceQnet")
tmp=read_nctiles(joinpath(ScratchSpaces.ECCO,"oceQnet/oceQnet"),"oceQnet",γ,I=(:,:,1))
tmp=[mean(tmp[j][findall((!isnan).(tmp[j]))]) for j=1:5]
ref=[19.88214831145215,47.63055475475805,-44.1122401210416,
3.4402271721659816,30.14270126344508]
@test tmp==ref
get_occa_velocity_if_needed()
get_occa_variable_if_needed("DDuvel")
@test isfile(joinpath(ScratchSpaces.OCCA,"DDuvel.0406clim.nc"))
get_ecco_velocity_if_needed()
get_ecco_variable_if_needed("UVELMASS")
@test isdir(joinpath(ScratchSpaces.ECCO,"UVELMASS"))
##
Climatology.MITPROFclim_download()
Climatology.CBIOMESclim_download()
Climatology.ECCOdiags_add("release2")
@test true
##
if true
var_list3d=("THETA","SALT","UVELMASS","VVELMASS",
"ADVx_TH","ADVy_TH","DFxE_TH","DFyE_TH")
var_list2d=("MXLDEPTH","SIarea","sIceLoad","ETAN")
[get_ecco_variable_if_needed(v) for v in var_list3d]
[get_ecco_variable_if_needed(v) for v in var_list2d]
else
get_ecco_variable_if_needed("MXLDEPTH")
end
MeshArrays.GRID_LLC90_download()
pth=ECCO.standard_analysis_setup(ScratchSpaces.ECCO)
list0=ECCO_helpers.standard_list_toml("")
P0=ECCO_helpers.parameters(pth,"r2",list0[4])
!isdir(dirname(P0.pth_out)) ? mkdir(dirname(P0.pth_out)) : nothing
pth_trsp=joinpath(pth,P0.sol,"ECCO_transport_lines")
isdir(pth_trsp) ? mv(pth_trsp,tempname()) : nothing
ECCO_helpers.transport_lines(P0.Γ,pth_trsp)
for k in [collect(1:8)...,12,13,25,26,27,28]
P=ECCO_helpers.parameters(P0,list0[k])
!isdir(P.pth_out) ? mkdir(P.pth_out) : nothing
ECCO_diagnostics.driver(P)
end
fil0=joinpath(P0.pth_out,"zonmean2d.jld2")
@test isfile(fil0)
##
[ECCO_procs.years_min_max(sol) for sol in ("ECCOv4r3","ECCOv4r4","ECCOv4r5","OCCA2HR1","OCCA2HR2")]
sol="ECCO4R2"
year0,year1=ECCO_procs.years_min_max(sol)
pth_out=Climatology.downloads.ECCOdiags_add(sol)
ECCOdiags_to_nc(path_in=datadep"ECCO4R2-stdiags",year1=1992,nt=240)
using CairoMakie
P=ECCO_procs.parameters()
nammap=P.clim_longname[11]
statmap="mean"
timemap=1
plot(ECCOdiag(path=pth_out,name="tbd",options=
(plot_type=:ECCO_map,nammap=nammap,P=P,statmap=statmap,timemap=timemap)))
plot(ECCOdiag(path=pth_out,name="THETA_clim",options=
(plot_type=:ECCO_TimeLat,year0=year0,year1=year1,cmap_fac=1.0,
k=1,P=P,years_to_display=[year0 year1+1])))
l0=1; l1=90
plot(ECCOdiag(path=pth_out,name="THETA_clim",options=
(plot_type=:ECCO_TimeLatAnom,year0=year0,year1=year1,cmap_fac=1.0,
k=1,l0=l0,l1=l1,P=P,years_to_display=[year0 year1+1])))
k0=1; k1=30
plot(ECCOdiag(path=pth_out,name="THETA_clim",options=
(plot_type=:ECCO_DepthTime,facA=1.0,l=28,year0=year0,year1=year1,
k0=k0,k1=k1,P=P,years_to_display=[year0 year1+1])))
plot(ECCOdiag(path=pth_out,name="THETA",options=
(plot_type=:ECCO_GlobalMean,k=0,year0=year0,year1=year1,
years_to_display=[year0 year1+1])))
plot(ECCOdiag(path=pth_out,name="OHT",options=(plot_type=:ECCO_OHT1,)))
plot(ECCOdiag(path=pth_out,name="overturn",options=(plot_type=:ECCO_Overturn2,grid=P.Ξ)))
plot(ECCOdiag(path=pth_out,name="overturn",options=
(plot_type=:ECCO_Overturn1,kk=29,low1="auto",year0=year0,year1=year1,
years_to_display=[year0 year1+1])))
ntr1=P.list_trsp[1]
plot(ECCOdiag(path=pth_out,name="trsp",options=
(plot_type=:ECCO_Transports,namtrs=[ntr1],ncols=1,list_trsp=P.list_trsp,
year0=year0,year1=year1,years_to_display=[year0 year1+1])))
@test ispath(pth_out)
## 3. SSH/SLA
SLA=read(SeaLevelAnomaly(name="sla_podaac"))
f3=plot(SLA)
@test isa(f3,Figure)
file=joinpath(SLA.path,SLA.name*".nc")
gr=SLA_PODAAC.get_grid(file=file)
data=SLA_PODAAC.read_slice(file,gr)
sub=SLA_PODAAC.subset(; read_from_file=file,save_to_file=true)
@test isa(sub,String)
SLA=read(SeaLevelAnomaly(name="sla_cmems"))
file=joinpath(SLA.path,SLA.name*".nc")
sub=SLA_CMEMS.subset(; read_from_file=file,save_to_file=true)
@test isa(sub,String)
end
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | docs | 2099 | # Climatology
[](https://JuliaOcean.github.io/Climatology.jl/dev)
[](https://github.com/JuliaOcean/Climatology.jl/actions/workflows/ci.yml)
[](https://codecov.io/gh/JuliaOcean/Climatology.jl)
[](https://zenodo.org/badge/latestdoi/260376633)
This package is currently focused on downloading, reading, visualizing, and analyzing gridded data sets and [ocean state estimates](http://dx.doi.org/10.5194/gmd-8-3071-2015).
## Tutorial Notebooks
- [Sea Surface Temperature](https://JuliaOcean.github.io/Climatology.jl/dev/examples/sst_anomaly_notebook.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/OISST/sst_anomaly_notebook.jl))
- [Sea Level Anomalies](https://JuliaOcean.github.io/Climatology.jl/dev/examples/SatelliteAltimetry.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/SSH/SatelliteAltimetry.jl)) : sea level anomaly maps derived from altimetry. Sources : NASA/PODAAC, CMEMS.
- [Sea Level Time Series & Maps](https://JuliaOcean.github.io/Climatology.jl/dev/examples/NSLCT_notebook.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/NSLCT/NSLCT_notebook.jl))
- [Physical Ocean, Currents, & Climate](https://JuliaOcean.github.io/Climatology.jl/dev/examples/ECCO_standard_plots.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/ECCO/ECCO_standard_plots.jl))
- [Marine Ecosystems & Biogeochemistry](https://JuliaOcean.github.io/Climatology.jl/dev/examples/CBIOMES_climatology_plot.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/CBIOMES/CBIOMES_climatology_plot.jl))
Please refer to the [docs](https://JuliaOcean.github.io/Climatology.jl/dev) for detail and additional examples.
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | docs | 2732 | ## Intro
Climatologies are readily downloaded and accessed using the [Scratch.jl](https://github.com/JuliaPackaging/Scratch.jl#readme) artifact system as explained below.
## Use Examples
### ECCO
ECCO climatology files can be downloaded using `get_ecco_files`. These files are for version 4 release 2, on the native model grid.
```@example 1
using Climatology
get_ecco_variable_if_needed("ETAN")
using MeshArrays, MITgcm, NetCDF
path=joinpath(ScratchSpaces.ECCO,"ETAN/ETAN")
γ=GridSpec("LatLonCap",MeshArrays.GRID_LLC90)
tmp=read_nctiles(path,"ETAN",γ,I=(:,:,1))
```
Precomputed quantities shown in [ECCO\_standard\_plots.jl](examples/ECCO_standard_plots.html) can be downloaded separately.
```@example 1
Climatology.ECCOdiags_add("release2")
readdir(ScratchSpaces.ECCO)
```
### OCCA
```@example 1
get_occa_variable_if_needed("SIarea")
readdir(ScratchSpaces.OCCA)
```
### CBIOMES
To retrieve the CBIOMES climatology, in the `julia REPL` for example :
```@example 1
withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
path_clim1=datadep"CBIOMES-clim1"
readdir(path_clim1)
end
```
And the files, now found in `datadep"CBIOMES-clim1"`, can then be read using other libraries.
```@example 1
using NCDatasets
path_clim1=datadep"CBIOMES-clim1"
fil=joinpath(path_clim1,"CBIOMES-global-alpha-climatology.nc")
nc=NCDataset(fil,"r")
keys(nc)
```
### MITprof
To retrieve the MITprof climatologies :
```@example 1
withenv("DATADEPS_ALWAYS_ACCEPT"=>true) do
readdir(datadep"MITprof-clim1")
end
```
## Path Names
Gridded fields are mostly retrieved from [Harvard Dataverse](https://dataverse.harvard.edu). These can be relatively large files, compared to the package code, so they are handled lazily (only downloaded when needed). Precomputed diagnostics have also been archived on [zenodo.org](https://zenodo.org).
| Artifact Name | File Type | Download Method |
|:----------------|:----------------:|-----------------:|
| `ScratchSpaces.ECCO` | NetCDF | lazy, by variable, [dataverse](https://dataverse.harvard.edu/dataverse/ECCO?q=&types=dataverses&sort=dateSort&order=desc&page=1) |
| `ScratchSpaces.ECCO` | JLD2 | lazy, whole, [zenodo](https://zenodo.org/record/5773401#.YbQmhS1h3Pg) |
| `datadep"MITprof-clim1"` | binary | lazy, whole, [zenodo](https://zenodo.org/record/5101243#.YXiEci1h1qs) |
| `ScratchSpaces.OCCA` | NetCDF |lazy, by variable, [dataverse](https://dataverse.harvard.edu/dataset.xhtml?persistentId=doi:10.7910/DVN/RNXA2A) |
| `datadep"CBIOMES-clim1"` | NetCDF | lazy, whole, [zenodo](https://zenodo.org/record/5598417#.YoW46C-B3MU) |
## Functions Reference
```@autodocs
Modules = [Climatology.downloads]
```
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | docs | 482 |
The workflow presented here is as follows.
- set up for running analyses of `ECCO estimates`.
- run one computation loop on the `ECCO monthly` files.
```@docs
ECCO.standard_analysis_setup
```
Here is an example of parameters `P` to compute zonal mean temperatures at level 5.
```@docs
ECCO_helpers.parameters
```
The computation loop, over all months, can then be carried out as follows.
```@docs
ECCO_diagnostics.driver
```
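
A minimal end-to-end sketch (adapted from the package tests; the `"r2"` solution name and the index into `list0` are illustrative choices):

```julia
using Climatology
pth = ECCO.standard_analysis_setup(ScratchSpaces.ECCO)
list0 = ECCO_helpers.standard_list_toml("")
P = ECCO_helpers.parameters(pth, "r2", list0[4])
!isdir(dirname(P.pth_out)) ? mkdir(dirname(P.pth_out)) : nothing
!isdir(P.pth_out) ? mkdir(P.pth_out) : nothing
ECCO_diagnostics.driver(P)
```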
```@autodocs
Modules = [Climatology.ECCO_io]
```
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | docs | 6184 |
## Physical Oceanography
- [Sea Surface Temperature](sst_anomaly_notebook.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/OISST/sst_anomaly_notebook.jl)) : plot global mean and regional sea surface temperature (NOAA's OISST).
- [Sea Level Anomalies](SatelliteAltimetry.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/SSH/SatelliteAltimetry.jl)) : gridded satellite altimetry data
- [Sea Level Time Series](NSLCT_notebook.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/NSLCT/NSLCT_notebook.jl)) : plot global mean and regional sea level data from NASA (NASA's NSLCT and ECCO).
- [Ocean State Estimates](ECCO_standard_plots.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/ECCO/ECCO_standard_plots.jl)) : explore ocean transports, climate indices, sub-surface temperature, and many more variables from full ocean state estimates (ECCO, OCCA).
#### Detail
- [sst\_anomaly\_notebook.jl](sst_anomaly_notebook.html) plots a map of SST anomaly, as well as time series (SST and anomalies). Source : NOAA/OISST.
- [SatelliteAltimetry.jl](SatelliteAltimetry.html) plots sea level anomaly maps derived from altimetry. Sources : NASA/PODAAC, CMEMS.
- [NSLCT\_notebook.jl](NSLCT_notebook.html) lets you access sea level data from NASA and Dataverse portals (`HTTP.jl`, `Dataverse.jl`), organize it into tables (`DataFrames.jl`), and plot it (`Makie.jl`).
- [ECCO\_standard\_plots.jl](ECCO_standard_plots.html) lets you explore a wide range of variables derived from gridded time-variable ocean climatologies (ECCO4, OCCA2). The data is retrieved from [dataverse.org](https://dataverse.harvard.edu/dataverse/ECCO), and intermediate results from [zenodo.org](https://zenodo.org). Source: MIT, NASA/ECCO. Source code: [here](https://github.com/JuliaOcean/Climatology.jl/blob/master/examples/ECCO/ECCO_standard_calcs.jl), [here](https://github.com/JuliaOcean/Climatology.jl/blob/master/examples/ECCO/ECCO_standard_loop.jl).
## Marine Ecosystems
- [Plankton, Chemistry, and Light](CBIOMES_climatology_plot.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/CBIOMES/CBIOMES_climatology_plot.jl)) : visualize ocean colour and biomass climatologies estimated using the Darwin3 model.
#### Detail
- The [CBIOMES1](https://github.com/CBIOMES/global-ocean-model) climatology (alpha version) is a global ocean state estimate that covers the period from 1992 to 2011 (ECCO). It is based on Forget et al 2015 for ocean physics [MIT general circulation model](https://mitgcm.readthedocs.io/en/latest/#) and on Dutkiewicz et al 2015 for marine biogeochemistry and ecosystems [Darwin Project model](https://darwin3.readthedocs.io/en/latest/phys_pkgs/darwin.html).
- [CBIOMES\_climatology\_create](https://JuliaOcean.github.io/Climatology.jl/v0.1.13/examples/CBIOMES_model_climatogy.html) (➭ [code link](https://raw.githubusercontent.com/JuliaOcean/Climatology.jl/master/examples/CBIOMES/CBIOMES_climatology_create.jl)) : recreate the CBIOMES-global climatology files
- [OptimalTransport\_demo.jl](OptimalTransport_demo.html) : using optimal transport for model-data comparison (CBIOMES1 vs satellite data).
## Other Notebooks
- [HadIOD\_viz.jl](HadIOD_viz.html) : download, read, and plot a subset of the [HadIOD](https://www.metoffice.gov.uk/hadobs/hadiod/) T/S database
- the suite of examples provided in [OceanRobots.jl](https://juliaocean.github.io/OceanRobots.jl/dev/examples/) that focus on observations collected at sea.
## References
- OCCA1 : [Forget 2010](https://doi.org/10.1175/2009JPO4043.1)
- ECCO4 : [Forget et al 2015](https://gmd.copernicus.org/articles/8/3071/2015/)
- CBIOMES1: [Forget 2018](https://zenodo.org/record/2653669#.YbwAUi1h0ow)
- OCCA2 : [Forget 2024](https://doi.org/10.21203/rs.3.rs-3979671/v1)
## Notes
!!! note
For more on these estimates, and how to use them in Julia, please refer to the following documentation and links therein.
- [OceanRobots.jl](https://juliaocean.github.io/OceanRobots.jl/dev/) : access, analyze, process, and simulate data generated by ocean robots. These ocean observing platforms collect observations in the field, and allow us to monitor climate.
- [MITgcm.jl](https://gaelforget.github.io/MITgcm.jl/dev/) : framework to interact with MITgcm (setup, run, output, plot, etc), CBIOMES, and ECCO output.
- [MeshArrays.jl](https://juliaclimate.github.io/MeshArrays.jl/dev/) : gridded Earth variables, domain decomposition, C-grid support; [Ocean Circulation](https://juliaclimate.github.io/MeshArrays.jl/dev/tutorials/vectors.html), [Geography](https://juliaclimate.github.io/MeshArrays.jl/dev/tutorials/geography.html) tutorials.
- [IndividualDisplacements.jl](https://juliaclimate.github.io/IndividualDisplacements.jl/dev/) : simulation and analysis of materials moving through oceanic and atmospheric flows.
!!! note
For more notebooks involving [CBIOMES](https://cbiomes.org) and related efforts, take a look at the following pages.
- [Marine Ecosystem Notebooks](https://github.com/JuliaOcean/MarineEcosystemNotebooks) : Darwin3 model, ocean color data, gradients field program, and more.
- [JuliaCon2021 workshop](https://github.com/JuliaOcean/MarineEcosystemsJuliaCon2021.jl) : _Modeling Marine Ecosystems At Multiple Scales Using Julia_.
- [PlanktonIndividuals.jl](https://juliaocean.github.io/PlanktonIndividuals.jl/dev/) : simulate the life cycle of ocean phytoplankton cells and their environment.
!!! note
To run the notebook on a local computer or in the cloud, please refer to the [Pluto docs](https://github.com/fonsp/Pluto.jl/wiki). Directions are also provided in the following pages.
- [ECCO\_standard\_plots.jl](https://JuliaOcean.github.io/Climatology.jl/dev/examples/ECCO_standard_plots.html)
- [JuliaClimate How-To](https://juliaclimate.github.io/Notebooks/#directions)
- [ECCO/Julia storymap](https://ecco-group.org/storymaps.htm?id=69)
- [video demonstration](https://www.youtube.com/watch?v=mZevMagHatc&list=PLXO7Tdh24uhPFZ5bph6Y_Q3-CRSfk5cDU)
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.5.11 | 8087dc49bd478bd575d9115bca02d920e29d29f7 | docs | 313 | # Climatology.jl
This package is currently focused on serving and deriving climatologies from [ocean state estimates](http://dx.doi.org/10.5194/gmd-8-3071-2015).
See [Physical Oceanography](@ref) and [Marine Ecosystems](@ref) for examples.
_It is in an early development stage; breaking changes remain likely._
| Climatology | https://github.com/JuliaOcean/Climatology.jl.git |
|
[
"MIT"
] | 0.1.1 | f38d4361b979e4d9e87638a18893a1e48facd446 | code | 403 | using Documenter, Desktop
makedocs(;
format=Documenter.HTML(;
canonical="https://mgkuhn.github.io/Desktop.jl",
repolink="https://github.com/mgkuhn/Desktop.jl",
edit_link="master",
assets=String[],
),
pages=[
"Home" => "index.md",
],
sitename="Desktop.jl",
authors="Markus Kuhn",
)
deploydocs(;
repo="github.com/mgkuhn/Desktop.jl",
)
| Desktop | https://github.com/mgkuhn/Desktop.jl.git |
|
[
"MIT"
] | 0.1.1 | f38d4361b979e4d9e87638a18893a1e48facd446 | code | 5422 | """
Basic GUI desktop interactions, such as opening a URL with
a web browser.
# Example:
```julia
using Desktop
if hasdesktop()
browse_url("https://julialang.org/")
else
@info("No desktop environment available")
end
```
"""
module Desktop
export hasdesktop, browse_url, open_file
using Base.Filesystem
# some standard Win32 API types and constants, see [MS-DTYP]
const BOOL = Cint
const DWORD = Culong
const LPDWORD = Ref{DWORD}
const HWINSTA = Ptr{Cvoid}
const UOI_NAME = 2
# auxiliary function for the Windows part of hasdesktop()
"""
window_station_name()
Query the name of the “[window station](https://docs.microsoft.com/en-us/windows/win32/winstation/window-stations)”
in which the current Win32 process is running. If this
function returns `"WinSta0"`, the process calling it has
access to a GUI desktop.
"""
function window_station_name()
hwinsta = ccall((:GetProcessWindowStation, "user32.dll"),
stdcall, HWINSTA, ())
Base.windowserror("GetProcessWindowStation", hwinsta == C_NULL)
buf = zeros(UInt8, 80)
len = LPDWORD(0)
r = ccall((:GetUserObjectInformationA, "user32.dll"), stdcall, BOOL,
(Ptr{Cvoid}, Cint, Ptr{Cvoid}, DWORD, LPDWORD),
hwinsta, UOI_NAME, buf, sizeof(buf), len)
Base.windowserror("GetUserObjectInformationA", r == 0)
buf[end] = 0
return unsafe_string(pointer(buf))
end
"""
hasdesktop()
Returns `true` if the current process appears to have access to a
graphical desktop environment and is therefore likely to succeed when
invoking GUI functions or applications.
The algorithm used is a platform-dependent heuristic:
- On Microsoft Windows: tests if the current process is running in a
  “window station” called `WinSta0`
- On macOS: checks the has-graphic-access bit in the security session
information of the calling process
- On other platforms: checks if a non-empty environment variable
`DISPLAY` or `WAYLAND_DISPLAY` exists
It only checks the native GUI interface of the respective platform;
e.g. an available X11 server will be ignored on Windows or macOS.
"""
function hasdesktop()
if Sys.iswindows()
return window_station_name() == "WinSta0"
elseif Sys.isapple()
# https://developer.apple.com/documentation/security/1593382-sessiongetinfo
callerSecuritySession = 0xffffffff
sessionHasGraphicAccess = 16
errSessionSuccess = 0
attrs = Ref{Cuint}(0)
r = ccall(:SessionGetInfo,
Cint, (Cuint, Ref{Cuint}, Ref{Cuint}),
callerSecuritySession, C_NULL, attrs)
if r == errSessionSuccess
return (attrs[] & sessionHasGraphicAccess) != 0
else
@error r
end
else
return (!isempty(get(ENV, "DISPLAY", "")) ||
!isempty(get(ENV, "WAYLAND_DISPLAY", "")))
end
end
"""
browse_url(url::AbstractString)
Attempts to launch a web browser to display the document available at
the provided URL or filesystem path.
The success of this function depends on access to a GUI desktop
environment.
See also: [`hasdesktop`](@ref), [`open_file`](@ref)
"""
function browse_url(url::AbstractString)
if Sys.iswindows()
# https://github.com/LOLBAS-Project/LOLBAS/blob/master/Archive-Old-Version/OSLibraries/Url.dll.md
return success(`rundll32.exe url.dll,OpenURL $url`)
elseif Sys.isapple()
# currently requests Safari explicitly, as e.g. Google Chrome
# (if that's the default browser) fails to open the
# Julia documentation index.html due to that file
# commonly being installed with xattr com.apple.quarantine
# https://github.com/JuliaLang/julia/issues/34275
return success(`/usr/bin/open -a safari $url`)
else
for browser in [
"/usr/bin/xdg-open",
"/usr/bin/firefox",
"/usr/bin/google-chrome",
]
if isfile(browser)
return success(`$browser $url`)
end
end
@error "Cannot find a web browser to display $url"
end
end
"""
open_file(path::AbstractString)
Opens a file using a default application that the operating system
or desktop environment associates with this file type.
The success of this function may depend on access to a GUI desktop
environment.
See also: [`hasdesktop`](@ref), [`browse_url`](@ref)
"""
function open_file(path::AbstractString)
if Sys.iswindows()
# https://github.com/LOLBAS-Project/LOLBAS/blob/master/Archive-Old-Version/OSLibraries/Url.dll.md
return success(`rundll32.exe url.dll,FileProtocolHandler $path`)
elseif Sys.isapple()
return success(`/usr/bin/open $path`)
else
for handler in [
"/usr/bin/xdg-open",
"/usr/bin/run-mailcap",
]
if isfile(handler)
return success(`$handler $path`)
end
end
@error "Cannot find an application to open $path"
end
end
# TODO: Should Julia have more functions for basic desktop interaction
# (open, print, edit a file), like an equivalent of
# https://docs.oracle.com/javase/9/docs/api/java/awt/Desktop.html
# as a Base.Desktop module?
# See also similar packages:
# http://www.davidc.net/programming/java/browsing-urls-and-opening-files
# https://github.com/GiovineItalia/Gadfly.jl/blob/master/src/open_file.jl
end # module
| Desktop | https://github.com/mgkuhn/Desktop.jl.git |
|
[
"MIT"
] | 0.1.1 | f38d4361b979e4d9e87638a18893a1e48facd446 | code | 98 | using Desktop
using Test
@testset "Desktop.jl" begin
@test hasdesktop() in (true, false)
end
| Desktop | https://github.com/mgkuhn/Desktop.jl.git |
|
[
"MIT"
] | 0.1.1 | f38d4361b979e4d9e87638a18893a1e48facd446 | docs | 806 | # Desktop
[](https://mgkuhn.github.io/Desktop.jl/stable)
[](https://mgkuhn.github.io/Desktop.jl/dev)
[](https://github.com/mgkuhn/Desktop.jl/actions/workflows/CI.yml?query=branch%3Amaster)
This Julia package provides functions for basic GUI Desktop interactions:
* checking if the current process has access to a desktop environment
* opening a URL with a web browser
* opening a file with the desktop environment's default application
## Example
```julia
using Desktop
if hasdesktop()
browse_url("https://julialang.org/")
else
@info("No desktop environment available.")
end
```
| Desktop | https://github.com/mgkuhn/Desktop.jl.git |
|
[
"MIT"
] | 0.1.1 | f38d4361b979e4d9e87638a18893a1e48facd446 | docs | 82 | # Desktop.jl
```@index
```
```@docs
Desktop
hasdesktop
browse_url
open_file
```
| Desktop | https://github.com/mgkuhn/Desktop.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 800 | using Documenter, SIAMFANLEquations, DocumenterTools
push!(LOAD_PATH, "../src/")
makedocs(
sitename = "SIAMFANLEquations.jl",
authors = "C. T. Kelley",
format = Documenter.HTML(prettyurls = get(ENV, "CI", nothing) == "true"),
pages = Any[
"Home"=>"index.md",
"Solvers"=>Any[
"functions/nsol.md",
"functions/ptcsol.md",
"functions/nsoli.md",
"functions/ptcsoli.md",
"functions/aasol.md",
],
"Scalar Equations"=>Any[
"functions/nsolsc.md",
"functions/ptcsolsc.md",
"functions/secant.md",
],
"Linear Solvers"=>Any["functions/kl_gmres.md", "functions/kl_bicgstab.md"],
],
)
deploydocs(repo = "github.com/ctkelley/SIAMFANLEquations.jl.git")
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 505 | using Documenter, ScalarEquations, DocumenterLaTeX, DocumenterTools
push!(LOAD_PATH, "../src/")
makedocs(
sitename = "ScalarEquations.jl",
authors = "C. T. Kelley",
format = Documenter.HTML(prettyurls = get(ENV, "CI", nothing) == "true"),
pages = Any[
"Home"=>"index.md",
"Scalar Equations"=>Any["Scalar.md"],
"Scalar Equations Functions"=>Any["functions/functions.md", "functions/sptc.md"],
],
)
deploydocs(repo = "github.com/ctkelley/ScalarEquations.jl.git")
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4415 | module SIAMFANLEquations
# Avoiding most implicit imports
using LinearAlgebra: I, axpy!, axpby!, cond, lu, lu!, mul!, norm, qr, qr!
using LinearAlgebra: LinearAlgebra, BLAS
using SparseArrays: SparseArrays, SparseMatrixCSC
using SuiteSparse: SuiteSparse
using BandedMatrices: BandedMatrices, BandedMatrix
using LaTeXStrings: LaTeXStrings
# Export the nonlinear solvers
export nsolsc
export ptcsolsc
export ptcsol
export ptcsoli
export nsol
export nsoli
export aasol
export secant
# nofact is the factorization that does nothing. It is
# a legal option for nsol and ptcsol and I must export it.
export nofact
# Export the linear solvers
export kl_gmres
export kl_bicgstab
# A couple functions the solvers need to manage storage. These are
# from src/Tools/NewtonKrylov_Tools.jl
#
# kstore gets the vectors GMRES needs internally and makes room to
# copy the initial iterate and right side. I use this in the heat
# transfer problem in Chapter 5.
#
export kstore
#
# nkl_init preallocates the vectors nsoli and ptcsoli use internally.
# I need to export this for the continuation code in Chapter 5.
#
export nkl_init
#
#
#export knlstore
#export EvalF!
#export solhistinit
#export armijosc
#export Katv
#export Orthogonalize!
include("Tools/armijo.jl")
include("Tools/PrintError.jl")
include("Tools/FunctionJacobianEvals.jl")
include("Tools/ManageStats.jl")
include("Tools/IterationInit.jl")
include("Tools/ErrorTest.jl")
include("Tools/NewtonKrylov_Tools.jl")
include("Tools/PTCTools.jl")
include("Tools/AA_Tools.jl")
include("Solvers/Chapter1/nsolsc.jl")
include("Solvers/Chapter1/ptcsolsc.jl")
include("Solvers/Chapter1/secant.jl")
include("Solvers/ptcsol.jl")
include("Solvers/ptcsoli.jl")
include("Solvers/nsol.jl")
include("Solvers/nsoli.jl")
include("Solvers/aasol.jl")
include("Solvers/LinearSolvers/kl_gmres.jl")
include("Solvers/LinearSolvers/kl_bicgstab.jl")
include("Solvers/LinearSolvers/Orthogonalize!.jl")
#include("PlotsTables/printhist.jl")
module TestProblems
using SIAMFANLEquations
#using LinearAlgebra: LinearAlgebra, BLAS, Diagonal
using LinearAlgebra: LinearAlgebra, Diagonal
using LinearAlgebra: I, SymTridiagonal, Tridiagonal, axpby!, axpy!
using LinearAlgebra: diagind, dot, ldiv!, ldlt, lu!, mul!, norm
#using LinearAlgebra
#using LinearAlgebra.BLAS
using SparseArrays: SparseArrays, spdiagm
using SuiteSparse: SuiteSparse
using BandedMatrices
using AbstractFFTs: AbstractFFTs, plan_fft, plan_fft!
using FFTW: FFTW
using Printf: Printf
using QuadGK: QuadGK, gauss
export
#Functions
# fcos,
# fpatan,
spitchfork,
# linatan,
sptestp,
sptest,
# ftanx,
# ftanxp,
heqinit,
heqf!,
heqJ!,
HeqFix!,
simple!,
jsimple!,
JVsimple,
heqbos!,
setc!,
chandprint,
bvpinit,
Fbvp!,
Jbvp!,
FBeam!,
FBeamtd!,
BeamJ!,
BeamtdJ!,
beaminit,
ptctest,
pdeF!,
pdeJ!,
Jvec2d,
pdeinit,
pdegminit,
fishinit,
fish2d,
sintv,
isintv,
Pfish2d,
Pvec2d,
Lap2d,
Lap1d,
Dx2d,
Dy2d,
solexact,
l2dexact,
dxexact,
dyexact,
hardleft!,
hardleftFix!,
heat_init,
sn_init,
heat_fixed!,
FCR_heat!,
getrhs,
AxB,
transport_sweep!,
heq_continue,
knl_continue
include("TestProblems/Scalars/spitchfork.jl")
include("TestProblems/Systems/simple!.jl")
include("TestProblems/Systems/Fbvp!.jl")
include("TestProblems/Systems/FBeam!.jl")
include("TestProblems/Systems/Hequation.jl")
include("TestProblems/Systems/EllipticPDE.jl")
include("TestProblems/Systems/PDE_Tools.jl")
include("TestProblems/CaseStudies/CR_Heat.jl")
include("TestProblems/CaseStudies/knl_continue.jl")
include("TestProblems/CaseStudies/heq_continue.jl")
end
module Examples
using SIAMFANLEquations
using SIAMFANLEquations.TestProblems
using LinearAlgebra: LinearAlgebra, I, Tridiagonal, norm, qr!
#using LinearAlgebra: LinearAlgebra, BLAS, I, Tridiagonal, norm, qr!
#using LinearAlgebra.BLAS
using BandedMatrices
export ptciBeam
export ptcBeam
export ivpBeam
export BVP_solve
export nsolheq
export NsolPDE
export NsoliPDE
export PDE_aa
include("Examples/ptciBeam.jl")
include("Examples/ptcBeam.jl")
include("Examples/ivpBeam.jl")
include("Examples/BVP_solve.jl")
include("Examples/NsolPDE.jl")
include("Examples/NsoliPDE.jl")
include("Examples/PDE_aa.jl")
include("Examples/Internal/nsolheq.jl")
end
end # module
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1031 | """
BVP_solve(n = 801, T = Float64; bfact=qr!)
Solve the BVP for the Chapter 2 figures and testing.
"""
function BVP_solve(n = 801, T = Float64; bfact = qr!)
# Set it up
bdata = bvpinit(n, T)
#
U0 = zeros(2n)
FV = zeros(2n)
# Banded matrix with the correct number of bands
FPV = BandedMatrix{T}(Zeros(2n, 2n), (2, 4))
#
# Build the initial iterate
#
BVP_U0!(U0, n, bdata)
#
if bfact == qr!
bvpout = nsol(
Fbvp!,
U0,
FV,
FPV,
Jbvp!;
rtol = 1.e-10,
sham = 1,
pdata = bdata,
jfact = bfact,
)
else
# Test for default of qr. Used for CI only.
bvpout = nsol(Fbvp!, U0, FV, FPV, Jbvp!; rtol = 1.e-10, sham = 1, pdata = bdata)
end
return (bvpout = bvpout, tv = bdata.tv)
end
function BVP_U0!(U0, n, bdata)
tv = bdata.tv
view(U0, 1:2:2n-1) .= exp.(-0.1 .* tv .* tv)
view(U0, 2:2:2n) .= -0.2 .* view(U0, 1:2:2n-1) .* tv
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 711 | """
NsolPDE(n; sham=1, resdec=.5, rtol=1.e-7, atol=1.e-10)
Solve the Elliptic PDE using nsol.jl on an n x n grid. You give me
n and (optionally) sham and resdec and I return the output of nsol.
"""
function NsolPDE(n; sham = 1, resdec = 0.5, rtol = 1.e-7, atol = 1.e-10)
# Get some room for the residual
u0 = zeros(n * n)
FV = copy(u0)
# Get the precomputed data from pdeinit
pdata = pdeinit(n)
# Storage for the Jacobian
J = copy(pdata.D2)
# Call the solver
hout = nsol(
pdeF!,
u0,
FV,
J,
pdeJ!;
rtol = rtol,
atol = atol,
pdata = pdata,
sham = sham,
resdec = resdec,
)
return hout
end
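#
# Hedged usage sketch (added for illustration; not in the original source).
# It compares the sham = 5 default with plain Newton (sham = 1) on a small
# grid. NsolPDE returns the nsol output tuple, so .history holds the
# residual norms. The demo function name and grid size are arbitrary.
#
function NsolPDE_demo(n = 31)
    shamout = NsolPDE(n)              # Jacobian reused for several iterations
    newtout = NsolPDE(n; sham = 1)    # Jacobian rebuilt every iteration
    return (sham_hist = shamout.history, newton_hist = newtout.history)
end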
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1001 | """
NsoliPDE(n; fixedeta=true, eta=.1, lsolver="gmres", restarts = 99)
Solve the Elliptic PDE using nsoli.jl on an n x n grid. You give me
n and (optionally) the iteration parameters and I return the output of nsoli.
"""
function NsoliPDE(
n;
eta = 0.1,
fixedeta = true,
rtol = 1.e-7,
atol = 1.e-10,
Pvec = Pvec2d,
pside = "right",
lsolver = "gmres",
restarts = 99,
)
# Get some room for the residual
u0 = zeros(n * n)
FV = copy(u0)
# Get the precomputed data from pdeinit
pdata = pdeinit(n)
# Storage for the Krylov basis
(lsolver == "gmres") ? (JV = zeros(n * n, restarts + 1)) : JV = zeros(n * n)
pout = nsoli(
pdeF!,
u0,
FV,
JV,
Jvec2d;
rtol = rtol,
atol = atol,
Pvec = Pvec,
pdata = pdata,
eta = eta,
fixedeta = fixedeta,
maxit = 20,
lmaxit = 20,
pside = pside,
lsolver = lsolver,
)
return pout
end
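#
# Hedged usage sketch (added for illustration; not in the original source).
# It runs the GMRES default and then BiCGSTAB on the same problem; both
# paths are wired into NsoliPDE above through the lsolver keyword. The
# demo function name and grid size are arbitrary.
#
function NsoliPDE_demo(n = 31)
    gout = NsoliPDE(n)                         # GMRES, right preconditioning
    bout = NsoliPDE(n; lsolver = "bicgstab")   # BiCGSTAB, same preconditioner
    return (gmres_hist = gout.history, bicgstab_hist = bout.history)
end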
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1197 | """
PDE_aa(n=31, m=3; Vstore=Float64[], pdata=nothing, beta=1.0, maxit=40)
Solve preconditioned convection-diffusion equation with hardwired left
preconditioner using Anderson acceleration.
If you're putting this in a loop, you should allocate Vstore to
zeros(n*n, 3*m+3); the code below checks that Vstore has at least 2*m+4
columns. Otherwise I will make a decision for you and allocate Vstore with
each call to this function. The story on
pdata is the same. If you are calling this several times with the
same value of n, build pdata outside the call.
"""
function PDE_aa(n = 31, m = 3; Vstore = Float64[], pdata = nothing, beta = 1.0, maxit = 40)
#
# Process Vstore and pdata
#
(pdata != nothing) || (pdata = pdeinit(n))
(length(Vstore) > 0) || (Vstore = zeros(n * n, 3 * m + 3))
(mv, nv) = size(Vstore)
dimvtest = ((mv == n * n) && (nv >= 2 * m + 4))
dimvtest || error("Vstore too small")
#
# Call aasol and return the results.
#
u0 = zeros(n * n)
rtol = 1.e-8
atol = 1.e-8
aout = aasol(
hardleftFix!,
u0,
m,
Vstore;
pdata = pdata,
maxit = maxit,
rtol = rtol,
atol = atol,
beta = beta,
)
return aout
end
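#
# Hedged usage sketch (added for illustration; not in the original source).
# When PDE_aa is called repeatedly, the docstring above says to build pdata
# and Vstore once; this demo does exactly that and loops over two values of
# the mixing parameter beta. The 3*m+3 column count matches the dimension
# check inside PDE_aa.
#
function PDE_aa_demo(n = 31, m = 3)
    pdata = pdeinit(n)
    Vstore = zeros(n * n, 3 * m + 3)
    histories = []
    for beta in (1.0, 0.5)
        aout = PDE_aa(n, m; Vstore = Vstore, pdata = pdata, beta = beta)
        push!(histories, aout.history)
    end
    return histories
end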
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2068 | """
ivpBeam(n, dt, stepnum)
Solve the time-dependent beam problem. Return the iteration history
for the figure and the tables.
We use the same initial data as in the PTC example, ptcBeam.jl
"""
function ivpBeam(n, dt, stepnum)
#
# Set up the initial data for the temporal integration and
# the figure.
#
bdata = beaminit(n, dt)
FB = zeros(n)
FR = zeros(n)
zd = zeros(n)
zr = zeros(n - 1)
JB = Tridiagonal(zr, zd, zr)
x = bdata.x
un = x .* (1.0 .- x) .* (2.0 .- x)
un .*= exp.(-10.0 * un)
bdata.UN .= un
nout = []
solhist = zeros(n, stepnum + 1)
solhist[:, 1] .= un
fhist = []
fhistt = []
idid = true
idt = 1
fx = FBeam!(FR, un, bdata)
fxn = norm(fx, Inf)
fxt = FBeamtd!(FR, un, bdata)
push!(fhist, fxn)
#
# Take stepnum time steps and accumulate the data for the book.
# The integration will terminate prematurely if the nonlinear solve fails.
# This can happen if your time step is too large and/or your
# predictor is poor.
#
# I have tuned the time step to make the solver happy and
# we are getting close to steady state.
#
while idt <= stepnum && idid && fxn > 1.e-12
nout = nsol(
FBeamtd!,
un,
FB,
JB,
BeamtdJ!;
pdata = bdata,
atol = 1.e-10,
rtol = 1.e-6,
maxit = 3,
solver = "chord",
)
idid = nout.idid
un = nout.solution
solhist[:, idt+1] .= un
bdata.UN .= un
idt += 1
fx = FBeam!(FR, un, bdata)
fxn = norm(fx, Inf)
push!(fhist, fxn)
push!(fhistt, nout.history[end])
#
# If the predictor satisfies the termination criterion, advance
# in time anyhow?
#
# idid=abs(idid)
end
t = dt * collect(1:1:idt)
zp = zeros(idt)
se = [zp solhist[:, 1:idt]' zp]'
xe = [0.0 x' 1.0]'
return (t = t, se = se, xe = xe, fhist = fhist, fhistt = fhistt)
end
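#
# Hedged usage sketch (added for illustration; not in the original source).
# It integrates the beam with a modest time step and reports how far the
# steady-state residual has dropped. The tuple fields (t, fhist, ...) are
# the ones returned by ivpBeam above; the step count and dt are arbitrary.
#
function ivpBeam_demo(n = 63, dt = 0.02, stepnum = 100)
    out = ivpBeam(n, dt, stepnum)
    println("time steps taken: ", length(out.t))
    println("final steady-state residual: ", out.fhist[end])
    return out
end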
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1324 | """
ptcBeam(n, maxit, delta=.01, lambda=20.0; precision=Float64, keepsolhist=false, jknowsdt=false)
Test PTC for systems on the buckling beam problem.
Compare to Newton, which will converge to the unstable solution.
"""
function ptcBeam(
n,
maxit,
delta = 0.01,
lambda = 20.0;
precision = Float64,
keepsolhist = false,
jknowsdt = false,
)
#
# This is a steady-state computation, so there is no dt in the problem.
#
bdata = beaminit(n, 0.0, lambda)
x = bdata.x
u0 = x .* (1.0 .- x) .* (2.0 .- x)
u0 .*= exp.(-10.0 * u0)
FS = copy(u0)
FPS = precision.(copy(bdata.D2))
if jknowsdt
Jeval = BeamJdt!
else
Jeval = BeamJ!
end
bout = ptcsol(
FBeam!,
u0,
FS,
FPS,
Jeval;
# BeamJ!;
rtol = 1.e-10,
pdata = bdata,
delta0 = delta,
maxit = maxit,
jknowsdt = jknowsdt,
keepsolhist = keepsolhist,
)
if ~jknowsdt
qout = nsol(FBeam!, u0, FS, FPS, BeamJ!; pdata = bdata, sham = 1)
return (bout, qout)
else
return bout
end
end
"""
BeamJdt!(FP, FV, U, dt, bdata)
Evaluates the Jacobian + (1/dt) I for PTC.
"""
function BeamJdt!(FP, FV, U, dt, bdata)
FP .= BeamJ!(FP, FV, U, bdata)
FP .= FP + (1.0 / dt) * I
end
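#
# Hedged usage sketch (added for illustration; not in the original source).
# With jknowsdt = false, ptcBeam above returns the pair (bout, qout):
# bout is the PTC result (the nonnegative steady state) and qout is the
# plain Newton result. The demo function name and arguments are arbitrary.
#
function ptcBeam_demo(n = 63)
    (bout, qout) = ptcBeam(n, 1000, 0.01)
    println("PTC solution max:    ", maximum(bout.solution))
    println("Newton solution max: ", maximum(qout.solution))
    return (ptc = bout, newton = qout)
end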
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1432 | """
ptciBeam(n=63, delta0=1.e-2, PvecKnowsdelta=true, pside = "right";
lsolver="gmres")
Solves the buckling beam problem with ptcsoli. You can play
- left/right preconditioning
- pseudo time step dependent preconditioning
- relationship of delta0 to n (hint, it's not mesh-independent)
"""
function ptciBeam(
n = 63,
delta0 = 1.e-2,
PvecKnowsdelta = true,
pside = "right";
lsolver = "gmres",
)
lambda = 20.0
maxit = 1000
PvecKnowsdelta ? Pvec = ptvbeamdelta : Pvec = ptvbeam
bdata = beaminit(n, 0.0, lambda)
x = bdata.x
u0 = x .* (1.0 .- x) .* (2.0 .- x)
u0 .*= exp.(-10.0 * u0)
FS = copy(u0)
FPJV = zeros(n, 20)
pout = ptcsoli(
FBeam!,
u0,
FS,
FPJV;
lsolver = lsolver,
delta0 = delta0,
pdata = bdata,
lmaxit = 19,
eta = 1.e-2,
rtol = 1.e-10,
maxit = maxit,
Pvec = Pvec,
PvecKnowsdelta = PvecKnowsdelta,
pside = pside,
)
return pout
end
"""
ptvbeamdelta(v, x, bdata)
Precondition buckling beam problem with delta-aware preconditioner.
"""
function ptvbeamdelta(v, x, bdata)
delta = bdata.deltaval[1]
J = bdata.D2 + (1.0 / delta) * I
ptv = J \ v
end
"""
ptvbeam(v, x, bdata)
Precondition buckling beam problem with inverse of high-order term.
"""
function ptvbeam(v, x, bdata)
J = bdata.D2
ptv = J \ v
end
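#
# Hedged usage sketch (added for illustration; not in the original source).
# It calls ptciBeam above twice, once with the delta-aware preconditioner
# and once with the plain inverse of D2, and returns the iteration counts.
# The demo function name and arguments are arbitrary.
#
function ptciBeam_demo(n = 63)
    pa = ptciBeam(n, 1.e-2, true)    # preconditioner sees 1/delta on the diagonal
    pb = ptciBeam(n, 1.e-2, false)   # preconditioner ignores delta
    return (with_delta = length(pa.history), without_delta = length(pb.history))
end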
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 361 | """
nsolheq(x0, FS, FPS, hdata; diff=:fd)
Internal function to run with CI. Nothing to see here, move along.
"""
function nsolheq(x0, FS, FPS, hdata; diff = :fd)
if diff == :fd
heqout = nsol(heqf!, x0, FS, FPS; pdata = hdata, sham = 1)
else
heqout = nsol(heqf!, x0, FS, FPS, heqJ!; pdata = hdata, sham = 1)
end
return heqout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 12658 | """
aasol(GFix!, x0, m, Vstore; maxit=20,
rtol=1.e-10, atol=1.e-10, beta=1.0, pdata=nothing, keepsolhist = false)
C. T. Kelley, 2022
Julia code for Anderson acceleration. Nothing fancy.
Solves fixed point problems x = G(x).
You must allocate storage for the function and fixed point map
history --> in the calling program <-- in the array Vstore.
For an n dimensional problem with Anderson(m), Vstore must have
at least 2m + 4 columns and 3m + 3 is better. If m=0 (Picard) then
V must have at least 4 columns.
Inputs:\n
- GFix!: fixed-point map, the ! indicates that GFix! overwrites G, your
preallocated storage for the function value G=G(xin).\n
So G=GFix!(G,xin) or G=GFix!(G,xin,pdata) returns
G=G(xin).\n
Your GFix function MUST end with --> return G <--. See the example
in the docstrings.
- x0: Initial iterate. It is a vector of size N\n
You should store it as (N) and design G! to use vectors of size (N).
If you use (N,1) consistently instead, the solvers may work, but I make
no guarantees.
- m: depth for Anderson acceleration. m=0 is Picard iteration
- Vstore: Working storage array. For an n dimensional problem Vstore
should have at least 3m+3 columns unless you are storage bound. If storage
is a problem, then you can allocate a minimum of 2m+4 columns. The smaller
allocation exacts a performance penalty, especially for small problems
and small values of m. So for Anderson(3), Vstore should be no smaller
than zeros(N,8) with zeros(N,11) a better choice. Vstore needs to
allocate for the history of differences of the residuals and fixed
point maps. The extra m-1 columns are for storing intermediate results
in the downdating phase of the QR factorization for the coefficient
matrix of the optimization problem. See the notebook or the print book
for the details of this mess.
If m=0, then Vstore needs 4 columns.
Keyword Arguments (kwargs):\n
maxit: default = 20\n
limit on nonlinear iterations\n
rtol and atol: default = 1.e-10\n
relative and absolute error tolerances\n
beta:\n
Anderson mixing parameter. Changes G(x) to (1-beta)x + beta G(x).
Equivalent to accelerating damped Picard iteration. The history
vector is the one for the damped fixed point map, not the original
one. Keep this in mind when comparing results.
pdata:\n
precomputed data for the fixed point map.
Things will go better if you use this rather than hide the data
in global variables within the module for your function.
keepsolhist: default = false\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
Output:\n
- A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
-- solution = converged result
-- functionval = G(solution)
You might want to use functionval as your solution since it's
a Picard iteration applied to the converged Anderson result. If G
is a contraction it will be better than the solution.
-- history = the vector of residual norms (||x-G(x)||) for the iteration
-- stats = named tuple (condhist, alphanorm) of the history of the
condition numbers of the optimization problem
and l1 norm of the coefficients.
This is only for diagnosing
problems and research. condhist[k] and alphanorm[k] are
the condition number and coefficient norm for the optimization
problem that computes iteration k+1 from iteration k.
I record this for iterations k=1, ... until the final iteration
K. So I do not record the stats for k=0 or the final iteration.
We did record the data for the final iteration in Toth/Kelley
2015 at the cost of an extra optimization problem solve.
Since we've already terminated, there's not any point in
collecting that data.\n
Bottom line: if history has length K+1 for iterations
0 ... K, then condhist and alphanorm have length K-1.
-- idid=true if the iteration succeeded and false if not.
-- errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= -2 if || residual || > div_test || residual_0 ||
I have fixed div_test = 1.e4 for now. I terminate
the iteration when this happens to avoid generating
Infs and/or NaNs.
= 10 if no convergence after maxit iterations
-- solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
solhist is an N x K array where N is the length of x and K is the number of
iterations + 1.
### Examples for aasol
#### Duplicate Table 1 from Toth-Kelley 2015.
The final entries in the condition number and coefficient norm statistics
are never used in the computation and we don't compute them in Julia.
See the docstrings, notebook, and the print book for the story on this.
```jldoctest
julia> function tothk!(G, u)
G[1]=cos(.5*(u[1]+u[2]))
G[2]=G[1]+ 1.e-8 * sin(u[1]*u[1])
return G
end
tothk! (generic function with 1 method)
julia> u0=ones(2,); m=2; vdim=3*m+3; Vstore = zeros(2, vdim);
julia> aout = aasol(tothk!, u0, m, Vstore; rtol = 1.e-10);
julia> aout.history
8-element Vector{Float64}:
6.50111e-01
4.48661e-01
2.61480e-02
7.25389e-02
1.53107e-04
1.18513e-05
1.82466e-08
1.04725e-13
julia> [aout.stats.condhist aout.stats.alphanorm]
6×2 Matrix{Float64}:
1.00000e+00 1.00000e+00
2.01556e+10 4.61720e+00
1.37776e+09 2.15749e+00
3.61348e+10 1.18377e+00
2.54948e+11 1.00000e+00
3.67694e+10 1.00171e+00
```
Now we put a mixing or damping parameter in there with beta = .5. This
example is nasty enough to make mixing do well. Keep in mind
that the history is for the damped residual, not the original one.
```
julia> bout=aasol(tothk!, u0, m, Vstore; rtol = 1.e-10, beta=.5);
julia> bout.history
7-element Vector{Float64}:
3.25055e-01
3.70140e-02
1.81111e-03
9.55308e-04
1.25936e-05
1.40854e-09
2.18196e-12
```
#### H-equation example with m=2.
This takes more iterations than
Newton, which should surprise no one.
```jldoctest
julia> n=16; x0=ones(n,); Vstore=zeros(n,20); m=2;
julia> hdata=heqinit(x0,.99);
julia> hout=aasol(HeqFix!, x0, m, Vstore; pdata=hdata);
julia> hout.history
12-element Vector{Float64}:
1.47613e+00
7.47800e-01
2.16609e-01
4.32017e-02
2.66867e-02
6.82965e-03
2.70779e-04
6.51027e-05
7.35581e-07
1.85649e-09
4.94803e-10
5.18866e-12
```
"""
function aasol(
GFix!,
x0,
m,
Vstore;
maxit = 20,
rtol = 1.e-10,
atol = 1.e-10,
beta = 1.0,
pdata = nothing,
keepsolhist = false,
)
#
# Startup
#
# Set up the storage
#
(sol, gx, df, dg, res, DG, QP, Qd, solhist) =
Anderson_Init(x0, Vstore, m, maxit, beta, keepsolhist)
#
# Iteration 1
#
k = 0
~keepsolhist || (@views solhist[:, k+1] .= sol)
gx = EvalF!(GFix!, gx, sol, pdata)
(beta == 1.0) || (gx = betafix!(gx, sol, beta))
copy!(res, gx)
axpy!(-1.0, sol, res)
# res .= gx - sol
resnorm = norm(res)
resnorm_up_bd = 1.e4 * resnorm
tol = rtol * resnorm + atol
ItData = ItStatsA(resnorm)
toosoon = (resnorm <= tol)
if ~toosoon
#
# If we need more iterations, get organized.
#
# sol .= gx
copy!(sol, gx)
alpha = zeros(m + 1)
k = k + 1
~keepsolhist || (@views solhist[:, k+1] .= sol)
(gx, dg, df, res, resnorm) = aa_point!(gx, GFix!, sol, res, dg, df, beta, pdata)
updateHist!(ItData, resnorm)
end
n = length(x0)
RF = zeros(m, m)
RP = zeros(m, m)
ThetA = zeros(m)
TmPReS = zeros(m)
while ((k < maxit) && (resnorm > tol) && ~toosoon && (resnorm < resnorm_up_bd))
if m == 0
alphanrm = 1.0
condit = 1.0
copy!(sol, gx)
# sol .= gx
else
BuildDG!(DG, m, k + 1, dg)
(QP, RP) = aa_qr_update!(QP, RP, df, m, k - 1, Qd)
mk = min(m, k)
@views QA = QP[:, 1:mk]
@views RA = RP[1:mk, 1:mk]
@views theta = ThetA[1:mk]
@views tres = TmPReS[1:mk]
mul!(tres, QA', res)
theta .= RA \ tres
condit = cond(RA)
alphanrm = falpha(alpha, theta, min(m, k))
copy!(sol, gx)
# @views sol .-= DG[:, 1:mk] * theta
@views mul!(sol, DG[:, 1:mk], theta, -1.0, 1.0)
end
updateStats!(ItData, condit, alphanrm)
k += 1
~keepsolhist || (@views solhist[:, k+1] .= sol)
(gx, dg, df, res, resnorm) = aa_point!(gx, GFix!, sol, res, dg, df, beta, pdata)
updateHist!(ItData, resnorm)
end
(idid, errcode) = AndersonOK(resnorm, tol, k, m, toosoon, resnorm_up_bd)
aaout = CloseIteration(sol, gx, ItData, idid, errcode, keepsolhist, solhist)
return aaout
end
"""
BuildDG!(DG,m,k,dg)
Keeps the history of the fixed point map differences
"""
function BuildDG!(DG, m, k, dg)
if m == 1
@views copy!(DG[:, 1], dg)
elseif k > m + 1
for ic = 1:m-1
# @views DG[:, ic] .= DG[:, ic+1]
@views copy!(DG[:, ic], DG[:, ic+1])
end
@views copy!(DG[:, m], dg)
else
@views copy!(DG[:, k-1], dg)
end
end
"""
aa_point!(gx, gfix, sol, res, dg, df, beta, pdata)
Evaluate the fixed point map at the new point.
Keep the books to get ready to update the coefficient matrix
for the optimization problem.
"""
function aa_point!(gx, gfix, sol, res, dg, df, beta, pdata)
# dg .= -gx
copy!(dg, -gx)
gx = EvalF!(gfix, gx, sol, pdata)
(beta == 1.0) || (gx = betafix!(gx, sol, beta))
# dg .+= gx
axpy!(1.0, gx, dg)
# df .= -res
copy!(df, -res)
# res .= gx - sol
# res .= gx
# res .-= sol
copy!(res, gx)
axpy!(-1.0, sol, res)
axpy!(1.0, res, df)
# df .+= res
resnorm = norm(res)
return (gx, dg, df, res, resnorm)
end
"""
betafix!(gx, sol, beta)
Put the mixing parameter beta in the right place.
"""
function betafix!(gx, sol, beta)
gx = axpby!((1.0 - beta), sol, beta, gx)
return gx
end
"""
aa_qr_update!(Q, R, vnew, m, k, Qd)
Update the QR factorization for the Anderson acceleration optimization
problem.
Still need to make the allocation for Qd go away.
"""
function aa_qr_update!(Q, R, vnew, m, k, Qd)
(n, m) = size(Q)
aaqr_dim_check(Q, R, vnew, m, k)
if k == 0
R[1, 1] = norm(vnew)
@views Q[:, 1] .= vnew / norm(vnew)
else
if k > m - 1
downdate_aaqr!(Q, R, m, Qd)
end # inner if block
kq = min(k, m - 1)
update_aaqr!(Q, R, vnew, m, kq)
end # outer if block
return (Q, R)
end
function update_aaqr!(Q, R, vnew, m, k)
(nq, mq) = size(Q)
(k > m - 1) && error("Dimension error in Anderson QR")
@views Qkm = Q[:, 1:k]
@views hv = vec(R[1:k+1, k+1])
Orthogonalize!(Qkm, hv, vnew, "cgs2")
@views R[1:k+1, k+1] .= hv
@views Q[:, k+1] .= vnew
# return (Q = Q, R = R)
end
function downdate_aaqr!(Q, R, m, Qd)
(nq, mq) = size(Q)
(pd, md) = size(Qd)
(md == m - 1) || @error("dimension error in downdate")
@views Rp = R[:, 2:m]
G = qr!(Rp)
Rd = Matrix(G.R)
Qx = Matrix(G.Q)
@views R[1:m-1, 1:m-1] .= Rd
@views R[:, m] .= 0.0
if (pd == nq)
mul!(Qd, Q, Qx)
@views Q[:, 1:m-1] .= Qd
else
blocksize = pd
(dlow, dhigh) = blockdim(nq, blocksize)
blen = length(dlow)
for il = 1:blen
asize = dhigh[il] - dlow[il] + 1
@views QZ = Qd[1:asize, :]
@views Qsec = Q[dlow[il]:dhigh[il], :]
@views mul!(QZ, Qsec, Qx)
@views Qsec[:, 1:m-1] .= QZ
end
end
@views Q[:, m] .= 0.0
return (Q, R)
end
function aaqr_dim_check(Q, R, vnew, m, k)
(mq, nq) = size(Q)
(mr, nr) = size(R)
n = length(vnew)
dimqok = ((mq == n) && (nq == m))
dimrok = ((mr == m) && (nr == m))
dimok = (dimqok && dimrok)
dimok || error("array size error in AA update")
end
function blockdim(n, block)
p = Int(floor(n / block))
res = n - p * block
ilow = Int64[]
ihigh = Int64[]
for jb = 1:p
lowval = (jb - 1) * block + 1
push!(ilow, lowval)
highval = ilow[jb] + block - 1
push!(ihigh, highval)
end
if res > 0
lowval = p * block + 1
push!(ilow, lowval)
push!(ihigh, n)
end
return (ilow, ihigh)
end
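#
# Worked example for blockdim (added comment; not in the original source):
# blockdim(10, 4) splits 1:10 into blocks of at most 4 entries and returns
# ([1, 5, 9], [4, 8, 10]), i.e. the ranges 1:4, 5:8, and 9:10. The downdate
# loop above uses these ranges to apply Q*Qx block-by-block when the
# scratch array Qd has fewer rows than Q.
#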
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 13987 | """
nsol(F!, x0, FS, FPS, J!=diffjac!; rtol=1.e-6, atol=1.e-12,
maxit=20, solver="newton", sham=5, armmax=10, resdec=.1,
dx = 1.e-7, armfix=false,
pdata = nothing, jfact = klfact,
printerr = true, keepsolhist = false, stagnationok=false)
C. T. Kelley, 2022
Julia versions of the nonlinear solvers from my SIAM books.
Herewith: nsol
You must allocate storage for the function and Jacobian in advance
--> in the calling program <-- ie. in FS and FPS
Inputs:\n
- F!: function evaluation, the ! indicates that F! overwrites FS, your
preallocated storage for the function.\n
So FS=F!(FS,x) or FS=F!(FS,x,pdata) returns FS=F(x)\n
Your function MUST have --> return FS <-- at the end.
See the examples in the docstrings and in TestProblems/Systems/simple.jl
- x0: initial iterate\n
- FS: Preallocated storage for function. It is a vector of size N\n
You should store it as (N) and design F! to use vectors of size (N).
If you use (N,1) consistently instead, the solvers may work, but I make
no guarantees.
- FPS: preallocated storage for Jacobian. It is an N x N matrix\n
- J!: Jacobian evaluation, the ! indicates that J! overwrites FPS, your
preallocated storage for the Jacobian. If you leave this out the
default is a finite difference Jacobian.\n
So, FP=J!(FP,FS,x) or FP=J!(FP,FS,x,pdata) returns FP=F'(x). \n
(FP,FS, x) must be the argument list, even if FP does not need FS.
One reason for this is that the finite-difference Jacobian
does and that is the default in the solver.\n
Your Jacobian function MUST have --> return FP <-- at the end.
See the examples in the docstrings and in TestProblems/Systems/simple.jl
- Precision: Lemme tell ya 'bout precision. I designed this code for
full precision functions and linear algebra in any precision you want.
You can declare
FPS as Float64, Float32, or Float16 and nsol will do the right thing if
YOU do not destroy the declaration in your J! function. I'm amazed
that this works so easily. If the Jacobian is reasonably well
conditioned, you can cut the cost of Jacobian factorization and
storage in half with no loss. For large dense Jacobians and inexpensive
functions, this is a good deal.\n
BUT ... There is very limited support for direct sparse solvers in
anything other than Float64. I recommend that you only use Float64
with direct sparse solvers unless you really know what you're doing. I
have a couple examples in the notebook, but watch out.
----------------------
Keyword Arguments (kwargs):\n
rtol and atol: relative and absolute error tolerances\n
maxit: limit on nonlinear iterations\n
solver: default = "newton"\n
Your choices are "newton" or "chord". However,
you have sham at your disposal only if you chose newton. "chord"
will keep using the initial derivative until the iterate converges,
uses the iteration budget, or the line search fails. It is not the
same as sham=Inf, which is smarter.\n
sham: default = 5 (ie Newton)\n
This is the Shamanskii method. If sham=1, you have Newton.
The iteration updates the derivative every sham iterations.
The convergence rate has local q-order sham+1 if you only count
iterations where you update the derivative. You need not
provide your own derivative function to use this option. sham=Inf
is chord only if chord is converging well.\n
I made sham=1 the default for scalar equations. For systems I'm
more aggressive and want to invest as little energy in linear algebra
as possible. So the default is sham=5.
armmax: upper bound on step size reductions in line search\n
resdec: default = .1\n
This is the target value for residual reduction.
The default value is .1. In the old MATLAB codes it was .5.
I only turn Shamanskii on if the residuals are decreasing
rapidly, at least a factor of resdec, and the line search is quiescent.
If you want to eliminate resdec from the method ( you don't ) then set
resdec = 1.0 and you will never hear from it again.
dx: default = 1.e-7\n
difference increment in finite-difference derivatives
h=dx*norm(x,Inf)+1.e-8
armfix: default = false\n
The default is a parabolic line search (ie false). Set to true and
the step size will be fixed at .5. Don't do this unless you are doing
experiments for research.\n
pdata:\n
precomputed data for the function/Jacobian.
Things will go better if you use this rather than hide the data
in global variables within the module for your function/Jacobian
If you use pdata in either of F! or J!, you must use it in the
calling sequence of both.
jfact: default = klfact (tries to figure out best choice) \n
If your Jacobian has any special structure, please set jfact
to the correct choice for a factorization.
I use jfact when I call PrepareJac! to evaluate the
Jacobian (using your J!) and factor it. The default is to use
klfact (an internal function) to do something reasonable.
For general dense matrices, klfact picks lu! to compute an LU factorization
and share storage with the Jacobian. You may change LU to something else by,
for example, setting jfact = cholesky! if your Jacobian is spd.
klfact knows about banded matrices and picks qr. You should,
however RTFM, allocate the extra two upper bands, and use jfact=qr!
to override klfact.
klfact uses lu for general sparse matrices.
If you give me something that klfact does not know how to dispatch on,
then nothing happens. I just return the original Jacobian matrix and
nsol will use backslash to compute the Newton step.
I know that this is probably not optimal in your situation, so it is
good to pick something else, like jfact = lu.
If you want to manage your own factorization within your Jacobian
evaluation function, then set\n
jfact = nofact\n
and nsol will not attempt to factor your Jacobian. That is also what
happens when klfact does not know what to do. Your Jacobian is sent
directly to Julia's \\ operation
Please do not mess with the line that calls PrepareJac!.
FPF = PrepareJac!(FPS, FS, x, ItRules)
FPF is not the same as FPS (the storage you allocate for the Jacobian)
for a reason. FPF and FPS do not have the same type, even though they
share storage. So, FPS=PrepareJac!(FPS, FS, ...) will break things.
printerr: default = true\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist: default = false\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
stagnationok: default = false\n
Set this to true if you want to disable the line search and either
observe divergence or stagnation. This is only useful for research
or writing a book.
Output:\n
- A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
-- solution = converged result
-- functionval = F(solution)
-- history = the vector of residual norms (||F(x)||) for the iteration
-- stats = named tuple of the history of (ifun, ijac, iarm), the number
of functions/derivatives/steplength reductions at each iteration.
I do not count the function values for a finite-difference derivative
because they count toward a Jacobian evaluation.
-- idid=true if the iteration succeeded and false if not.
-- errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
= 1 if the line search failed
-- solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
solhist is an N x K array where N is the length of x and K is the number of
iterations + 1. So, for scalar equations, it's a row vector.
------------------------
### Examples for nsol
#### World's easiest problem example.
Test 64 and 32 bit Jacobians. No meaningful difference in the residual histories or the converged solutions.
```jldoctest
julia> function f!(fv,x)
fv[1]=x[1] + sin(x[2])
fv[2]=cos(x[1]+x[2])
#
# The return fv part is important even though f! overwrites fv.
#
return fv
end
f! (generic function with 1 method)
julia> x=ones(2); fv=zeros(2); jv=zeros(2,2); jv32=zeros(Float32,2,2);
julia> nout=nsol(f!,x,fv,jv; sham=1);
julia> nout32=nsol(f!,x,fv,jv32; sham=1);
julia> [nout.history nout32.history]
5×2 Matrix{Float64}:
1.88791e+00 1.88791e+00
2.43119e-01 2.43120e-01
1.19231e-02 1.19231e-02
1.03266e-05 1.03265e-05
1.46388e-11 1.45995e-11
julia> [nout.solution nout.solution-nout32.solution]
2×2 Array{Float64,2}:
-7.39085e-01 -5.48450e-14
2.30988e+00 -2.26485e-14
```
#### H-equation example.
I'm taking the sham=5 default here, so the convergence is not quadratic. The good news is that we evaluate the Jacobian only once.
```jldoctest
julia> n=16; x0=ones(n); FV=ones(n); JV=ones(n,n);
julia> hdata=heqinit(x0, .5);
julia> hout=nsol(heqf!,x0,FV,JV;pdata=hdata);
julia> hout.history
4-element Array{Float64,1}:
6.17376e-01
3.17810e-03
2.75227e-05
2.35817e-07
```
"""
function nsol(
F!,
x0,
FS,
FPS,
J! = diffjac!;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 20,
solver = "newton",
sham = 5,
armmax = 10,
resdec = 0.1,
dx = 1.e-7,
armfix = false,
pdata = nothing,
jfact = klfact,
printerr = true,
keepsolhist = false,
stagnationok = false,
)
itc = 0
idid = true
iline = false
#
# If I'm letting the iteration stagnate and turning off the
# linesearch, then the line search cannot fail.
#
stagflag = stagnationok && (armmax == 0)
#=
First evaluation of the function. I evaluate the derivative when
Shamanskii tells me to, at the first iteration (duh!), and when
the rate of residual reduction is below the target value of resdec.
=#
(ItRules, x, n, solhist) = Newtoninit(
x0,
dx,
F!,
J!,
solver,
sham,
armmax,
armfix,
resdec,
maxit,
printerr,
pdata,
jfact,
keepsolhist,
)
# keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
#
# First Evaluation of the function. Initialize the iteration stats.
# Fix the tolerances for convergence and define the derivative FPF
# outside of the main loop for scoping.
#
FS = EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
tol = rtol * resnorm + atol
FPF = []
ItData = ItStats(resnorm)
newiarm = -1
newfun = 0
newjac = 0
derivative_is_old = false
residratio = 1.0
armstop = true
#
# Preallocate a few vectors for the step, trial step, trial function
#
step = copy(x)
xt = copy(x)
FT = copy(x)
#
# If the initial iterate satisfies the termination criteria, tell me.
#
toosoon = (resnorm <= tol)
#
# The main loop stops on convergence, too many iterations, or a
# line search failure after a derivative evaluation.
#
T = eltype(FPS)
while resnorm > tol && itc < maxit && (armstop || stagnationok)
#
# Evaluate and factor the Jacobian.
#
newfun = 0
newjac = 0
#
# Evaluate and factor the Jacobian if (1) you are using the chord
# method and it's the initial iterate, or
# (2) it's Newton and you are on the right part of the Shamanskii loop,
# or the line search failed with a stale derivative, or the residual
# reduction ratio is too large. This leads to a tedious barrage
# of conditionals that I have parked in a function.
#
# I'm storing the factorization in FPF unless you have asked for
# jfact = nofact or nsol can't figure out how to factor the Jacobian.
# In those cases you just get the Jacobian back and I use \\.
#
evaljac = test_evaljac(ItRules, itc, newiarm, residratio)
if evaljac
FPF = PrepareJac!(FPS, FS, x, ItRules)
newfun += solver == "secant"
newjac += ~(solver == "secant")
end
derivative_is_old = (newjac == 0) && (solver == "newton")
if n > 1
# If the Jacobian precision is worse than Float32, you'll have to
# do some scaling to avoid underflow in the terminal phase of
# the nonlinear iteration. So, I do it for anything worse than
# Float64 to make the logic simple.
T == Float64 ? (step .= -(FPF \ FS)) :
(ns = norm(FS, Inf); step .= -ns * (FPF \ T.(FS / ns)))
else
# scalar equation
step = -FS / FPF
end
#
# Compute the trial point, evaluate F and the residual norm.
#
AOUT = armijosc(xt, x, FT, FS, step, resnorm, ItRules, derivative_is_old)
#
# update solution/function value
#
if n > 1
x .= AOUT.ax
FS .= AOUT.afc
else
# scalar equation
x = AOUT.ax
FS = AOUT.afc
end
#
# If the line search fails and the derivative is current,
# stop the iteration. Print an error message unless
# stagnationok == true and armmax=0
#
armstop = AOUT.idid || derivative_is_old
iline = ~armstop && ~stagflag
#
# Keep the books.
#
residm = resnorm
resnorm = AOUT.resnorm
residratio = resnorm / residm
updateStats!(ItData, newfun, newjac, AOUT)
newiarm = AOUT.aiarm
itc += 1
~keepsolhist || (@views solhist[:, itc+1] .= x)
end
(idid, errcode) = NewtonOK(resnorm, iline, tol, toosoon, itc, ItRules)
newtonout = CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist)
return newtonout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 12974 | """
nsoli(F!, x0, FS, FPS, Jvec=dirder; rtol=1.e-6, atol=1.e-12,
maxit=20, lmaxit=-1, lsolver="gmres", eta=.1,
fixedeta=true, Pvec=nothing, pside="right",
armmax=10, dx = 1.e-7, armfix=false, pdata = nothing,
printerr = true, keepsolhist = false,
Krylov_Data = nothing, stagnationok=false)
)
C. T. Kelley, 2022
Julia versions of the nonlinear solvers from my SIAM books.
Herewith: nsoli
You must allocate storage for the function and the Krylov basis in advance
--> in the calling program <-- ie. in FS and FPS
Inputs:\n
- F!: function evaluation, the ! indicates that F! overwrites FS, your
preallocated storage for the function.\n
So FS=F!(FS,x) or FS=F!(FS,x,pdata) returns FS=F(x)
Your function MUST have --> return FS <-- at the end.
See the examples in the docstrings
- x0: initial iterate\n
- FS: Preallocated storage for function. It is a vector of size N\n
You should store it as (N) and design F! to use vectors of size (N).
If you use (N,1) consistently instead, the solvers may work, but I make
no guarantees.
- FPS: preallocated storage for the Krylov basis. It is an N x m matrix where
you plan to take at most m-1 GMRES iterations before a restart. \n
- Jvec: Jacobian vector product, If you leave this out the
default is a finite difference directional derivative.\n
So, FP=Jvec(v,FS,x) or FP=Jvec(v,FS,x,pdata) returns FP=F'(x) v. \n
(v, FS, x) or (v, FS, x, pdata) must be the argument list,
even if FP does not need FS.
One reason for this is that the finite-difference derivative
does and that is the default in the solver.
- Precision: Lemme tell ya 'bout precision. I designed this code for
full precision functions and linear algebra in any precision you want.
You can declare FPS as Float64 or Float32 and nsoli
will do the right thing. Float16 support is there, but not working well.
If the Jacobian is reasonably well conditioned, you can cut the cost
of orthogonalization and storage (for GMRES) in half with no loss.
There is no benefit if your linear solver is not GMRES or if
orthogonalization and storage of the Krylov vectors is only a
small part of the cost of the computation. So if your preconditioner
is good and you only need a few Krylovs/Newton, reduced precision won't
help you much.
BiCGSTAB does not benefit from reduced precision.
----------------------
Keyword Arguments (kwargs):\n
rtol and atol: relative and absolute error tolerances\n
maxit: limit on nonlinear iterations\n
lmaxit: limit on linear iterations. If lmaxit > m-1, where FPS has
m columns, and you need more than m-1 linear iterations, then GMRES
will restart.
The default is -1 for GMRES. This means that you'll take m-1 iterations,
where size(V) = (n,m), and get no restarts. For BiCGSTAB the default is 10.
lsolver: the linear solver, default = "gmres"\n
Your choices are "gmres" or "bicgstab".
eta and fixedeta: eta > 0 or there's an error
The linear solver terminates when ||F'(x)s + F(x) || <= etag || F(x) ||
where
etag = eta if fixedeta=true
etag = Eisenstat-Walker as implemented in book if fixedeta=false
The default, which may change, is eta=.1, fixedeta=true
Pvec: Preconditioner-vector product. The rules are similar to Jvec
So, Pv=Pvec(v,x) or Pv=Pvec(v,x,pdata) returns P(x) v where
P(x) is the preconditioner. You must use x as an input even
if your preconditioner does not depend on x
pside: apply preconditioner on pside, default = "right". I do not
recommend "left". See Chapter 3 for the story on this.
armmax: upper bound on step size reductions in line search\n
dx: default = 1.e-7\n
difference increment in finite-difference derivatives
h=dx*norm(x,Inf)+1.e-8
armfix: default = false\n
The default is a parabolic line search (ie false). Set to true and
the step size will be fixed at .5. Don't do this unless you are doing
experiments for research.\n
pdata:\n
precomputed data for the function, Jacobian-vector, and Preconditioner-vector
products. Things will go better if you use this rather than hide the data
in global variables within the module for your function/Jacobian
If you use pdata in any of F!, Jvec, or Pvec, you must use it in all of them.
printerr: default = true\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist: default = false\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
Krylov_Data: default = nothing\n
This is a structure where I put the internal storage for the solvers.
You can (but probably should not) preallocate this yourself with the
nkl_init function:\n
Krylov_Data = nkl_init(n, lsolver)
This is a dangerous thing to mess with and I only recommend it if
the allocations in nsoli become a problem in continuation or IVP
integration. Krylov_Data is where I store the solution at the end
of the iteration and if you reuse it without copying the solution
to somewhere else, you'll lose it and it will be overwritten with
the new solution. The continuation case study uses this and you
should look at that to see what I did.
stagnationok: default = false\n
Set this to true if you want to disable the line search and either
observe divergence or stagnation. This is only useful for research
or writing a book.
Output:\n
- A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
-- solution = converged result
-- functionval = F(solution)
-- history = the vector of residual norms (||F(x)||) for the iteration
-- stats = named tuple of the history of (ifun, ijac, iarm, ikfail), the
number of functions/Jacobian-vector prods/steplength reductions/linear solver
failures at each iteration. Linear solver failures DO NOT mean that the
nonlinear solver will fail. You should look at this stat if, for example,
the line search fails. Increasing the size of FPS and/or lmaxit might
solve the problem.
I do not count the function values for a finite-difference derivative
because they count toward a Jacobian-vector product.
-- idid=true if the iteration succeeded and false if not.
-- errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
= 1 if the line search failed
-- solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
solhist is an N x K array where N is the length of x and K is the number of
iteration + 1. So, for scalar equations, it's a row vector.
------------------------
### Example for nsoli
#### Simple 2D problem.
You should get the same results as for nsol.jl because
GMRES will solve the equation for the step exactly in two iterations. Finite
difference Jacobians and analytic Jacobian-vector products for full precision
and finite difference Jacobian-vector products for single precision.
BiCGSTAB converges in 5 iterations and each nonlinear iteration costs
two Jacobian-vector products. Note that the storage for the Krylov
space in GMRES (jvs) is replace by a single vector (fpv) when BiCGSTAB
is the linear solver.
```jldoctest
julia> function f!(fv,x)
fv[1]=x[1] + sin(x[2])
fv[2]=cos(x[1]+x[2])
return fv
end
f! (generic function with 1 method)
julia> function JVec(v, fv, x)
jvec=zeros(2);
p=-sin(x[1]+x[2])
jvec[1]=v[1]+cos(x[2])*v[2]
jvec[2]=p*(v[1]+v[2])
return jvec
end
JVec (generic function with 1 method)
julia> x0=ones(2); fv=zeros(2); jv=zeros(2,2);
julia> jv32=zeros(Float32,2,2);
julia> jvs=zeros(2,3); jvs32=zeros(Float32,2,3);
julia> nout=nsol(f!,x0,fv,jv; sham=1);
julia> kout=nsoli(f!,x0,fv,jvs,JVec;
fixedeta=true, eta=.1, lmaxit=2);
julia> kout32=nsoli(f!,x0,fv,jvs32;
fixedeta=true, eta=.1, lmaxit=2);
julia> [nout.history kout.history kout32.history]
5×3 Array{Float64,2}:
1.88791e+00 1.88791e+00 1.88791e+00
2.43119e-01 2.43120e-01 2.43119e-01
1.19231e-02 1.19231e-02 1.19230e-02
1.03266e-05 1.03261e-05 1.03264e-05
1.46388e-11 1.40862e-11 1.39825e-11
julia> fpv=zeros(2);
julia> koutb=nsoli(f!,x0,fv,fpv,JVec;
fixedeta=true, eta=.1, lmaxit=2, lsolver="bicgstab");
julia> koutb.history
6-element Vector{Float64}:
1.88791e+00
2.43120e-01
1.19231e-02
4.87500e-04
7.54236e-06
3.84646e-07
```
"""
function nsoli(
F!,
x0,
FS,
FPS,
Jvec = dirder;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 20,
lmaxit = -1,
lsolver = "gmres",
eta = 0.1,
fixedeta = true,
Pvec = nothing,
pside = "right",
armmax = 10,
dx = 1.e-7,
armfix = false,
pdata = nothing,
printerr = true,
keepsolhist = false,
Krylov_Data = nothing,
stagnationok = false,
)
itc = 0
idid = true
iline = false
#
# If I'm letting the iteration stagnate and turning off the
# linesearch, then the line search cannot fail.
#
stagflag = stagnationok && (armmax == 0)
#=
Named tuple with the iteration data. This makes communication
with the linear solvers and the line search easier.
=#
(ItRules, x, n, solhist) = Newton_Krylov_Init(
x0,
dx,
F!,
Jvec,
Pvec,
pside,
lsolver,
eta,
fixedeta,
armmax,
armfix,
maxit,
lmaxit,
printerr,
pdata,
Krylov_Data,
keepsolhist,
)
# keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
#
# First Evaluation of the function. Initialize the iteration stats.
# Fix the tolerances for convergence and define the derivative FPF
# outside of the main loop for scoping.
#
FS = EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
tol = rtol * resnorm + atol
FPF = []
ItData = ItStatsK(resnorm)
newiarm = -1
newfun = 0
newjac = 0
newikfail = 0
ke_report = false
residratio = 1.0
armstop = true
etag = eta
#
# Get the preallocated vectors for the step, trial step, trial function
#
knl_store = ItRules.knl_store
step = knl_store.step
xt = knl_store.xt
FT = knl_store.FT
#
# If the initial iterate satisfies the termination criteria, tell me.
#
toosoon = (resnorm <= tol)
#
# The main loop stops on convergence, too many iterations, or a
# line search failure after a derivative evaluation.
#
while resnorm > tol && itc < maxit && (armstop || stagnationok)
#
newfun = 0
newjac = 0
newikfail = 0
#
#
# The GMRES solver will do the orthogonalization in lower
# precision. I've tested Float32, but see the docstrings
# for all the caveats. This is not the slam dunk it was
# for Gaussian elimination on dense matrices.
#
step .*= 0.0
etag = forcing(itc, residratio, etag, ItRules, tol, resnorm)
kout = Krylov_Step!(step, x, FS, FPS, ItRules, etag)
step .= kout.step
#
# For GMRES you get 1 jac-vec per iteration and there is no jac-vec
# for the initial inner iterate of zero. For BiCGSTAB it's two
# jac-vecs per iteration.
#
newjac = kout.Lstats.lits
(lsolver == "gmres") || (newjac *= 2)
linok = kout.Lstats.idid
linok || (ke_report = Krylov_Error(lmaxit, ke_report); newikfail = 1)
#
# Compute the trial point, evaluate F and the residual norm.
# The derivative is never old for Newton-Krylov
#
AOUT = armijosc(xt, x, FT, FS, step, resnorm, ItRules, false)
#
# update solution/function value
#
x .= AOUT.ax
FS .= AOUT.afc
#
# If the line search fails
# stop the iteration. Print an error message unless
# stagnationok == true
#
armstop = AOUT.idid
iline = ~armstop && ~stagflag
#
# Keep the books.
#
residm = resnorm
resnorm = AOUT.resnorm
residratio = resnorm / residm
updateStats!(ItData, newfun, newjac, AOUT, newikfail)
newiarm = AOUT.aiarm
itc += 1
keepsolhist && (@views solhist[:, itc+1] .= x)
# ~keepsolhist || (@views solhist[:, itc+1] .= x)
end
# solution = x
# functionval = FS
(idid, errcode) = NewtonOK(resnorm, iline, tol, toosoon, itc, ItRules)
newtonout = CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist)
return newtonout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 9216 | """
ptcsol(F!, x0, FS, FPS, J! = diffjac!; rtol=1.e-6, atol=1.e-12,
maxit=20, delta0=1.e-6, dx=1.e-7, pdata = nothing, jfact = klfact,
printerr = true, keepsolhist = false, jknowsdt = false)
C. T. Kelley, 2022
Julia versions of the nonlinear solvers from my SIAM books.
Herewith: some new stuff ==> ptcsol
PTC finds the steady-state solution of u' = -F(u), u(0) = u_0.
The - sign is a convention.
You must allocate storage for the function and Jacobian in advance
--> in the calling program <-- ie. in FS and FPS
Inputs:\n
- F!: function evaluation, the ! indicates that F! overwrites FS, your
preallocated storage for the function.\n
So, FS=F!(FS,x) or FS=F!(FS,x,pdata) returns FS=F(x)\n
Your function MUST have --> return FS <-- at the end.
See the examples in the TestProblems/Systems/FBeam!.jl
- x0: initial iterate\n
- FS: Preallocated storage for function. It is a vector of size N\n
You should store it as (N) and design F! to use vectors of size (N).
If you use (N,1) consistently instead, the solvers may work, but I make
no guarantees.
- FPS: preallocated storage for Jacobian. It is an N x N matrix\n
If FPS is sparse, you __must__ allocate storage for the diagonal so
I will have room to put 1/dt in there.
- J!: Jacobian evaluation, the ! indicates that J! overwrites FPS, your
preallocated storage for the Jacobian. If you leave this out the
default is a finite difference Jacobian.\n
So, FP=J!(FP,FS,x) or FP=J!(FP,FS,x,pdata) returns FP=F'(x);
(FP,FS, x) must be the argument list, even if FP does not need FS.
One reason for this is that the finite-difference Jacobian
does and that is the default in the solver.\n
Your Jacobian function MUST have --> return FP <-- at the end.
See the examples in the TestProblems/Systems/FBeam!.jl
You may have a better way to add (1/dt) I to your Jacobian. If you
want to do this yourself then your Jacobian function should be
FP=J!(FP,FS,x,dt) or FP=J!(FP,FS,x,dt,pdata) and return
F'(x) + (1.0/dt)*I. \n
You will also have to set the kwarg __jknowsdt__ to true.
- Precision: Lemme tell ya 'bout precision. I designed this code for
full precision
functions and linear algebra in any precision you want. You can declare
FPS as Float64, Float32, or Float16 and ptcsol will do the right thing if
YOU do not destroy the declaration in your J! function. I'm amazed
that this works so easily. If the Jacobian is reasonably well
conditioned, you can cut the cost of Jacobian factorization and
storage in half with no loss. For large dense Jacobians and inexpensive
functions, this is a good deal.\n
BUT ... There is very limited support for direct sparse solvers in
anything other than Float64. I recommend that you only use Float64
with direct sparse solvers unless you really know what you're doing. I
have a couple examples in the notebook, but watch out.
----------------------
Keyword Arguments (kwargs):\n
rtol and atol: relative and absolute error tolerances\n
delta0: initial pseudo time step. The default value of 1.e-6 is a bit conservative
and is one option you really should play with. Look at the example
where I set it to 1.0!\n
maxit: limit on nonlinear iterations, default=20. \n
This is coupled to delta0. If your choice of delta0 is too small (conservative)
then you'll need many iterations to converge and will need a larger
value of maxit
For PTC you'll need more iterations than for a straight-up
nonlinear solve. This is part of the price for finding the
stable solution.
dx: default = 1.e-7\n
difference increment in finite-difference derivatives
h=dx*norm(x)+1.e-6
pdata:\n
precomputed data for the function/Jacobian.
Things will go better if you use this rather than hide the data
in global variables within the module for your function/Jacobian
jfact: default = klfact (tries to figure out best choice) \n
If your Jacobian has any special structure, please set jfact
to the correct choice for a factorization.
I use jfact when I call PTCUpdate to evaluate the
Jacobian (using your J!) and factor it. The default is to use
klfact (an internal function) to do something reasonable.
For general dense matrices, klfact picks lu! to compute an LU factorization
and share storage with the Jacobian. You may change LU to something else by,
for example, setting jfact = cholesky! if your Jacobian is spd.
klfact knows about banded matrices and picks qr. You should,
however RTFM, allocate the extra two upper bands, and use jfact=qr!
to override klfact.
klfact uses lu for general sparse matrices.
If you give me something that klfact does not know how to dispatch on,
then nothing happens. I just return the original Jacobian matrix and
ptcsol will use backslash to compute the Newton step.
I know that this is probably not optimal in your situation, so it is
good to pick something else, like jfact = lu.
printerr: default = true\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist: default = false\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
jknowsdt: default = false\n
Set this to true if your Jacobian evaluation function returns
F'(x) + (1/dt) I. You'll also need to follow the rules above for
the Jacobian evaluation function. I do not recommend this and if
your Jacobian is anything other than a matrix I can't promise
anything. I've tested this for matrix outputs only.
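If you do go this route, here is a minimal sketch of such a Jacobian function.
The field pdata.A holding F'(x) is an assumption made only for this illustration.
```
function J!(FP, FS, x, dt, pdata)
    FP .= pdata.A                # assume pdata.A holds/evaluates F'(x)
    for i in axes(FP, 1)
        FP[i, i] += 1.0 / dt     # add (1/dt) I yourself
    end
    return FP
end
```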
Output:\n
A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
solution = converged result
functionval = F(solution)
history = the vector of residual norms (||F(x)||) for the iteration
Unlike nsol, nsoli, or even ptcsoli, ptcsol has a fixed cost per
iteration of one function, one Jacobian, and one Factorization. Hence
iteration statistics are not interesting and not in the output.
idid=true if the iteration succeeded and false if not.
errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
solhist is an N x K array where N is the length of x and K is the number
of iterations + 1. So, for scalar equations, it's a row vector.
### Example for ptcsol
#### The buckling beam problem.
You'll need to use TestProblems for this to work.
```jldoctest
julia> using SIAMFANLEquations.TestProblems
julia> n=63; maxit=1000; delta = 0.01; lambda = 20.0;
julia> bdata = beaminit(n, 0.0, lambda); x = bdata.x;
julia> u0 = x .* (1.0 .- x) .* (2.0 .- x);
julia> u0 .*= exp.(-10.0 * u0);
julia> FS = copy(u0); FPS = copy(bdata.D2);
julia> pout = ptcsol( FBeam!, u0, FS, FPS, BeamJ!;
rtol = 1.e-10, pdata = bdata, delta0 = delta, maxit = maxit);
julia> # It takes a few iterations to get there.
length(pout.history)
25
julia> [pout.history[1:5] pout.history[21:25]]
5×2 Array{Float64,2}:
6.31230e+01 9.75412e-01
7.52624e+00 8.35295e-02
8.31545e+00 6.58797e-04
3.15455e+01 4.12697e-08
3.66566e+01 6.29295e-12
julia> # We get the nonnegative steady state.
maximum(pout.solution)
2.19086e+00
```
"""
function ptcsol(
F!,
x0,
FS = [],
FPS = [],
J! = diffjac!;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 20,
delta0 = 1.e-6,
dx = 1.e-7,
pdata = nothing,
jfact = klfact,
printerr = true,
keepsolhist = false,
jknowsdt = false,
)
itc = 0
idid = true
#
# Initialize the iteration
# As with the other codes, ItRules packages all the details of
# the problem so it's easy to pass them around.
#
(ItRules, x, n, solhist) =
PTCinit(x0, dx, F!, J!, delta0, maxit, pdata, jfact, keepsolhist, jknowsdt)
#
# First Evaluation of the function. Initialize the iteration history.
# Fix the tolerances for convergence and define the derivative FPF
# outside of the main loop for scoping.
#
FS = EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
tol = rtol * resnorm + atol
ItData = ItStatsPTC(resnorm)
#
# Preallocate a vector for the step
#
step = copy(x)
#
# If the initial iterate satisfies the termination criteria, tell me.
#
toosoon = (resnorm <= tol)
#
# The main loop stops on convergence or too many iterations.
#
delta = delta0
while resnorm > tol && itc < maxit
#
# Evaluate and factor the Jacobian; update x, F(x), and delta.
#
(x, delta, FS, resnorm) = PTCUpdate(FPS, FS, x, ItRules, step, resnorm, delta)
#
# Keep the books
#
updateStats!(ItData, resnorm)
itc += 1
~keepsolhist || (@views solhist[:, itc+1] .= x)
end
(idid, errcode) = PTCOK(resnorm, tol, toosoon, ItRules, printerr)
itout = CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist)
return (itout)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 11331 | """
function ptcsoli( F!, x0, FS, FPS, Jvec = dirder; rtol = 1.e-6, atol = 1.e-12,
maxit = 20, lmaxit = -1, lsolver = "gmres", eta = 0.1, fixedeta = true,
Pvec = nothing, PvecKnowsdelta = false, pside = "right", delta0 = 1.e-6,
dx = 1.e-7, pdata = nothing, printerr = true, keepsolhist = false,
)
C. T. Kelley, 2022
Julia versions of the nonlinear solvers from my SIAM books.
New for this book ==> ptcsoli
PTC finds the steady-state solution of u' = -F(u), u(0) = u_0.
The - sign is a convention.
You must allocate storage for the function and Krylov basis in advance
--> in the calling program <-- ie. in FS and FPS
Inputs:\n
- F!: function evaluation, the ! indicates that F! overwrites FS, your
preallocated storage for the function.\n
So, FS=F!(FS,x) or FS=F!(FS,x,pdata) returns FS=F(x)
Your function MUST have --> return FS <-- at the end.
See the example in TestProblems/Systems/FBeam!.jl
- x0: initial iterate\n
- FS: Preallocated storage for function. It is a vector of size N\n
You should store it as (N) and design F! to use vectors of size (N).
If you use (N,1) consistently instead, the solvers may work, but I make
no guarantees.
- FPS: preallocated storage for the Krylov basis. It is an N x m matrix where
you plan to take at most m-1 GMRES iterations before a restart. \n
- Jvec: Jacobian-vector product. If you leave this out the
default is a finite difference directional derivative.\n
So, FP=Jvec(v,FS,x) or FP=Jvec(v,FS,x,pdata) returns FP=F'(x) v. \n
(v, FS, x) or (v, FS, x, pdata) must be the argument list,
even if FP does not need FS.
One reason for this is that the finite-difference derivative
does, and that is the default in the solver. (A sketch of an analytic
Jvec appears just after this input list.)
- Precision: Lemme tell ya 'bout precision. I designed this code for
full precision functions and linear algebra in any precision you want.
You can declare FPS as Float64 or Float32 and ptcsoli
will do the right thing. Float16 support is there, but not working well.
If the Jacobian is reasonably well conditioned, you can cut the cost
of orthogonalization and storage (for GMRES) in half with no loss.
There is no benefit if your linear solver is not GMRES or if
orthogonalization and storage of the Krylov vectors is only a
small part of the cost of the computation. So if your preconditioner
is good and you only need a few Krylovs/Newton, reduced precision won't
help you much.
BiCGSTAB does not benefit from reduced precision.
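Here is the promised sketch of an analytic Jacobian-vector product for the
special case where the Jacobian is a matrix you have stored in pdata.A (the
name pdata.A is an assumption made only for this illustration):
```
# FS and x are required in the argument list even though this Jvec ignores them.
function Jvec(v, FS, x, pdata)
    return pdata.A * v
end
```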
----------------------
Keyword Arguments (kwargs):\n
rtol and atol: relative and absolute error tolerances\n
delta0: initial pseudo time step. The default value of 1.e-6 is a bit conservative
and is one option you really should play with. Look at the example
where I set it to 1.0!\n
maxit: limit on nonlinear iterations, default=20. \n
This is coupled to delta0. If your choice of delta0 is too small (conservative)
then you'll need many iterations to converge and will need a larger
value of maxit.
For PTC you'll need more iterations than for a straight-up
nonlinear solve. This is part of the price for finding the
stable solution. \n
lmaxit: limit on linear iterations. If lmaxit > m-1, where FPS has
m columns, and you need more than m-1 linear iterations, then GMRES
will restart.
The default is -1. For GMRES this means that you'll take m-1 iterations, where
size(V) = (n,m), and get no restarts. For BiCGSTAB you'll then get the default
of 10 iterations.
lsolver: the linear solver, default = "gmres"\n
Your choices are "gmres" or "bicgstab". The second example
below shows a BiCGSTAB solve. \n
eta and fixed eta: eta > 0 or there's an error.
The linear solver terminates when ||F'(x)s + F(x) || <= etag || F(x) ||
where
etag = eta if fixedeta=true
etag = Eisenstat-Walker as implemented in book if fixedeta=false
The default, which may change, is eta=.1, fixedeta=true \n
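A hedged usage sketch: to start from eta = 1.e-2 and let Eisenstat-Walker
adjust the forcing term, you would pass (the other arguments are placeholders,
as elsewhere in this docstring)
```
pout = ptcsoli(F!, x0, FS, FPS; pdata = pdata, eta = 1.e-2, fixedeta = false)
```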
Pvec: Preconditioner-vector product. The rules are similar to Jvec
So, Pv=Pvec(v,x) or Pv=Pvec(v,x,pdata) returns P(x) v where
P(x) is the preconditioner. You must use x as an input even
if your preconditioner does not depend on x.\n
PvecKnowsdelta: If you want your preconditioner-vector product to depend on
the pseudo-timestep delta, put an array deltaval in your precomputed
data. Initialize it as
deltaval = zeros(1,)
and let ptcsoli know about it by setting the kwarg
PvecKnowsdelta = true
ptcsoli will update the value in deltaval with every change
to delta with pdata.deltaval[1]=delta
so your preconditioner-vector product can get to it.\n
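A sketch of a preconditioner-vector product that uses delta; the field names
pdata.B and pdata.deltaval are assumptions made only for this illustration:
```
function Pvec(v, x, pdata)
    delta = pdata.deltaval[1]                  # ptcsoli keeps this current
    return (pdata.B + (1.0 / delta) * I) \\ v  # I is LinearAlgebra.I
end
```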
pside: apply preconditioner on pside, default = "right". I do not
recommend "left". The problem with "left" for ptcsoli is
that it can fail to satisfy the inexact Newton condition for
the unpreconditioned equation, especially early in the iteration
and lead to an incorrect result (unstable solution or wrong
branch of steady state).
See Chapter 3 for the story on this. \n
dx: default = 1.e-7\n
difference increment in finite-difference derivatives
h=dx*norm(x)+1.e-8 \n
pdata:\n
precomputed data for the function, Jacobian-vector, and Preconditioner-vector
products. Things will go better if you use this rather than hide the data
in global variables within the module for your function/Jacobian
If you use pdata in any of F!, Jvec, or Pvec, you must use it in all of them. \n
printerr: default = true\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false. \n
keepsolhist: default = false\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.\n
Output:\n
A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
solution = converged result
functionval = F(solution)
history = the vector of residual norms (||F(x)||) for the iteration
stats = named tuple of the history of (ifun, ijac, ikfail), the number
of functions/jacobian-vector products/linear solver failures at each iteration.
I do not count the function values for a finite-difference derivative
because they count toward a Jacobian-vector product.
Linear solver failures need not cause the nonlinear iteration to fail.
You get a warning and that is all. \n
idid=true if the iteration succeeded and false if not. \n
errcode = 0 if the iteration succeeded \n
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations \n
solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
solhist is an N x K array where N is the length of x and K is the number
of iterations + 1. So, for scalar equations, it's a row vector.
### Example for ptcsoli
#### The buckling beam problem.
You'll need to use TestProblems for
this to work. The preconditioner is a solver for the high order term.
```jldoctest
julia> using SIAMFANLEquations.TestProblems
julia> function PreCondBeam(v, x, bdata)
J = bdata.D2
ptv = J\\v
end
PreCondBeam (generic function with 1 method)
julia> n=63; maxit=1000; delta0 = 0.01; lambda = 20.0;
julia> # Set up the precomputed data
julia> bdata = beaminit(n, 0.0, lambda);
julia> x = bdata.x; u0 = x .* (1.0 .- x) .* (2.0 .- x);
julia> u0 .*= exp.(-10.0 * u0); FS = copy(u0); FPJV=zeros(n,20);
julia> pout = ptcsoli( FBeam!, u0, FS, FPJV;
delta0 = delta0, pdata = bdata, eta = 1.e-2,
rtol = 1.e-10, maxit = maxit, Pvec = PreCondBeam);
julia> # It takes a few iterations to get there.
length(pout.history)
25
julia> [pout.history[1:5] pout.history[21:25]]
5×2 Matrix{Float64}:
6.31230e+01 1.79574e+00
7.45927e+00 2.65956e-01
8.73595e+00 6.58220e-03
2.91937e+01 8.34114e-06
3.47970e+01 5.06847e-09
julia> # We get the nonnegative steady state.
julia> maximum(pout.solution)
2.19086e+00
julia> # Now use BiCGSTAB for the linear solver
julia> FPJV=zeros(n);
julia> pout = ptcsoli( FBeam!, u0, FS, FPJV;
delta0 = delta0, pdata = bdata,
eta = 1.e-2, rtol = 1.e-10, maxit = maxit,
Pvec = PreCondBeam, lsolver="bicgstab");
julia> # Same number of iterations as GMRES, but each one costs twice the
julia> # Jacobian-vector products and needs much less storage
julia> length(pout.history)
25
julia> [pout.history[1:5] pout.history[21:25]]
5×2 Matrix{Float64}:
6.31230e+01 1.68032e+00
7.47081e+00 2.35073e-01
8.62095e+00 5.18260e-03
2.96495e+01 3.23803e-06
```
"""
function ptcsoli(
F!,
x0,
FS,
FPS,
Jvec = dirder;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 20,
lmaxit = -1,
lsolver = "gmres",
eta = 0.1,
fixedeta = true,
Pvec = nothing,
PvecKnowsdelta = false,
pside = "right",
delta0 = 1.e-6,
dx = 1.e-7,
pdata = nothing,
printerr = true,
keepsolhist = false,
)
itc = 0
idid = true
#
# Initialize the iteration
# As with the other codes, ItRules packages all the details of
# the problem so it's easy to pass them around.
#
(ItRules, x, n) = PTCKrylovinit(
x0,
dx,
F!,
Jvec,
delta0,
Pvec,
PvecKnowsdelta,
pside,
lsolver,
eta,
fixedeta,
lmaxit,
maxit,
printerr,
pdata,
)
keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
#
# First Evaluation of the function. Initialize the iteration history.
# Fix the tolerances for convergence and define the derivative FPF
# outside of the main loop for scoping.
#
FS = EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
ItData = ItStatsPTCK(resnorm)
tol = rtol * resnorm + atol
etag = eta
ke_report = false
residratio = 1.0
#
# Preallocate a vector for the step
#
step = copy(x)
#
# If the initial iterate satisfies the termination criteria, tell me.
#
toosoon = (resnorm <= tol)
#
# The main loop stops on convergence or too many iterations.
#
delta = delta0
while resnorm > tol && itc < maxit
residm = resnorm
newjac = 0
newikfail = 0
#
# Compute the Jacobian-vector product; update x, F(x), and delta.
#
etag = forcing(itc, residratio, etag, ItRules, tol, resnorm)
(x, delta, FS, resnorm, Lstats) =
PTCUpdatei(FPS, FS, x, ItRules, step, resnorm, delta, etag)
residratio = resnorm / residm
newjac = Lstats.lits
linok = Lstats.idid
linok || (ke_report = Krylov_Error(lmaxit, ke_report); newikfail = 1)
#
# Keep the books
#
updateStats!(ItData, resnorm, newjac, newikfail)
itc += 1
~keepsolhist || (@views solhist[:, itc+1] .= x)
end
(idid, errcode) = PTCOK(resnorm, tol, toosoon, ItRules, printerr)
itout = CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist)
return itout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 6251 | """
nsolsc(f,x0, fp=difffp; rtol=1.e-6, atol=1.e-12, maxit=10,
solver="newton", sham=1, armmax=5, resdec=.1, dx=1.e-7,
armfix=false, pdata=nothing,
printerr=true, keepsolhist=true, stagnationok=false)
C. T. Kelley, 2022
Newton's method for scalar equations. Has most of the features a
code for systems of equations needs. This is a wrapper for a call
to nsol.jl, the real code for systems.
Input:\n
f: function\n
x0: initial iterate\n
fp: derivative. If your derivative function is fp, you give me
its name. For example fp=foobar tells me that foobar is your
function for the derivative. The default is a forward difference
Jacobian that I provide.\n
Keyword Arguments (kwargs):\n
rtol, atol: relative and absolute error tolerances\n
maxit: upper bound on number of nonlinear iterations\n
solver:\n
Your choices are "newton"(default) or "chord". However,
you have sham at your disposal only if you choose newton. "chord"
will keep using the initial derivative until the iterate converges,
uses the iteration budget, or the line search fails. It is not the
same as sham=Inf, which is smarter.\n
If you use secant and your initial iterate is poor, you have made
a mistake. I will help you by driving the line search with a finite
difference derivative.\n
sham:\n
This is the Shamanskii method. If sham=1, you have Newton.
The iteration updates the derivative every sham iterations.
The convergence rate has local q-order sham+1 if you only count
iterations where you update the derivative. You need not
provide your own derivative function to use this option. sham=Inf
is chord only if chord is converging well.\n
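For instance (an illustrative call, not one of the examples below), Newton
with a derivative update every other iteration is
```
nsolout = nsolsc(x -> x^2 - 4.0, 1.0, x -> 2x; sham = 2)
```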
armmax: upper bound on stepsize reductions in linesearch
resdec: target value for residual reduction. \n
The default value is .1. In the old MATLAB codes it was .5.
I only turn Shamanskii on if the residuals are decreasing
rapidly, at least a factor of resdec, and the line search is quiescent.
If you want to eliminate resdec from the method ( you don't ) then set
resdec = 1.0 and you will never hear from it again.
dx:\n
This is the increment for forward difference, default = 1.e-7.
dx should be roughly the square root of the noise in the function.
armfix:\n
The default is a parabolic line search (ie false). Set to true and
the stepsize will be fixed at .5. Don't do this unless you are doing
experiments for research.
pdata:\n
precomputed data for the function/derivative.
Things will go better if you use this rather than hide the data
in global variables within the module for your function/derivative
If you use this option your function and derivative must take pdata
as a second argument. eg f(x,pdata) and fp(x,pdata)
printerr:\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist:\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
stagnationok:\n
Set this to true if you want to disable the line search and either
observe divergence or stagnation. This is only useful for research
or writing a book.
Output:\n
A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
solution = converged result
functionval = F(solution)
history = the vector of residual norms (||F(x)||) for the iteration
stats = named tuple of the history of (ifun, ijac, iarm), the number
of functions/derivatives/steplength reductions at each iteration.
I do not count the function values for a finite-difference derivative
because they count toward a Jacobian evaluation. I do count them for
the secant method model.
idid=true if the iteration succeeded and false if not.
errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
= 1 if the line search failed
solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
nsolsc builds solhist with a function from the Tools directory. For
systems, solhist is an N x K array where N is the length of x and K
is the number of iterations + 1. So, for scalar equations (N=1), solhist
is a row vector. Hence the use of solhist' in the example below.
### Examples for nsolsc.jl
```jldoctest
julia> nsolout=nsolsc(atan,1.0;maxit=5,atol=1.e-12,rtol=1.e-12);
julia> nsolout.history
6-element Array{Float64,1}:
7.85398e-01
5.18669e-01
1.16332e-01
1.06102e-03
7.96200e-10
2.79173e-24
```
# If you have an analytic derivative, I will use it.
```jldoctest
julia> fs(x)=x^2-4.0; fsp(x)=2x;
julia> nsolout=nsolsc(fs,1.0,fsp; maxit=5,atol=1.e-9,rtol=1.e-9);
julia> [nsolout.solhist'.-2 nsolout.history]
6×2 Array{Float64,2}:
-1.00000e+00 3.00000e+00
5.00000e-01 2.25000e+00
5.00000e-02 2.02500e-01
6.09756e-04 2.43940e-03
9.29223e-08 3.71689e-07
2.22045e-15 8.88178e-15
```
# You can also use anonymous functions
```jldoctest
julia> nsolout=nsolsc(atan,10.0,x -> 1.0/(1.0+x^2);
atol=1.e-9,rtol=1.e-9);
julia> nsolout.history
8-element Vector{Float64}:
1.47113e+00
1.19982e+00
1.10593e+00
6.48297e-01
2.56983e-01
1.19361e-02
1.13383e-06
9.71970e-19
```
"""
function nsolsc(
f,
x0,
fp = difffp;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 10,
solver = "newton",
sham = 1,
armmax = 5,
resdec = 0.1,
dx = 1.e-7,
armfix = false,
pdata = nothing,
printerr = true,
keepsolhist = true,
stagnationok = false,
)
#
# The scalar code is a simple wrapper for the real code (nsol). The
# wrapper puts placeholders for the memory allocations and the precomputed
# data.
#
fp0 = copy(x0)
fpp0 = copy(x0)
newtonout = nsol(
f,
x0,
fp0,
fpp0,
fp;
rtol = rtol,
atol = atol,
maxit = maxit,
solver = solver,
sham = sham,
armmax = armmax,
resdec = resdec,
dx = dx,
armfix = armfix,
pdata = pdata,
printerr = printerr,
keepsolhist = keepsolhist,
stagnationok = stagnationok,
)
return newtonout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4061 | """
ptcsolsc(f, x0, fp=difffp; rtol=1.e-6, atol=1.e-12, maxit=100,
delta0=1.e-3, dx=1.e-7, pdata=nothing, printerr = true, keepsolhist=true)
C. T. Kelley, 2022
Scalar pseudo-transient continuation solver. PTC is designed to find
stable steady state solutions of
dx/dt = - f(x)
The scalar code is a simple wrapper around a call to ptcsol.jl, the
PTC solver for systems.
--> PTC is ABSOLUTELY NOT a general purpose nonlinear solver.
Input:\n
f: function\n
x: initial iterate/data\n
fp: derivative. If your derivative function is fp, you give me
its name. For example fp=foobar tells me that foobar is your
function for the derivative. The default is a forward difference
Jacobian that I provide.\n
Keyword Arguments:\n
rtol, atol: relative and absolute error tolerances\n
maxit: upper bound on number of nonlinear iterations. This is
coupled to delta0. If your choice of delta0 is too small (conservative)
then you'll need many iterations to converge and will need a larger
value of maxit.
delta0: initial pseudo time step. The default value of 1.e-3 is a bit
conservative and is one option you really should play with. Look at the example
where I set it to 1.0!\n
dx: default = 1.e-7\n
difference increment in finite-difference derivatives
h=dx*norm(x)+1.e-6
pdata:\n
precomputed data for the function/derivative.
Things will go better if you use this rather than hide the data
in global variables within the module for your function/derivative
If you use this option your function and derivative must take pdata
as a second argument. eg f(x,pdata) and fp(x,pdata)
printerr: default = true\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist: if true you get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
Output: A tuple (solution, functionval, history, idid, errcode, solhist) where
history is the array of absolute function values |f(x)| (the residual norms)
for the iteration. Unless something has gone badly wrong, the pseudo time step
grows roughly like delta approx |f(x_0)|/|f(x)|.
idid=true if the iteration succeeded and false if not.
errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
solhist=entire history of the iteration if keepsolhist=true\n
ptcsolsc builds solhist with a function from the Tools directory. For
systems, solhist is an N x K array where N is the length of x and K
is the number of iterations + 1. So, for scalar equations (N=1), solhist
is a row vector. Hence I use [ptcout.solhist' ptcout.history] in the
example below.
If the iteration fails it's time to play with the tolerances, delta0, and maxit.
You are certain to fail if there is no stable solution to the equation.
### Examples for ptcsolsc
```jldoctest
julia> ptcout=ptcsolsc(sptest,.2;delta0=2.0,rtol=1.e-3,atol=1.e-3);
julia> [ptcout.solhist' ptcout.history]
7×2 Array{Float64,2}:
2.00000e-01 9.20000e-02
9.66666e-01 4.19962e-01
8.75086e-01 2.32577e-01
7.99114e-01 1.10743e-01
7.44225e-01 4.00926e-02
7.15163e-01 8.19395e-03
7.07568e-01 4.61523e-04
```
"""
function ptcsolsc(
f,
x0,
fp = difffp;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 100,
delta0 = 1.e-3,
dx = 1.e-7,
pdata = nothing,
printerr = true,
keepsolhist = true,
)
#
# The scalar code is a simple wrapper for the real code (ptcsol). The
# wrapper puts placeholders for the memory allocations and the precomputed
# data.
#
fp0 = copy(x0)
fpp0 = copy(x0)
itout = ptcsol(
f,
x0,
fp0,
fpp0,
fp;
rtol = rtol,
atol = atol,
maxit = maxit,
delta0 = delta0,
dx = dx,
pdata = pdata,
printerr = printerr,
keepsolhist = keepsolhist,
)
# printerr=printerr,keepsolhist=keepsolhist)
return itout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 5547 | """
secant(f,x0; rtol=1.e-6, atol=1.e-12, maxit=10,
armmax=5, armfix=false, pdata=nothing,
printerr=true, keepsolhist=true, stagnationok=false)
C. T. Kelley, 2022
The secant method for scalar equations.
Input:\n
f: function\n
x0: initial iterate
Keyword Arguments (kwargs):\n
rtol, atol: relative and absolute error tolerances\n
maxit: upper bound on number of nonlinear iterations\n
If you use secant and your initial iterate is poor, you have made
a mistake. You will get an error message.
armmax: upper bound on stepsize reductions in linesearch
armfix:\n
The default is a parabolic line search (ie false). Set to true and
the stepsize will be fixed at .5. Don't do this unless you are doing
experiments for research.
printerr:\n
I print a helpful message when the solver fails. To suppress that
message set printerr to false.
keepsolhist:\n
Set this to true to get the history of the iteration in the output
tuple. This is on by default for scalar equations and off for systems.
Only turn it on if you have use for the data, which can get REALLY LARGE.
stagnationok:\n
Set this to true if you want to disable the line search and either
observe divergence or stagnation. This is only useful for research
or writing a book.
Output:\n
A named tuple (solution, functionval, history, stats, idid,
errcode, solhist)
where
solution = converged result
functionval = F(solution)
history = the vector of residual norms (||F(x)||) for the iteration
stats = named tuple of the history of (ifun, ijac, iarm), the number
of functions/derivatives/steplength reductions at each iteration.
For the secant method, ijac = 0.
idid=true if the iteration succeeded and false if not.
errcode = 0 if the iteration succeeded
= -1 if the initial iterate satisfies the termination criteria
= 10 if no convergence after maxit iterations
= 1 if the line search failed
solhist:\n
This is the entire history of the iteration if you've set
keepsolhist=true\n
secant builds solhist with a function from the Tools directory. For
systems, solhist is an N x K array where N is the length of x and K
is the number of iterations + 1. So, for scalar equations (N=1), solhist
is a row vector. Hence the use of solhist' in the example below.
### Example for secant.jl
```jldoctest
julia> secout=secant(atan,1.0;maxit=6,atol=1.e-12,rtol=1.e-12);
julia> secout.history
7-element Array{Float64,1}:
7.85398e-01
5.18729e-01
5.39030e-02
4.86125e-03
4.28860e-06
3.37529e-11
2.06924e-22
```
"""
function secant(
f,
x0;
rtol = 1.e-6,
atol = 1.e-12,
maxit = 10,
solver = "secant",
armmax = 5,
armfix = false,
dx = 1.e-7,
pdata = nothing,
printerr = true,
keepsolhist = true,
stagnationok = false,
)
itc = 0
idid = true
errcode = 0
iline = false
#=
The theory does not support convergence of the secant-Armijo iteration
and you assume a risk when you use it. The same is true for Broyden
and any other quasi-Newton method.
=#
fc = 0.0
fc = EvalF!(f, fc, x0, pdata)
fm = fc
xm = copy(x0)
xm = x0 * 1.0001
if xm == 0
xm = 0.0001
end
fm = fc
fm = EvalF!(f, fm, xm, pdata)
newfun0 = 1
derivative_is_old = false
resnorm = abs(fc)
jfact = nothing
stagflag = stagnationok && (armmax == 0)
(ItRules, x, n) =
Secantinit(x0, dx, f, solver, armmax, armfix, maxit, printerr, pdata, jfact)
#
# Initialize the iteration statistics
#
newiarm = -1
ItData = ItStats(resnorm, 2)
newfun = 0
newjac = 0
newsol = x
xt = x
keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
#
# Fix the tolerances for convergence and define the derivative df
# outside of the main loop for scoping.
#
tol = rtol * resnorm + atol
residratio = 1
df = 0.0
armstop = true
#
# If the initial iterate satisfies the termination criteria, tell me.
#
toosoon = (resnorm <= tol)
#
# The main loop stops on convergence, too many iterations, or a
# line search failure after a derivative evaluation.
#
while (resnorm > tol) && (itc < maxit) && (armstop || stagnationok)
newfun = 0
#
# Extra function call at the start.
#
newjac = 0
newfun = 0
#
df = (fc - fm) / (x - xm)
derivative_is_old = (newjac == 0) && (solver == "newton")
#
# Compute the Newton direction and call the line search.
#
xm = x
fm = fc
ft = fc
d = -fc / df
AOUT = armijosc(xt, x, ft, fc, d, resnorm, ItRules, derivative_is_old)
#
# update solution/function value
#
xm = x
x = AOUT.ax
fm = fc
fc = AOUT.afc
#
# If the line search fails and the derivative is current,
# stop the iteration.
#
armstop = AOUT.idid || derivative_is_old
iline = ~armstop && ~stagflag
newiarm = AOUT.aiarm
#
# Keep the books.
#
residm = resnorm
resnorm = AOUT.resnorm
residratio = resnorm / residm
updateStats!(ItData, newfun, newjac, AOUT)
#
itc += 1
~keepsolhist || (@views solhist[:, itc+1] .= x)
end
(idid, errcode) = NewtonOK(resnorm, iline, tol, toosoon, itc, ItRules)
newtonout = CloseIteration(x, fc, ItData, idid, errcode, keepsolhist, solhist)
return newtonout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2671 | """
Orthogonalize!(V, hv, vv, orth; verbose=false)
C. T. Kelley, 2022
Orthogonalize the Krylov vectors using your (my) choice of
methods. Anything other than classical Gram-Schmidt twice (cgs2) is
likely to become an undocumented and UNSUPPORTED option. Methods other
than cgs2 are mostly for CI for the linear solver.
DO NOT use anything other than "cgs2" with Anderson acceleration.
"""
function Orthogonalize!(V, hv, vv, orth = "cgs2"; verbose = false)
orthopts = ["mgs1", "mgs2", "cgs1", "cgs2"]
orth in orthopts || error("Impossible orth spec in Orthogonalize!")
if orth == "mgs1"
mgs!(V, hv, vv; verbose = verbose)
elseif orth == "mgs2"
mgs!(V, hv, vv, "twice"; verbose = verbose)
elseif orth == "cgs1"
cgs!(V, hv, vv, "once"; verbose = verbose)
else
cgs!(V, hv, vv, "twice"; verbose = verbose)
end
end
"""
mgs!(V, hv, vv, orth; verbose=false)
"""
function mgs!(V, hv, vv, orth = "once"; verbose = false)
k = length(hv) - 1
normin = norm(vv)
#p=copy(vv)
@views for j = 1:k
p = vec(V[:, j])
hv[j] = p' * vv
vv .-= hv[j] * p
end
hv[k+1] = norm(vv)
if (normin + 0.001 * hv[k+1] == normin) && (orth == "twice")
@views for j = 1:k
p = vec(V[:, j])
hr = p' * vv
hv[j] += hr
vv .-= hr * p
end
hv[k+1] = norm(vv)
end
nv = hv[k+1]
#
# Watch out for happy breakdown
#
#if hv[k+1] != 0
#@views vv .= vv/hv[k+1]
(nv != 0) || (verbose && (println("breakdown in mgs1")))
if nv != 0
vv ./= nv
end
end
"""
cgs!(V, hv, vv, orth="twice"; verbose=false)
Classical Gram-Schmidt.
"""
function cgs!(V, hv, vv, orth = "twice"; verbose = false)
#
# no explicit BLAS calls. mul! seems faster than BLAS
# since 1.6 and allocates far less memory.
#
k = length(hv)
T = eltype(V)
onep = T(1.0)
zerop = T(0.0)
@views rk = hv[1:k-1]
pk = zeros(T, size(rk))
qk = vv
Qkm = V
# Orthogonalize
# New low allocation stuff
mul!(rk, Qkm', qk, 1.0, 1.0)
### mul!(pk, Qkm', qk)
### rk .+= pk
## rk .+= Qkm' * qk
# qk .-= Qkm * rk
mul!(qk, Qkm, rk, -1.0, 1.0)
if orth == "twice"
# Orthogonalize again
# New low allocation stuff
mul!(pk, Qkm', qk)
## pk .= Qkm' * qk
# qk .-= Qkm * pk
mul!(qk, Qkm, pk, -1.0, 1.0)
rk .+= pk
end
# Keep track of what you did.
nqk = norm(qk)
(nqk != 0) || (verbose && (println("breakdown in cgs")))
(nqk > 0.0) && (qk ./= nqk)
hv[k] = nqk
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 7384 | """
kl\\_bicgstab( x0, b, atv, V, eta, ptv = nothing;
kl_store=nothing, side = "right", lmaxit = 10, pdata = nothing)
C. T. Kelley, 2022
BiCGSTAB linear solver. Deals with preconditioning.
Uses bicgstab\\_base with is oblivious to that.
The code works and does what it needs to do, but ...\n
The user interface is unstable and may change.
The way this works is
Input:\n
x0: initial iterate, this is usually zero for nonlinear solvers
b: right hand side (duh!)
atv: matrix-vector product which depends on precomputed data pdta
I expect you to use pdata most or all of the time, so it is not
an optional argument, even if it's nothing (at least for now).
If your mat-vec is just A*v, you have to write a function where
A is the precomputed data.
API for atv is av=atv(v,pdata)
V: a vector for me to store a Jacobian-vector product. It goes where
FPS would go in gmres. You are best served if V is Float64.
eta: Termination happens when ||b - Ax|| <= eta || b ||
ptv: preconditioner-vector product, which will also use pdata. The
default is nothing, which is no preconditioning at all.
API for ptv is px=ptv(x,pdat) just like kl\\_gmres
Keyword arguments
kl\\_store: You have the option of giving me some room
for the vectors bicgstab needs to do its work. These include copies of
x0 and b, which I will not overwrite, and a couple of vectors I use
in the iteration. If you're only doing a linear solve, it
does no harm to let me allocate those vectors in kl\\_bicgstab.
The way to preallocate is ```kl_store=kstore(n,"bicgstab")``` where n
is the number of unknowns. I call this myself in the initialization
phase if you don't do it ahead of me.
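A sketch of the preallocation pattern (atv, A, b, x0, and V as in the
examples below):
```
store = kstore(length(b), "bicgstab")
bout = kl_bicgstab(x0, b, atv, V, 1.e-10; pdata = A, kl_store = store)
```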
side: left or right preconditioning. The default is "right".
lmaxit: maximum number of linear iterations. The default is 10.
pdata: precomputed data. The default is nothing, but that ain't gonna
work well for nonlinear equations.
Output:\n
A named tuple (sol, reshist, lits, idid)
where
sol= final result
reshist = residual norm history
lits = number of iterations
idid = status of the iteration
true -> converged
false -> failed to converge
### Examples from the docstrings for kl\\_bicgstab
In these examples you have the matrix and use
```
function atv(x, A)
return A * x
end
```
to compute the matvec.
#### Three dimensional problem.
Will converge in four iterations (worse than kl_gmres)
```jldoctest
julia> function atv(x, A)
return A * x
end
atv (generic function with 1 method)
julia> A = [0.001 0 0; 0 0.0011 0; 0 0 1.e4];
julia> V = zeros(3); b = [1.0; 1.0; 1.0]; x0 = zeros(3);
julia> gout = kl_bicgstab(x0, b, atv, V, 1.e-10; pdata = A);
julia> gout.reshist
5-element Vector{Any}:
1.73205e+00
1.41421e+00
3.21642e-03
3.20321e-03
4.98049e-13
julia> norm(b - A*gout.sol,Inf)
3.68594e-13
```
#### Integral equation. Notice that pdata has the kernel of the
operator and we do the matvec directly. Just like the previous example.
We put the grid information and, for this artificial example, the solution
in the precomputed data.
```jldoctest
julia> function integop(u, pdata)
K = pdata.K
return u - K * u
end
integop (generic function with 1 method)
julia> function integopinit(n)
h = 1 / n
X = collect(0.5*h:h:1.0-0.5*h)
K = [ker(x, y) for x in X, y in X]
K .*= h
sol = [usol(x) for x in X]
f = sol - K * sol
pdata = (K = K, xe = sol, f = f)
return pdata
end
integopinit (generic function with 1 method)
julia> function usol(x)
return exp.(x) .* log.(2.0 * x .+ 1.0)
end
usol (generic function with 1 method)
julia> function ker(x, y)
ker = 0.1 * sin(x + exp(y))
end
ker (generic function with 1 method)
julia> n=100; pdata = integopinit(n); ue = pdata.xe; f=pdata.f;
julia> u0 = zeros(size(f)); V = zeros(size(f));
julia> gout = kl_bicgstab(u0, f, integop, V, 1.e-10; pdata = pdata);
julia> gout.reshist
4-element Vector{Any}:
1.48252e+01
2.90538e-02
2.07823e-07
2.17107e-17
julia> norm(gout.sol-ue,Inf)
8.88178e-16
```
"""
function kl_bicgstab(
x0,
b,
atv,
V,
eta,
ptv = nothing;
kl_store = nothing,
side = "right",
lmaxit = 10,
pdata = nothing,
)
#
# If you give me too much storage, I will fix it for you.
#
isa(V, Vector) ? rhs = V : rhs = @view V[:, 1]
rhs .= b
n = length(b)
#
# If you're playing with both gmres and bicgstab you might have
# lmaxit set to -1. That will break things, so I fixed that.
#
(lmaxit == -1) && (lmaxit = 10)
if side == "right" || ptv == nothing
itsleft = false
else
itsleft = true
rhs .= ptv(rhs, pdata)
end
n = length(x0)
(kl_store !== nothing) || (kl_store = kstore(n, "bicgstab"))
linsol = kl_store[1]
# linsol .= b
y0 = kl_store[2]
y0 .= x0
# linsol = copy(b)
# y0 = copy(x0)
Kpdata = (
pdata = pdata,
side = side,
ptv = ptv,
atv = atv,
linsol = linsol,
kl_store = kl_store,
)
bout = bicgstab_base(y0, rhs, Katv, eta; lmaxit = lmaxit, pdata = Kpdata)
if side == "left" || ptv == nothing
return bout
else
sol = y0
sol .= ptv(sol, pdata)
return (sol = sol, reshist = bout.reshist, lits = bout.lits, idid = bout.idid)
end
end
"""
bicgstab_base(x0, rhs, atv, eta;
lmaxit = 10, pdata = nothing)
Base BiCGSTAB. Overwrites initial iterate and right hand side.
"""
function bicgstab_base(x0, rhs, atv, eta; lmaxit = 10, pdata = nothing)
r = rhs
x = x0
(norm(x0) == 0.0) || (r .-= atv(x0, pdata))
k = 0
rho = zeros(lmaxit + 2)
rho[1] = 1.0
rho[2] = r' * r
alpha = 1.0
omega = 1.0
r0 = copy(r)
rnorm = norm(r0)
kl_store = pdata.kl_store
v = kl_store[3]
p = kl_store[4]
s = kl_store[5]
t = kl_store[6]
# v = zeros(size(x0))
# p = zeros(size(x0))
# s = zeros(size(x0))
# t = zeros(size(x0))
tol = eta * norm(rhs)
k = 0
reshist = []
push!(reshist, rnorm)
idid = true
while rnorm > tol && k < lmaxit
k += 1
abs(omega) > 0 || (println("Breakdown omega = 0"); break)
beta = (rho[k+1] / rho[k]) * (alpha / omega)
axpy!(-omega, v, p)
# p .= r + beta * (p - omega * v)
# p .= r + beta * p
axpby!(1.0, r, beta, p)
v .= atv(p, pdata)
tau = r0' * v
abs(tau) > 0 || (println("Breakdown r0'*v = 0 "); break)
alpha = rho[k+1] / tau
# s .= r - alpha * v
copy!(s, r)
axpy!(-alpha, v, s)
t .= atv(s, pdata)
norm(t) > 0 || (println("Breakdown t = 0"); break)
omega = (t' * s) / (t' * t)
rho[k+2] = -omega * (r0' * t)
# r .= s - omega * t
copy!(r, s)
axpy!(-omega, t, r)
# x .= x + alpha * p + omega * s
copy!(t, s)
axpby!(alpha, p, omega, t)
# x .= x + t
x .+= t
rnorm = norm(r)
push!(reshist, rnorm)
end
(rnorm <= tol) || (idid = false)
return (sol = x, reshist = reshist, lits = k, idid = idid)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 12562 | """
kl\\_gmres(x0, b, atv, V, eta, ptv=nothing; kl_store=nothing;
orth = "cgs2", side="right", lmaxit=-1, pdata=nothing)
C. T. Kelley, 2022
Gmres linear solver. Handles preconditioning and restarts.
Uses gmres_base which is completely oblivious to these things.
The deal is
Input:\n
x0: initial iterate, this is usually zero for nonlinear solvers
b: right hand side (duh!)
atv: matrix-vector product which depends on precomputed data pdta
I expect you to use pdata most or all of the time, so it is not
an optional argument, even if it's nothing (at least for now).
If your mat-vec is just A*v, you have to write a function where
A is the precomputed data.
API for atv is ```av=atv(v,pdata)```
V: Preallocated n x K array for the Krylov vectors. I store the initial
normalized residual in column 1, so you have at most K-1 iterations
before gmres\\_base returns a failure. kl\\_gmres will handle the
restarts and, if lmaxit > 0, keep going until you hit lmaxit GMRES
iterations. You may allocate V in Float32 and save on storage. The
benefit from doing this is not dramatic in terms of CPU time.
eta: Termination happens when ||b - Ax|| <= eta || b ||
ptv: preconditioner-vector product, which will also use pdata. The
default is nothing, which is no preconditioning at all.
API for ptv is px=ptv(x,pdata)
Keyword arguments
kl\\_store: You have the option (don't do it!) of giving me some room
for the vectors gmres needs. These include copies of x0 and b,
which I will not overwrite and a couple of vectors I use
in the iteration. If you're only doing a linear solve, PLEASE
let me allocate those vectors in kl\\_gmres. For computing a
Newton step or for repeated solves,
the way to do this is ```kl_store=kstore(n,"gmres")``` where n
is the number of unknowns. I call this myself in the initialization
phase if you don't do it ahead of me.
Be very careful with this. kl_store is used to store the solution
to avoid overwriting the initial iterate. This means that
two calls to kl_gmres with the same kl_store will step on the
solution coming from the first call. If you let me allocate it
then it happens in local scope and will do no harm.
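A sketch of the safe pattern for repeated solves (x0, b1, b2, atv, A, and V
are placeholders):
```
store = kstore(length(b1), "gmres")
out1 = kl_gmres(x0, b1, atv, V, 1.e-10; pdata = A, kl_store = store)
sol1 = copy(out1.sol)   # copy now; the next solve reuses the same storage
out2 = kl_gmres(x0, b2, atv, V, 1.e-10; pdata = A, kl_store = store)
```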
pdata: precomputed data. The default is nothing, but that ain't gonna
work well for nonlinear equations.
orth: your choice of the wise default, classical Gram-Schmidt twice,
or something slower and less stable. Those are classical once (really
bad) or a couple variants of modified Gram-Schmidt. mgs2 is what I
used in my old matlab codes. Not terrible, but far from great.
side: left or right preconditioning. The default is "right".
lmaxit: maximum number of linear iterations. The default is -1, which
means that the maximum number of linear iterations is K-1, which
is all V will allow without restarts. If lmaxit > K-1, then the
iteration will restart until you consume lmaxit iterations or
terminate successfully.
Other parameters on the way.
Output:\n
A named tuple (sol, reshist, lits, idid)
where
sol= final result
reshist = residual norm history
lits = number of iterations
idid = status of the iteration
true -> converged
false -> failed to converge
### Examples from the docstrings for kl\\_gmres
In these examples you have the matrix and use
```
function atv(x, A)
return A * x
end
```
to compute the matvec.
#### Three dimensional problem.
Will converge in the correct three iterations
only if you orthogonalize with CGS twice.
```jldoctest
julia> function atv(x, A)
return A * x
end
atv (generic function with 1 method)
julia> A = [0.001 0 0; 0 0.0011 0; 0 0 1.e4];
julia> V = zeros(3, 10); b = [1.0; 1.0; 1.0]; x0 = zeros(3);
julia> gout = kl_gmres(x0, b, atv, V, 1.e-10; pdata = A);
julia> gout.reshist
4-element Array{Float64,1}:
1.73205e+00
1.41421e+00
6.72673e-02
1.97712e-34
julia> norm(b - A*gout.sol,Inf)
1.28536e-10
```
#### Integral equation. Notice that pdata has the kernel of the
operator and we do the matvec directly. Just like the previous example.
We put the grid information and, for this artificial example, the solution
in the precomputed data.
```jldoctest
julia> function integop(u, pdata)
K = pdata.K
return u - K * u
end
integop (generic function with 1 method)
julia> function integopinit(n)
h = 1 / n
X = collect(0.5*h:h:1.0-0.5*h)
K = [ker(x, y) for x in X, y in X]
K .*= h
sol = [usol(x) for x in X]
f = sol - K * sol
pdata = (K = K, xe = sol, f = f)
return pdata
end
integopinit (generic function with 1 method)
julia> function usol(x)
return exp.(x) .* log.(2.0 * x .+ 1.0)
end
usol (generic function with 1 method)
julia> function ker(x, y)
ker = 0.1 * sin(x + exp(y))
end
ker (generic function with 1 method)
julia> n=100; pdata = integopinit(n); ue = pdata.xe; f=pdata.f;
julia> u0 = zeros(size(f)); V = zeros(n, 20); V32=zeros(Float32,n,20);
julia> gout = kl_gmres(u0, f, integop, V, 1.e-10; pdata = pdata);
julia> gout32 = kl_gmres(u0, f, integop, V32, 1.e-10; pdata = pdata);
julia> [norm(gout.sol-ue,Inf) norm(gout32.sol-ue,Inf)]
1×2 Array{Float64,2}:
4.44089e-16 2.93700e-07
julia> [gout.reshist gout32.reshist]
4×2 Array{Float64,2}:
1.48252e+01 1.48252e+01
5.52337e-01 5.52337e-01
1.77741e-03 1.77742e-03
1.29876e-19 8.73568e-11
```
"""
function kl_gmres(
x0,
b,
atv,
V,
eta,
ptv = nothing;
kl_store = nothing,
orth = "cgs2",
side = "right",
lmaxit = -1,
pdata = nothing,
)
# Build some precomputed data to inform KL_atv about
# preconditioning ...
# Do not overwrite the initial iterate or the right hand side.
n = length(x0)
# Get the vectors GMRES needs internally and make room to
# copy the initial iterate and right side
(kl_store !== nothing) || (kl_store = kstore(n, "gmres"))
y0 = kl_store[1]
y0 .= x0
rhs = kl_store[2]
rhs .= b
# Two vectors for internals
linsol = kl_store[3]
restmp = kl_store[4]
#
if side == "right" || ptv == nothing
itsleft = false
else
itsleft = true
rhs .= ptv(rhs, pdata)
end
(n, K) = size(V)
K > 1 || error("Must allocate for GMRES iterations. V must have
at least two columns")
klmaxit = lmaxit
lmaxit > 0 || (lmaxit = K - 1)
#
itvec = maxitvec(K, lmaxit)
ip = 1
idid = false
Kpdata =
(pdata = pdata, side = side, ptv = ptv, atv = atv, linsol = linsol, restmp = restmp)
gout = []
#
# Restarted GMRES loop.
#
while ip <= length(itvec) && idid == false
localout =
gmres_base(y0, rhs, Katv, V, eta, Kpdata; lmaxit = itvec[ip], orth = orth)
idid = localout.idid
gout = outup(gout, localout, ip, klmaxit)
reslen = length(localout.reshist)
#
ip += 1
end
#
# Fixup the solution if preconditioning from the right.
#
sol = y0
if side == "left" || ptv == nothing
return (sol = sol, reshist = gout.reshist, lits = gout.lits, idid = gout.idid)
else
sol .= ptv(sol, pdata)
return (sol = sol, reshist = gout.reshist, lits = gout.lits, idid = gout.idid)
end
end
"""
Katv(x,Kpdata)
Builds a matrix-vector product to hand to gmres_base or bicgstab_base.
Puts the preconditioner in there on the correct side.
"""
function Katv(x, Kpdata)
# y=copy(x)
y = Kpdata.linsol
pdata = Kpdata.pdata
ptv = Kpdata.ptv
atv = Kpdata.atv
side = Kpdata.side
sideok = (side == "left") || (side == "right")
sideok || error(
"Bad preconditioner side in Krylov solver, input side = ",
side,
". Side must be \"left\" or \"right\" ",
)
if ptv == nothing
y .= atv(x, pdata)
return y
elseif side == "left"
y .= atv(x, pdata)
return ptv(y, pdata)
elseif side == "right"
y .= ptv(x, pdata)
return atv(y, pdata)
end
end
"""
gmres_base(x0, b, atv, V, eta, pdata; orth="cgs2", lmaxit=-1)
Base GMRES solver. This is GMRES(m) with no restarts and no preconditioning.
The idea for the future is that it'll be called by kl_gmres (linear
solver) which is the backend of klgmres.
gmres_base overwrites x0 with the solution. This is one of many reasons
that you should not invoke it directly.
"""
function gmres_base(x0, b, atv, V, eta, pdata; orth = "cgs2", lmaxit = -1)
(n, m) = size(V)
#
# Allocate for Givens
#
# kmax = m - 1
kmax = m
lmaxit == -1 || (kmax = lmaxit)
kmax > m - 1 && error("lmaxit error in gmres_base")
r = pdata.restmp
r .= b
T = eltype(V)
h = zeros(T, kmax + 1, kmax + 1)
c = zeros(kmax + 1)
s = zeros(kmax + 1)
#
# Don't do the mat-vec if the initial iterate is zero
#
# y = pdata.linsol
(norm(x0) == 0.0) || (r .-= atv(x0, pdata))
# (norm(x0) == 0.0) || (y .= atv(x0, pdata); r .-=y;)
#
#
rho0 = norm(r)
rho = rho0
#
# Initial residual = 0? This can't be good.
#
rho == 0.0 && error("Initial residual in kl_gmres is zero. Why?")
#
g = zeros(size(c))
g[1] = rho
errtol = eta * norm(b)
reshist = []
#
# Initialize
#
idid = true
push!(reshist, rho)
k = 0
#
# Showtime!
#
# @views V[:, 1] .= r / rho
@views v1 = V[:, 1]
copy!(v1, r)
rhoinv = 1.0 / rho
v1 .*= rhoinv
# @views V[:,1] ./= rho
beta = rho
while (rho > errtol) && (k < kmax)
k += 1
@views V[:, k+1] .= atv(V[:, k], pdata)
@views vv = vec(V[:, k+1])
@views hv = vec(h[1:k+1, k])
@views Vkm = V[:, 1:k]
#
# Don't mourn. Orthogonalize!
#
Orthogonalize!(Vkm, hv, vv, orth)
#
# Build information for new Givens rotations.
#
if k > 1
hv = @view h[1:k, k]
giveapp!(c[1:k-1], s[1:k-1], hv, k - 1)
end
nu = norm(h[k:k+1, k])
if nu != 0
c[k] = conj(h[k, k] / nu)
s[k] = -h[k+1, k] / nu
h[k, k] = c[k] * h[k, k] - s[k] * h[k+1, k]
h[k+1, k] = 0.0
gv = @view g[k:k+1]
giveapp!(c[k], s[k], gv, 1)
end
#
# Update the residual norm.
#
rho = abs(g[k+1])
(nu > 0.0) || (println("near breakdown"); rho = 0.0)
push!(reshist, rho)
end
#
# At this point either k = kmax or rho < errtol.
# It's time to compute x and check out.
#
y = h[1:k, 1:k] \ g[1:k]
# qmf = view(V, 1:n, 1:k)
@views qmf = V[:, 1:k]
# mul!(r, qmf, y)
# r .= qmf*y
# x .+= r
# sol = x0
# mul!(sol, qmf, y, 1.0, 1.0)
mul!(x0, qmf, y, 1.0, 1.0)
(rho <= errtol) || (idid = false)
k > 0 || println("GMRES iteration terminates on entry.")
return (rho0 = rho0, reshist = Float64.(reshist), lits = k, idid = idid)
end
function giveapp!(c, s, vin, k)
for i = 1:k
w1 = c[i] * vin[i] - s[i] * vin[i+1]
w2 = s[i] * vin[i] + c[i] * vin[i+1]
vin[i:i+1] .= [w1, w2]
end
return vin
end
#
# The functions maxitvec and outup manage the restarts.
# There is no reason to look at them or fiddle with them.
#
function maxitvec(K, lmaxit)
levels = Int.(ceil(lmaxit / (K - 1)))
itvec = ones(Int, levels)
itvec[1:levels-1] .= K - 1
remainder = lmaxit - (levels - 1) * (K - 1)
itvec[levels] = remainder
return itvec
end
function outup(gout, localout, ip, klmaxit)
idid = localout.idid
#
# If I'm doing restarts I won't store the last residual
# unless the iteration is successful. The reason is that
# I will add that residual to the list when I restart.
#
if idid || klmaxit == -1
lreshist = localout.reshist
else
lk = length(localout.reshist)
lreshist = localout.reshist[1:lk-1]
end
if ip == 1
reshist = lreshist
lits = localout.lits
else
reshist = gout.reshist
append!(reshist, lreshist)
lits = gout.lits + localout.lits
end
gout = (reshist = reshist, lits = lits, idid = idid)
return gout
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 9450 | """
FCR_heat!(FS, x, hdata)
Nonlinear equation form of conductive-radiative heat transfer problem.
"""
function FCR_heat!(FS, x, hdata)
FS = heat_fixed!(FS, x, hdata)
FS .= x - FS
#axpy!(-1.0, x, FS)
return FS
end
"""
heat_fixed!(theta, thetain, hn_data)
Fixed point map for the conductive-radiative heat transfer problem.
"""
function heat_fixed!(theta, thetain, hn_data)
epsl = 1.0
epsr = 1.0
sn_data = hn_data.sn_data
nx = length(thetain)
theta .= thetain
source = sn_data.tmphf
source .*= 0.0
rhsd2 = hn_data.rhsd2
bcfix = hn_data.bcfix
D2 = hn_data.D2
Nc = hn_data.Nc
omega = hn_data.omega
source .= theta
source .^= 4
source .*= (1.0 - omega)
ltol = 1.e-12
flux = flux_solve(source, hn_data, ltol)
@views copy!(rhsd2, flux[2:nx-1])
rhsd2 .*= (1.0 - omega)
@views axpy!(-2.0, source[2:nx-1], rhsd2)
pn = 1.0 / (2.0 * Nc)
rhsd2 .*= pn
ldiv!(D2, rhsd2)
theta[1] = 0.0
theta[nx] = 0.0
@views theta[2:nx-1] .= rhsd2
axpy!(1.0, bcfix, theta)
return theta
end
"""
heat_init(nx, na, thetal, thetar, omega, tau, Nc)
Set up the conductive-radiative heat transfer problem
I pass a named tuple of precomputed and preallocated data to
all the functions and solvers.
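A hedged usage sketch (the parameter values here are purely illustrative):
```
hn_data = heat_init(2049, 40, 1.0, 0.0, 0.5, 1.0, 0.05)
```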
"""
function heat_init(nx, na, thetal, thetar, omega, tau, Nc)
# Get the 1D Laplacian at the interior nodes. Form and store the LDLt
# facorization
np = nx - 2
D2M = Lap1d(np)
D2 = ldlt(D2M)
# Preallocate some room. I'm using kstore to store the internal
# vectors for kl_gmres since I do a complete GMRES iteration
# for every call to the fixed point map. Kids, don't try this at home!
rhsd2 = zeros(np)
h = tau / (nx - 1.0)
kl_store = kstore(nx, "gmres")
xv = collect(0:h:tau)
bcfix = thetal .+ (thetar - thetal) * xv
#
# Precomputed data for the transport problem.
#
sn_data = sn_init(nx, na, x -> omega, tau, thetal^4, thetar^4)
#
# Stuff it all in one place.
#
hn_data = (
sn_data = sn_data,
bcfix = bcfix,
D2 = D2,
rhsd2 = rhsd2,
omega = omega,
Nc = Nc,
kl_store = kl_store,
thetal = thetal,
thetar = thetar,
)
return hn_data
end
"""
sn_init(nx, na2, fs, tau, vleft, vright; siewert=false)
I pass a named tuple of precomputed and preallocated data to
all the functions and solvers.
The input to this is obvious stuff.
nx = number of spatial grid points
na2 = number of angles. The angular mesh is (na2/2) Gaussian quadratures
on [-1,0) and (0,1]
fs:function ; scattering coefficient is fs(x)
Boundary conditions for the transport problem are constant vectors
filled with vleft/vright.
phi_left, phi_right = ones(na2/2) * vleft/vright
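A hedged usage sketch (the values are illustrative only):
```
sn_data = sn_init(2049, 40, x -> 0.5, 5.0, 1.0, 0.0)
```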
"""
function sn_init(nx, na2, fs, tau, vleft, vright; siewert = false)
#
# Set up the quadrature rule in angle
#
# Only used for CI
if siewert
#
# I don't need the weights to make tables, but I need
# to return something.
#
angles = [-0.05; collect(-0.1:-0.1:-1.0); 0.05; collect(0.1:0.1:1.0)]
weights = angles
# the real deal
else
# (angles, weights) = hard_gauss()
(angles, weights) = sn_angles(na2)
end
na = floor(Int, na2 / 2)
#
# scattering coefficient
#
dx = tau / (nx - 1)
x = collect(0:dx:tau)
c = fs.(x)
#
# Preallocated storage for intermediate results
#
phi0 = zeros(nx)
tmpf = zeros(nx)
tmp1 = zeros(nx)
tmphf = zeros(nx)
rhsg = zeros(nx)
ptmp = zeros(na)
#
# Preallocated storage for source iteration
#
psi_left = vleft * ones(na)
psi_right = vright * ones(na)
# Preallocating the angular flux is not really necessary
# since you can compute the scalar flux on the fly as you do it.
# However, the preallocation makes the code much easier to understand
# and map to/from the text.
psi = zeros(na2, nx)
source_average = zeros(nx - 1)
source_total = zeros(nx)
#
# Preallocated storage for the Krylov basis in the GMRES solve
#
V = zeros(nx, 13)
#
return sn_data = (
c = c,
dx = dx,
psi = psi,
angles = angles,
weights = weights,
phi0 = phi0,
tmp1 = tmp1,
tmpf = tmpf,
tmphf = tmphf,
rhsg = rhsg,
source_average = source_average,
source_total = source_total,
nx = nx,
ptmp = ptmp,
psi_left = psi_left,
psi_right = psi_right,
V = V,
)
end
#function hard_gauss()
#
# Return the weights/nodes for double 20 pt gauss
# I could use FastGaussQuadrature.jl for this but am
# trying to avoid dependencies, especially for big things
# like StaticArrays.jl
#
# If you want to try FastGaussQuadrature.jl, see the function below,
# which I have commented out.
#
# m = 40
# ri = zeros(40)
# wi = zeros(40)
# r = zeros(40)
# w = zeros(40)
# ri[20] = 0.993128599185095
# ri[19] = 0.963971927277914
# ri[18] = 0.912234428251326
# ri[17] = 0.839116971822218
# ri[16] = 0.746331906460151
# ri[15] = 0.636053680726515
# ri[14] = 0.510867001950827
# ri[13] = 0.373706088715420
# ri[12] = 0.227785851141645
# ri[11] = 0.076526521133497
# wi[20] = 0.017614007139152
# wi[19] = 0.040601429800387
# wi[18] = 0.062672048334109
# wi[17] = 0.083276741576705
# wi[16] = 0.101930119817240
# wi[15] = 0.118194531961518
# wi[14] = 0.131688638449177
# wi[13] = 0.142096109318382
# wi[12] = 0.149172986472604
# wi[11] = 0.152753387130726
# for i = 1:10, ri[i] in -ri[21-i]
# wi[i] = wi[21-i]
# end
# mm = floor(Int, m / 2)
# for i = 1:mm
# r[i+mm] = (1.0 + ri[i]) * 0.5
# w[i+mm] = wi[i] * 0.5
# r[i] = -r[i+mm]
# w[i] = wi[i] * 0.5
# end
# return (r, w)
#end
"""
sn_angles(na2=40)
Get double Gauss nodes and weights for SN
This function uses FastGaussQuadrature
"""
function sn_angles(na2 = 40)
na = floor(Int, na2 / 2)
2 * na == na2 || error("odd number of angles")
baseangles, baseweights = gauss(na)
posweights = baseweights * 0.5
negweights = copy(posweights)
posangles = (baseangles .+ 1.0) * 0.5
negangles = -copy(posangles)
weights = [negweights; posweights]
angles = [negangles; posangles]
angles, weights
end
"""
flux_solve(source, hn_data, tol)
Solve the transport equation with the source from the heat
conduction problem. The output is what kl_gmres returns, so
the solution is kout.sol
"""
function flux_solve(source, hn_data, tol)
sn_data = hn_data.sn_data
b = getrhs(source, sn_data)
kl_store = hn_data.kl_store
kout =
kl_gmres(sn_data.phi0, b, AxB, sn_data.V, tol; pdata = sn_data, kl_store = kl_store)
return kout.sol
end
function AxB(flux, sn_data)
nx = length(flux)
angles = sn_data.angles
na2 = length(angles)
na = floor(Int, na2 / 2)
#tmp1=zeros(nx)
#tmpf=zeros(nx)
tmpf = sn_data.tmpf
tmp1 = sn_data.tmp1
tmp1 .*= 0.0
tmpf .= flux
tmp2 = zeros(na)
tmpf = source_iteration!(tmpf, tmp2, tmp2, tmp1, sn_data)
axpy!(-1.0, flux, tmpf)
tmpf .*= -1.0
return tmpf
end
function getrhs(source, sn_data)
nx = sn_data.nx
#rhs=zeros(nx)
rhs = sn_data.rhsg
rhs .*= 0.0
angles = sn_data.angles
na2 = length(angles)
na = floor(Int, na2 / 2)
rhs = source_iteration!(rhs, sn_data.psi_left, sn_data.psi_right, source, sn_data)
return rhs
end
function source_iteration!(flux, psi_left, psi_right, source, sn_data)
psi = sn_data.psi
psi = transport_sweep!(psi, flux, psi_left, psi_right, source, sn_data)
weights = sn_data.weights
nx = sn_data.nx
na2 = length(weights)
#
# Take the 0th moment to get the flux.
#
g = reshape(flux, 1, nx)
wt = reshape(weights, 1, na2)
mul!(g, wt, psi)
return flux
end
"""
transport_sweep!(psi, phi, psi_left, psi_right, source, sn_data)
Take a single transport sweep.
"""
function transport_sweep!(psi, phi, psi_left, psi_right, source, sn_data)
angles = sn_data.angles
#
c = sn_data.c
dx = sn_data.dx
#
na2 = length(angles)
na = floor(Int, na2 / 2)
nx = length(phi)
source_average = sn_data.source_average
source_total = sn_data.source_total
copy!(source_total, phi)
source_total .*= 0.5
source_total .*= c
axpy!(1.0, source, source_total)
@views copy!(source_average, source_total[2:nx])
@views source_average .+= source_total[1:nx-1]
source_average .*= 0.5
@views forward_angles = angles[na+1:na2]
@views backward_angles = angles[1:na]
vfl = (forward_angles / dx) .+ 0.5
vfl = 1.0 ./ vfl
vfr = (forward_angles / dx) .- 0.5
psi .*= 0.0
@views psi[1:na, nx] .= psi_right
@views psi[na+1:na2, 1] .= psi_left
#
# Forward sweep
#
@views for ix = 2:nx
copy!(psi[na+1:na2, ix], psi[na+1:na2, ix-1])
psi[na+1:na2, ix] .*= vfr
psi[na+1:na2, ix] .+= source_average[ix-1]
psi[na+1:na2, ix] .*= vfl
end
#
# Backward sweep
#
@views for ix = nx-1:-1:1
copy!(psi[1:na, ix], psi[1:na, ix+1])
psi[1:na, ix] .*= vfr
psi[1:na, ix] .+= source_average[ix]
psi[1:na, ix] .*= vfl
end
return psi
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4337 | function heq_continue(n = 100; version = "pac")
#
# Original form: heqfv1!. c is the parameter
# PAC form: heqfv3!. arclength is the parameter
#
FPS = zeros(n, 20)
FS = zeros(n)
vorig = (version == "orig")
vpac = (version == "pac")
(vorig || vpac) || error("incorrect version in heq_continue")
vorig && (initfun = solutionv1_init)
vpac && (initfun = solutionv3_init)
(
FFUN,
fdata,
pval,
nval,
xin,
x,
x0,
xold,
xdot,
bif_update,
setlam,
lambda,
dlam,
lambdamax,
) = initfun(n, FPS, FS)
qdata = (
fdata = fdata,
FS = FS,
FPS = FPS,
dlam = dlam,
xix = xin,
xold = xold,
xdot = xdot,
lambdamax = lambdamax,
bif_update = bif_update,
setlam = setlam,
)
(pval, nval, x, lambdaz) = knl_continue(FFUN, qdata, pval, nval, x, x0, lambda)
return (pval = pval, nval = nval, x = x, lambdaz = lambdaz)
end
function solutionv1_init(n, FPS = [], FS = [])
x0 = ones(n)
x = copy(x0)
xold = copy(x0)
xdot = copy(x0)
xin = copy(x0)
lambda = 0.0
dlam = 0.01
lambdamax = 1.0
pval = [lambda]
nval = [norm(x, 1) / n]
hdata = heqinit(x0, 0.5)
FFUN = heqf!
#
function bif_update_1!(pval, nval, x, lambda)
c = lambda
n = length(x)
push!(nval, norm(x, 1) / n)
push!(pval, c)
end
#
function setlam_v1!(qdata, lambda, xdot = [], xold = [])
hdata = qdata.fdata
c = lambda
setc!(hdata, c)
end
#
return (
FFUN = FFUN,
fdata = hdata,
pval = pval,
nval = nval,
xin = xin,
x = x,
x0 = x0,
xold = xold,
xdot = xdot,
bif_update = bif_update_1!,
setlam = setlam_v1!,
lambda = lambda,
dlam = dlam,
lambdamax = lambdamax,
)
end
function solutionv3_init(n, FPS = [], FS = [])
# Solution at s=0, which we will not compute
# This is the pseudo-arclength version, so x = (H, c)
pval = [0.0]
nval = [1.0]
FFUN = heqfv3!
#
lambda = 0.0
dlam = 0.1
#
#
lambdamax = 150.0
#
# Set up the (bogus) precomputed data
#
x0 = ones(n)
znew = ones(n)
xin = ones(n - 1)
@views xin .= x0[1:n-1]
hdata = heqinit(xin, dlam)
#
# Now compute an honest solution to start the continuation.
# We need at least two points on the path before we can come up
# with xdot. The plan is to solve the equation and then approximate
# xdot via xdot = (xc - xold)/ds
#
FST = zeros(n - 1)
FSTP = zeros(n - 1, 20)
nout = nsoli(heqf!, xin, FST, FSTP; pdata = hdata)
#
@views znew[1:n-1] .= nout.solution
znew[n] = dlam
push!(pval, dlam)
@views nrm = norm(znew[1:n-1], 1) / (n - 1)
push!(nval, nrm)
zold = ones(n)
zold[n] = 0.0
xdot = (znew - zold) / dlam
xold = znew
x0 = 2.0 * znew - zold
fdata = (hdata = hdata, xold = xold, xdot = xdot, dlam = dlam, xin = xin)
return (
FFUN = FFUN,
fdata = fdata,
pval = pval,
nval = nval,
xin = xin,
x = znew,
x0 = x0,
xold = xold,
xdot = xdot,
bif_update = bif_update_3!,
setlam = setlam_v3!,
lambda = lambda,
dlam = dlam,
lambdamax = lambdamax,
)
end
function setlam_v3!(qdata, lambda, xdot, xold)
qdata.fdata.xdot .= xdot
qdata.fdata.xold .= xold
end
function bif_update_3!(pval, nval, x, lambda)
n = length(x)
c = x[n]
push!(pval, c)
@views nrm = norm(x[1:n-1], 1) / (n - 1)
push!(nval, nrm)
end
#
# Make the input z = ((x, c) , s)
#
function heqfv3!(F, z, fdata)
np = length(z)
n = np - 1
hdata = fdata.hdata
dlam = fdata.dlam
zdot = fdata.xdot
zold = fdata.xold
xin = fdata.xin
cdot = zdot[np]
cold = zold[np]
@views xold = zold[1:n]
@views xdot = zdot[1:n]
@views xin .= z[1:n]
c = z[np]
setc!(hdata, c)
@views FST = F[1:n]
FST = heqf!(FST, xin, hdata)
dx = xin
dx .-= xold
Nval = 100.0 * (dot(xdot, dx) / n) + cdot * (c - cold) - dlam
F[np] = Nval
return F
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2347 | function knl_continue(FFUN, qdata, pval, nval, x, x0, lambda)
#
# preallocated arrays from qdata
#
FS = qdata.FS
FPS = qdata.FPS
n = length(x0)
KDS = nkl_init(n, "gmres")
#
# precomputed data for the function, coming from qdata
#
fdata = qdata.fdata
#
# storage for xdot and xold
#
xdot = qdata.xdot
xold = qdata.xold
#
# range for the parameter
#
dlam = qdata.dlam
lambdamax = qdata.lambdamax
lambda = lambda + dlam
#
# functions to update the parameter and collect the data for
# the bifurcation diagram
#
setlam = qdata.setlam
bif_update = qdata.bif_update
#
#
#
dlamm1 = 1.0 / dlam
idid = true
lambdaz = lambda
while lambda <= lambdamax && idid
#
# setlam informs FFUN about xold, xdot, and lambda by updating
# qdata.fdata. This is the biggest, but not the only, part of this
# deal that is not for general use.
#
setlam(qdata, lambda, xdot, xold)
#
# When I send fdata to FFUN I tell it about xdot and xold so it can
# compute the normalization.
#
nout = nsoli(
FFUN,
x0,
FS,
FPS;
pdata = fdata,
eta = 0.01,
Krylov_Data = KDS,
fixedeta = true,
atol = 1.e-8,
)
#
# Stop the continuation if nsoli fails. This will usually be
# because the change in x becomes too much for the predictor to track.
#
idid = nout.idid
#
# Record the solution and compute the derivative in lambda.
# xdot needs to go to the pseudo-arclength computation.
#
x = nout.solution
xdot .= xold
axpby!(dlamm1, x, -dlamm1, xdot)
#
# Linear predictor
#
x0 .= xold
axpby!(2.0, x, -1.0, x0)
xold .= x
#
# update the data for the diagram
#
bif_update(pval, nval, x, lambda)
#
# and continue to continue
#
lambdaz = lambda
lambda = lambda + dlam
#
if abs(lambda - lambdamax) < 1.e-12
lambda = lambdamax
end
#
end
#
return (pval, nval, x, lambdaz)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 720 | """
spitchfork(u,lambda)
The nonlinearity f(u) = u^3 - lambda u. The dynamics for du/dt = -f(u)
have a pitchfork bifurcation at lambda=0. The steady-state solution
u=0 is unique for lambda < 0 and there are three steady-state solutions
if lambda > 0. This is a simple-minded version of the buckling beam problem.
The function sptest(u) = spitchfork(u,.5) is the one I call in the testing.
"""
function spitchfork(u, lambda)
fu = u^3 - lambda * u
return fu
end
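#
# Illustration (mine, not part of the original file; spitchfork_example is a
# hypothetical name): for lambda > 0 the nonzero steady states of
# du/dt = -spitchfork(u, lambda) are u = +/- sqrt(lambda), so spitchfork
# should vanish there up to roundoff.
#
function spitchfork_example(lambda = 0.5)
    ustar = sqrt(lambda)
    return max(abs(spitchfork(ustar, lambda)), abs(spitchfork(-ustar, lambda))) < 1.e-14
end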
function sptest(u)
lambda = 0.5
spt = spitchfork(u, lambda)
return spt
end
function spitchp(u, lambda)
fp = 3 * u^2 - lambda
return fp
end
function sptestp(u)
lambda = 0.5
sptp = spitchp(u, lambda)
return sptp
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4885 | """
pdeF!.jl
This file contains everything you need to run the Elliptic PDE examples.
This includes the version with an explicit sparse matrix Jacobian and
the fixed point formulations using the fish2d.jl preconditioner.
I've also parked the exact solution in here so you can do the grid refinement
study.
Look at pdeinit for the construction of the precomputed data. There is
a lot of it.
If you only want to run the examples, you should not have to look
at the code.
"""
### And now for the functions ...
"""
pdeF!(FV, u, pdata)
Residual using sparse matrix-vector multiplication
"""
function pdeF!(FV, u, pdata)
D2 = pdata.D2
CV = pdata.CV
rhs = pdata.RHS
p1 = pdata.jvect1
# FV .= D2 * u + 20.0 * u .* (CV * u) - rhs
# FV .= D2*u
# p1 .= CV*u
mul!(FV, D2, u)
mul!(p1, CV, u)
p1 .*= 20.0
p1 .*= u
FV .+= p1
FV .-= rhs
end
"""
pdeJ!(FP, F, u, pdata)
Sparse matrix Jacobian. The package does not do its own sparse
differencing. The Jacobian for this problem is easy enough to
compute analytically.
"""
function pdeJ!(FP, F, u, pdata)
D2 = pdata.D2
CV = pdata.CV
CT = pdata.CT
cu = CV * u
#DC=spdiagm(0 => 20*cu); DU=spdiagm(0 => 20*u)
DC = Diagonal(20 * cu)
DU = Diagonal(20 * u)
#
# The easy way to compute the Jacobian is
#FP .= D2 + DU*CV + DC
# but you allocate yourself silly with that one.
# So we preallocate room for DU*CV in CT and sum the terms for FP
# one at a time. I have to use Diagonal instead of spdiagm if I want
# mul! to work fast.
#
FP .= D2
FP .+= DC
mul!(CT, DU, CV)
#CT .= CV; lmul!(DU,CT);
FP .+= CT
# I should be able to do mul!(FP,DU,CV), but it's 1000s of times slower.
end
"""
Jvec2d(v, FS, u, pdata)
Analytic Jacobian-vector product for PDE example
"""
function Jvec2d(v, FS, u, pdata)
D2 = pdata.D2
CV = pdata.CV
CT = pdata.CT
jvec = pdata.jvec
p1 = pdata.jvect1
# jvec .= D2 * v
# p1 .= CV * u
mul!(jvec, D2, v)
mul!(p1, CV, u)
p1 .*= 20.0
p1 .*= v
jvec .+= p1
# p1 .= CV * v
mul!(p1, CV, v)
p1 .*= 20.0
p1 .*= u
jvec .+= p1
return jvec
end
"""
hardleft!(FV, u, pdata)
Convection-diffusion equation with left preconditioning hard-wired in
"""
function hardleft!(FV, u, pdata)
fdata = pdata.fdata
# Call the nonlinear function
FV = pdeF!(FV, u, pdata)
# and apply the preconditioner.
FV .= Pfish2d(FV, fdata)
return FV
end
"""
hardleftFix!(FV, u, pdata)
Fixed point form of the left preconditioned nonlinear
convection-diffusion equation
"""
function hardleftFix!(FV, u, pdata)
FV = hardleft!(FV, u, pdata)
# G(u) = u - FV
axpby!(1.0, u, -1.0, FV)
return FV
end
"""
pdeinit(n)
collects the precomputed data for the elliptic pde example. This
includes
- the sparse matrix representation of the operators,
- the right side of the equation,
- the exact solution,
- the data that the fft-based fast Poisson solver (fish2d) needs
"""
function pdeinit(n)
# Make the grids
n2 = n * n
h = 1.0 / (n + 1.0)
x = collect(h:h:1.0-h)
# collect the operators
D2 = Lap2d(n)
DX = Dx2d(n)
DY = Dy2d(n)
CV = (DX + DY)
# I need a spare sparse matrix to save allocations in the Jacobian computation
CT = copy(CV)
# Exact solution and its derivatives
uexact = solexact(x)
dxe = dxexact(x)
dye = dyexact(x)
d2e = l2dexact(x)
dxv = reshape(dxe, n2)
dyv = reshape(dye, n2)
d2v = reshape(d2e, n2)
uv = reshape(uexact, n2)
fdata = fishinit(n)
# The right side of the equation
RHS = d2v + 20.0 * uv .* (dxv + dyv)
# preallocate a few vectors
jvec = zeros(n2)
jvect1 = zeros(n2)
# Pack it and ship it.
pdedata =
(D2 = D2, CV = CV, CT = CT, RHS = RHS, jvec, jvect1, fdata = fdata, uexact = uexact)
end
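#
# Consistency sketch (mine, not part of the original file;
# pde_consistency_sketch is a hypothetical name): the discrete residual at
# the exact solution is pure truncation error, so it should shrink roughly
# like h^2 as you refine the grid. That is the point of the grid refinement
# study mentioned above.
#
function pde_consistency_sketch(n = 31)
    pdata = pdeinit(n)
    n2 = n * n
    uv = reshape(pdata.uexact, n2)
    FV = zeros(n2)
    pdeF!(FV, uv, pdata)
    return maximum(abs.(FV))
end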
"""
This collection of functions
builds u, u_x, u_y, and the negative Laplacian for the
example problem in the book. Here
u(x,y) = 10 x y (1-x)(1-y) exp(x^4.5)
which is the example from FA01.
"""
function w(x)
w = 10.0 * x .* (1.0 .- x) .* exp.(x .^ (4.5))
end
function wx(x)
wx = 4.5 * (x .^ (3.5)) .* w(x) + 10.0 * exp.(x .^ (4.5)) .* (1.0 .- 2.0 * x)
end
function wxx(x)
wxx =
(4.5 * 3.5) * (x .^ (2.5)) .* w(x) +
4.5 * (x .^ (3.5)) .* wx(x) +
+10.0 * 4.5 * (x .^ (3.5)) .* exp.(x .^ (4.5)) .* (1.0 .- 2.0 * x) +
-20.0 * exp.(x .^ (4.5))
end
function v(x)
v = x .* (1.0 .- x)
end
function vx(x)
vx = 1.0 .- 2.0 * x
end
function vxx(x)
vxx = -2.0 * ones(size(x))
end
function solexact(x)
solexact = w(x) * v(x)'
end
function l2dexact(x)
l2dexact = -(w(x) * vxx(x)') - (wxx(x) * v(x)')
end
function dxexact(x)
dxexact = wx(x) * v(x)'
end
function dyexact(x)
dxexact = w(x) * vx(x)'
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2213 | """
FBeam!(FV, U, bdata)
Function evaluation for PTC example.
F(u) = -u'' - lambda sin(u)
"""
function FBeam!(FV, U, bdata)
D2 = bdata.D2
lambda = bdata.lambda
su = lambda * sin.(U)
FV .= (D2 * U - su)
#
# The return FV is important.
#
return FV
end
"""
BeamJ!(FP,FV,U,bdata)
Jacobian for the beam problem
F(u) = -u'' - lambda sin(u)
so
F'(u) w = D2 w - lambda cos(u) w
"""
function BeamJ!(FP, FV, U, bdata)
D2 = bdata.D2
lambda = bdata.lambda
cu = lambda * cos.(U)
CU = Diagonal(cu)
# n = length(U)
# zr = zeros(n - 1)
FP .= D2 - CU
#
# The return FP is important.
#
return FP
end
"""
BeamtdJ!(FP, FV, U, bdata)
Jacobian evaluation for the time-dependent beam problem.
If F^n(w) = w - u_n + dt F(w) = 0 then
F^n(w)' = I + dt F'(w)
"""
function BeamtdJ!(FP, FV, U, bdata)
FP .= BeamJ!(FP, FV, U, bdata)
dt = bdata.dt
FP .= I + dt * FP
end
"""
FBeamtd!(FV, U, bdata)
Function evaluation for the time-dependent beam problem.
The implicit Euler step for u_t = - F(u) is
u_{n+1} = u_n - dt F(u_{n+1})
so the nonlinear equation is
F^n(w) = w - u_n + dt F(w) = 0
"""
function FBeamtd!(FV, U, bdata)
un = bdata.UN
dt = bdata.dt
FV .= FBeam!(FV, U, bdata)
dU = U - un
FV .= dU + dt * FV
#axpby!(1.0,dU,dt,FV)
end
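#
# Sketch (mine, not part of the original file; beam_euler_residual_check is
# a hypothetical name): with u_n = 0 the implicit Euler residual is
# w - u_n + dt F(w) = w + dt F(w), so FBeamtd! and FBeam! should agree to
# roundoff in that combination.
#
function beam_euler_residual_check(n = 63, dt = 0.1)
    bdata = beaminit(n, dt)
    w = sin.(pi * bdata.x)
    FV = zeros(n)
    FBeamtd!(FV, w, bdata)
    FB = zeros(n)
    FBeam!(FB, w, bdata)
    return maximum(abs.(FV - (w + dt * FB)))
end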
"""
beaminit(n,dt,lambda=20.0)
Set up the beam problem with n interior grid points.
dt is only needed for the temporal integration examples
"""
function beaminit(n, dt, lambda = 20.0)
#
# deltaval is a place to store the current pseudo-time step. I need this
# for preconditioning. You MUST have this in your pdata because ptcsoli
# writes to it: pdata.deltaval[1]=delta
# So it has to be there in exactly this way.
#
deltaval = zeros(1)
D2 = Lap1d(n)
dx = 1.0 / (n + 1)
x = collect(dx:dx:1.0-dx)
UN = zeros(size(x))
bdata =
(D2 = D2, x = x, dx = dx, dt = dt, lambda = lambda, UN = UN, deltaval = deltaval)
end
"""
Lap1d(n)
returns -d^2/dx^2 on [0,1] zero BC
"""
function Lap1d(n; beam=false)
dx = 1 / (n + 1)
d = 2.0 * ones(n)
sup = -ones(n - 1)
D2 = SymTridiagonal(d, sup)
D2 ./= (dx*dx)
return D2
end
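#
# Sketch (mine, not part of the original file; lap1d_eig_sketch is a
# hypothetical name): v_j = sin(pi x_j) is an exact discrete eigenvector of
# Lap1d with eigenvalue (2 - 2 cos(pi dx))/dx^2, which tends to pi^2 as the
# grid is refined. The identity below should hold to roundoff.
#
function lap1d_eig_sketch(n = 100)
    dx = 1.0 / (n + 1)
    x = collect(dx:dx:1.0-dx)
    v = sin.(pi * x)
    lam = (2.0 - 2.0 * cos(pi * dx)) / (dx * dx)
    return maximum(abs.(Lap1d(n) * v - lam * v))
end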
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3984 | """
Fbvp! and Jbvp! are the function and Jacobian evaluations for the
boundary value problem example in Chapter 2.
The Jacobian is banded and I've padded the storage so I can use lu! or qr!
for the linear solver. You need to be careful about this and RTFM (ie the
LAPACK or LINPACK) manual to get the padding right. The short story is
that if your upper/lower bandwidths are lu/ll, then you must store in
in a matrix with bandwidts lu+2/ll with zeros in the unused bands.
That aside, there is not much here that I did not explain in the book.
"""
function Fbvp!(FV, U, bdata)
n2 = length(U)
n = bdata.n
n2 == 2n || error("dimension error in Fbvp")
force = bdata.force
tv = bdata.tv
tvdag = bdata.tvdag
h = bdata.h
FV[1] = U[2]
FV[2n] = U[2n-1]
v = view(U, 1:2:2n-1)
vp = view(U, 2:2:2n)
force .= Phi.(tv, tvdag, vp, v)
h2 = 0.5 * h
@inbounds @simd for ip = 1:n-1
FV[2*ip+1] = v[ip+1] - v[ip] - h2 * (vp[ip] + vp[ip+1])
FV[2*ip] = vp[ip+1] - vp[ip] + h2 * (force[ip] + force[ip+1])
end
return FV
end
function Jbvp!(FVP, FV, x, bdata)
n = bdata.n
tv = bdata.tv
tvdag = bdata.tvdag
h = bdata.h
h2 = h * 0.5
zdat = bdata.zdat
DiagFP = bdata.DiagFP
jacinit!(FVP, DiagFP)
#
# Using @view to avoid allocations. Build the vector I'll
# need to populate the Jacobian.
#
# @views zdat[1:n] .= (h .* tv[1:n] .* x[1:2:2n-1] .- h2)
@views zdat .= (h .* tv[1:n] .* x[1:2:2n-1] .- h2)
#
# The diagind function gets the diagonals so I can populate
# them without allocations.
#
nup = diagind(FVP, 1)
FUP = view(FVP, nup)
@views FUP[2:2:2n-2] .= zdat[2:n]
ndown = diagind(FVP, -1)
FDOWN = view(FVP, ndown)
@views FDOWN[1:2:2n-3] .= zdat[1:n-1]
return FVP
end
function Phi(t, tdag, vp, v)
phi = 4.0 * tdag * vp + (t * v - 1.0) * v
return phi
end
function bvpinit(n, T = Float64)
#
# Allocate space for the Jacobian, compute the parts of the Jacobian
# that don't depend on the iteration, and store a few vectors.
#
h = 20.0 / (n - 1)
h2 = h * 0.5
tv = collect(0:h:20.0)
tvdag = collect(0:h:20.0)
@views tvdag[2:n] .= (1.0 ./ tv[2:n])
force = zeros(n)
D = ones(T, 2n)
D[1] = 0.0
D[2n] = 0.0
h4 = 4 * h2
@views D[2:2:2n-2] .= (-1 .+ h4 .* tvdag[1:n-1])
D1 = zeros(T, 2n - 1)
D1[1] = 1.0
# @views D1[3:2:2n-1] .= -h2
view(D1, 3:2:2n-1) .= -h2
Dm1 = zeros(T, 2n - 1)
view(Dm1, 2:2:2n-2) .= -h2
Dm1[2n-1] = 1.0
# @views Dm1[2:2:2n-2] .= -h2
Dm2 = zeros(T, 2n - 2)
view(Dm2, 1:2:2n-3) .= -1.0
# @views Dm2[1:2:2n-3] .= -1.0
D2 = zeros(T, 2n - 2)
@views D2[2:2:2n-2] .= (1.0 .+ h4 .* tvdag[2:n])
#
# The bandwidths are lu=ll=2, so my padded matrix gets lu=4.
# Allocate the storage and precompute the bands that don't change.
#
# DiagFP = (Dm2 = Dm2, Dm1 = Dm1, D = D, D1 = D1, D2 = D2)
DiagFP = [Dm2, Dm1, D, D1, D2]
zdat = zeros(T, n)
return (
h = h,
tv = tv,
force = force,
tvdag = tvdag,
zdat = zdat,
n = n,
DiagFP = DiagFP,
)
end
function jacinit!(FVP, DiagFP)
#
# Fill the unused padding bands with zeros.
#
for ip = 3:4
# view(FVP,band(ip)) .= 0.0
FVP[band(ip)] .= 0.0
end
#
# Get the bands you've computed.
# Put the good bands in the right place.
#
for ip = -2:2
ib = ip + 3
FVP[band(ip)] .= DiagFP[ib]
end
# FVP[band(-2)] .= DiagFP.Dm2
# FVP[band(-1)] .= DiagFP.Dm1
# FVP[band(0)] .= DiagFP.D
# FVP[band(1)] .= DiagFP.D1
# FVP[band(2)] .= DiagFP.D2
# view(FVP,band(-2)) .= DiagFP.Dm2
# view(FVP,band(-1)) .= DiagFP.Dm1
# view(FVP,band(0)) .= DiagFP.D
# view(FVP,band(1)) .= DiagFP.D1
# view(FVP,band(2)) .= DiagFP.D2
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4669 | """
Hequation.jl
This file contains the function/Jacobian evaluations for
the Chandrasekhar H-equation examples and everything you should
need to run them.
If you only want to run the examples, you should not have to look
at the code.
"""
### And now for the functions ...
"""
function heqJ!(FP,F,x,pdata)
The is the Jacobian evaluation playing by nsol rules. The
precomputed data is a big deal for this one.
"""
function heqJ!(FP::Array{T,2}, F, x, pdata) where {T<:Real}
pseed = pdata.pseed
mu = pdata.mu
n = length(x)
#
# Look at the formula in the notebook and you'll see what I did here.
#
pmu = pdata.pmu
Gfix = pdata.gtmp
@views Gfix .= x - F
@views Gfix .= -(Gfix .* Gfix .* pmu)
@views @inbounds for jfp = 1:n
FP[:, jfp] .= Gfix[:, 1] .* pseed[jfp:jfp+n-1]
FP[jfp, jfp] = 1.0 + FP[jfp, jfp]
end
return FP
end
"""
heqf!(F,x,pdata)
The function evaluation as per nsol rules.
The precomputed data is a big deal for this example. In particular,
the output pdata.FFB from plan_fft! goes to the fixed point map
computation. Things get very slow if you do not use plan_fft or plan_fft!
"""
function heqf!(F, x, pdata)
HeqFix!(F, x, pdata)
#
# naked BLAS call to fix the allocation blues
#
# Using any variation of F.=x-F really hurts
#
axpby!(1.0, x, -1.0, F)
return F
end
"""
function HeqFix!(Gfix,x,pdata)
The fixed point map. Gfix goes directly into the function and
Jacobian evaluations for the nonlinear equations formulation.
The precomputed data is a big deal for this example. In particular,
the output pdata.FFA from plan_fft goes to the fixed point map
computation. Things get very slow if you do not use plan_fft.
"""
function HeqFix!(Gfix, x, pdata)
n = length(x)
Gfix .= x
heq_hankel!(Gfix, pdata)
Gfix .*= pdata.pmu
Gfix .= 1.0 ./ (1.0 .- Gfix)
end
"""
heqinit(x0::Array{T,1}, c) where T :< Real
Initialize H-equation precomputed data.
"""
function heqinit(x0::Array{T,1}, c) where {T<:Real}
(c > 0) || error("You can't set c to zero.")
n = length(x0)
cval = ones(1)
cval[1] = c
vsize = (n)
bsize = (2 * n,)
ssize = (2 * n - 1,)
FFA = plan_fft(ones(bsize))
mu = collect(0.5:1:n-0.5)
pmu = mu * c
mu = mu / n
hseed = zeros(ssize)
for is = 1:2*n-1
hseed[is] = 1.0 / is
end
hseed = (0.5 / n) * hseed
pseed = hseed
gtmp = zeros(vsize)
rstore = zeros(bsize)
zstore = zeros(bsize) * (1.0 + im)
hankel = zeros(bsize) * (1.0 + im)
FFB = plan_fft!(zstore)
bigseed = zeros(bsize)
@views bigseed .= [hseed[n:2*n-1]; 0; hseed[1:n-1]]
@views hankel .= conj(FFA * bigseed)
return (
cval = cval,
mu = mu,
hseed = hseed,
pseed = pseed,
gtmp = gtmp,
pmu = pmu,
rstore = rstore,
zstore = zstore,
hankel = hankel,
FFB = FFB,
)
end
"""
setc!(pdata, cin)
If you are varying c in a computation, this function
lets you set it.
But! You can't set c to zero.
"""
function setc!(pdata, cin)
(cin > 0) || error("You can't set c to zero")
c = pdata.cval[1]
cfix = cin / c
pdata.pmu .*= cfix
pdata.cval[1] = cin
end
"""
heq_hankel!(b,pdata)
Multiply an nxn Hankel matrix with seed in R^(2N-1) by a vector b
FFA is what you get with plan_fft before you start computing
"""
function heq_hankel!(b, pdata)
reverse!(b)
heq_toeplitz!(b, pdata)
end
"""
heq_toeplitz!(b,pdata)
Multiply an nxn Toeplitz matrix with seed in R^(2n-1) by a vector b
"""
function heq_toeplitz!(b, pdata)
n = length(b)
y = pdata.rstore
y .*= 0.0
@views y[1:n] = b
heq_cprod!(y, pdata)
@views b .= y[1:n]
end
"""
heq_cprod!(b,pdata)
Circulant matrix-vector product with FFT
compute u = C b
Using in-place FFT
"""
function heq_cprod!(b, pdata)
xb = pdata.zstore
xb .*= 0.0
xb .+= b
pdata.FFB \ xb
hankel = pdata.hankel
xb .*= hankel
pdata.FFB * xb
b .= real.(xb)
end
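#
# Verification sketch (mine, not part of the package; heq_hankel_check is a
# hypothetical name): heq_hankel! should agree, up to roundoff, with a dense
# Hankel multiply built from the same seed, H[i,j] = hseed[i+j-1], which is
# the same structure heqJ! uses for the Jacobian columns.
#
function heq_hankel_check(n = 16, c = 0.5)
    pdata = heqinit(ones(n), c)
    hseed = pdata.hseed
    H = [hseed[i+j-1] for i = 1:n, j = 1:n]
    b = rand(n)
    bf = copy(b)
    heq_hankel!(bf, pdata)
    return maximum(abs.(bf - H * b))
end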
"""
Alternative formulation for CI. Tuned to match the paper
author="P. B. Bosma and W. A. DeRooij",
title="Efficient Methods to Calculate Chandrasekhar's H-functions",
journal="Astron. Astrophys.",
volume=126,
year=1983,
pages=283
"""
function heqbos!(F, x, pdata)
c = pdata
n = length(x)
mu = 0.5:1:n-0.5
mu = mu / n
h = 1.0 / n
cval = sqrt(1.0 - c)
A = zeros(n, n)
for j = 1:n
for i = 1:n
A[i, j] = mu[j] / (mu[i] + mu[j])
end
end
A = (c / 2) * h * A
F .= (A * x)
for ig = 1:n
F[ig] = 1.0 / (cval + F[ig])
end
F .= x - F
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3341 | """
PDE_Tools
This file has the operators I need for the PDE example. They
live in a separate file to make the CI easier for me to organize.
"""
# Famous sparse matrices
"""
Dx2d(n)
returns x partial on n x n grid.
Unit square, homogeneous Dirichlet BC
"""
function Dx2d(n)
h = 1 / (n + 1)
ssdiag = ones(n^2 - 1) / (2 * h)
for iz = n:n:n^2-1
ssdiag[iz] = 0.0
end
updiag = Pair(1, ssdiag)
lowdiag = Pair(-1, -ssdiag)
Dx = spdiagm(lowdiag, updiag)
return Dx
end
"""
Dy2d(n)
returns y partial on n x n grid.
Unit square, homogeneous Dirichlet BC
"""
function Dy2d(n)
h = 1 / (n + 1)
ssdiag = ones(n^2 - n) / (2 * h)
updiag = Pair(n, ssdiag)
lowdiag = Pair(-n, -ssdiag)
Dy = spdiagm(lowdiag, updiag)
return Dy
end
"""
Lap2d(n)
returns the negative Laplacian in two space dimensions
on n x n grid.
Unit square, homogeneous Dirichlet BC
"""
function Lap2d(n)
# hm2=1/h^2
hm2 = (n + 1.0)^2
maindiag = fill(4 * hm2, (n^2,))
sxdiag = fill(-hm2, (n^2 - 1,))
sydiag = fill(-hm2, (n^2 - n,))
for iz = n:n:n^2-1
sxdiag[iz] = 0.0
end
D2 = spdiagm(-n => sydiag, -1 => sxdiag, 0 => maindiag, 1 => sxdiag, n => sydiag)
return D2
end
"""
u=fish2d(f, fdata)
Fast Poisson solver in two space dimensions.
Same as the Matlab code.
Unit square + homogeneous Dirichlet BCs.
Grid is nx by nx
You give me f as a two-dimensional array f(x,y).
I return the solution u.
"""
function fish2d(f, fdata)
u = fdata.utmp
v = fdata.uhat
T = fdata.T
ST = fdata.ST
(nx, ny) = size(f)
nx == ny || error("need a square grid in fish2d")
u .= f
u = ST * u
u = u'
u1 = reshape(u, (nx * nx,))
v1 = reshape(v, (nx * nx,))
v1 .= u1
ldiv!(u1, T, v1)
u = u'
u .= ST * u
u ./= (2 * nx + 2)
return u
end
"""
fishinit(n)
Run FFTW.plan_r2r to set up the solver. Do not mess
with this function.
"""
function fishinit(n)
#
# Get the sine transform from FFTW. This is faster/better/cleaner
# than what I did in the Matlab codes.
#
zstore = zeros(n, n)
ST = FFTW.plan_r2r!(zstore, FFTW.RODFT00, 1)
uhat = zeros(n, n)
fishu = zeros(n, n)
TD = newT(n)
T = lu!(TD)
fdata = (ST = ST, uhat = uhat, utmp = zstore, T = T, fishu = fishu)
return fdata
end
"""
T = newT(n)
Builds the n^2 x n^2 sparse tridiagonal matrix for
the 2D fast Poisson solver.
"""
function newT(n)
N = n * n
h = 1 / (n + 1)
x = h:h:1-h
h2 = 1 / (h * h)
LE = 2 * (2 .- cos.(pi * x)) * h2
fn = ones(N - 1) * h2
gn = ones(N - 1) * h2
dx = zeros(N)
for k = 1:n-1
fn[k*n] = 0.0
gn[k*n] = 0.0
dx[(k-1)*n+1:n*k] = LE[k] * ones(n)
end
dx[(n-1)*n+1:n*n] = LE[n] * ones(n)
T = Tridiagonal(-fn, dx, -gn)
return T
end
"""
Use fish2d and reshape for preconditioning.
"""
function Pfish2d(v, fdata)
n2 = length(v)
n = Int(sqrt(n2))
(n * n == n2) || error("input to Pfish2d not a square array")
v2 = reshape(v, (n, n))
u = fish2d(v2, fdata)
u = reshape(u, (n2,))
return u
end
"""
Pvec2d(v, u, pdata)
Returns inverse Laplacian * v
u is a dummy argument to make nsoli happy
Preconditioner for nsoli
"""
function Pvec2d(v, u, pdata)
fdata = pdata.fdata
p = Pfish2d(v, fdata)
return p
end
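#
# Verification sketch (mine, not part of the original file; fish2d_check is
# a hypothetical name): Pfish2d should invert Lap2d up to roundoff, since
# fish2d is the fast Poisson solver for the same 5-point discretization.
#
function fish2d_check(n = 31)
    fdata = fishinit(n)
    f = rand(n * n)
    u = Pfish2d(f, fdata)
    return maximum(abs.(Lap2d(n) * u - f))
end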
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 859 | """
simple!(FV,x)
This is the function for Figure 2.1
It also shows up in CI
"""
function simple!(FV, x)
FV[1] = x[1] * x[1] + x[2] * x[2] - 2.0
FV[2] = exp(x[1] - 1) + x[2] * x[2] - 2.0
#
# The return FV is important
#
return FV
end
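#
# Sketch (mine, not in the original file; simple_check is a hypothetical
# name): x = (1,1) solves this system, so the residual should vanish.
# The CI tests call the solvers on simple!; this only checks the root.
#
function simple_check()
    FV = zeros(2)
    simple!(FV, [1.0; 1.0])
    return maximum(abs.(FV)) < 1.e-15
end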
function jsimple!(JacV, FV, x)
JacV[1, 1] = 2.0 * x[1]
JacV[1, 2] = 2.0 * x[2]
JacV[2, 1] = exp(x[1] - 1)
JacV[2, 2] = 2 * x[2]
#
# The return JacV is important
#
return JacV
end
"""
JVsimple(v, FV, x)
Jacobian-vector product for simple!. There is, of course, no reason
to use Newton-Krylov for this problem other than CI or demonstrating
how to call nsoli.jl.
"""
function JVsimple(v, FV, x)
jvec = zeros(2)
jvec[1] = 2.0 * x' * v
jvec[2] = v[1] * exp(x[1] - 1.0) + 2.0 * v[2] * x[2]
#
# The return jvec is important
#
return jvec
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2922 | #
# Keep the books for aasol
#
mutable struct ItStatsA{T<:Real}
condhist::Array{T,1}
alphanorm::Array{T,1}
history::Array{T,1}
end
function ItStatsA(rnorm)
ItStatsA([1.0], [1.0], [rnorm])
end
#
# Collect the stats at the end of the iteration
#
function CollectStats(ItData::ItStatsA)
stats = (condhist = ItData.condhist[2:end], alphanorm = ItData.alphanorm[2:end])
return stats
end
function updateStats!(ItData::ItStatsA, condhist, alphanorm)
append!(ItData.condhist, condhist)
append!(ItData.alphanorm, alphanorm)
end
function updateHist!(ItData::ItStatsA, rnorm)
append!(ItData.history, rnorm)
end
#
# Initialize Anderson iteration
#
function Anderson_Init(x0, Vstore, m, maxit, beta, keepsolhist)
blocksize = 1024
(0.0 < abs(beta) <= 1) || error("abs(beta) must be in (0,1]")
sol = copy(x0)
n = length(x0)
(mv, nv) = size(Vstore)
mv == n || error("Vstore needs ", n, " rows")
(nv >= 2 * (m + 1)) || error("Vstore needs ", 2 * m + 4, " columns")
#
# Just in case you are reusing Vstore for several problems, I will
# reinitialize it to zero.
#
Vstore .= 0.0
if m == 0
Qd = []
QP = []
DG = []
nvblock = 1
else
QP = @views Vstore[:, 1:m]
DG = @views Vstore[:, m+1:2*m]
if (nv >= 3 * m + 3)
(Qd = @views Vstore[:, 2*m+1:3*m-1])
nvblock = 3 * m
else
@warn "Low storage mode"
Qd = zeros(blocksize, m - 1)
nvblock = 2 * m + 1
end
end
gx = Anderson_vector_Init(Vstore, nvblock)
df = Anderson_vector_Init(Vstore, nvblock + 1)
dg = Anderson_vector_Init(Vstore, nvblock + 2)
res = Anderson_vector_Init(Vstore, nvblock + 3)
keepsolhist ? (solhist = solhistinit(n, maxit, sol)) : (solhist = [])
return (sol, gx, df, dg, res, DG, QP, Qd, solhist)
end
function Anderson_vector_Init(Vstore, nvblock)
gx = @views Vstore[:, nvblock]
return gx
end
#
# Figure out what idid and errcode are. Boring but must be done.
# This is a lot simpler than for Newton. There are no linesearches
# or Krylov iterations to keep track of.
#
function AndersonOK(resnorm, tol, k, m, toosoon, resnorm_up_bd)
idid = (resnorm <= tol)
idid ? (errcode = 0) : (errcode = 10)
nottoobig = (resnorm < resnorm_up_bd)
nottoobig || (errcode = -2; println("Diverging for m=$m in aasol.jl."))
toosoon && (errcode = -1)
(idid || ~nottoobig) ||
println("Failure to converge after $k iterations for m=$m in aasol.jl")
toosoon && println("Iteration terminates on entry to aasol.jl")
return (idid, errcode)
end
"""
falpha(alpha,theta,mk)
Map thetas to alphas for stats
"""
function falpha(alpha, theta, mk)
alpha[1] = theta[1]
for ia = 2:mk
alpha[ia] = theta[ia] - theta[ia-1]
end
alpha[mk+1] = 1.0 - theta[mk]
return norm(alpha, 1)
end
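#
# Sketch (mine, not part of the original file; falpha_example is a
# hypothetical name): the alphas built from the thetas sum to one by
# construction, and the returned 1-norm is what gets recorded as alphanorm
# in the iteration statistics.
#
function falpha_example()
    theta = [0.3; 0.5]
    alpha = zeros(3)
    a1norm = falpha(alpha, theta, 2)
    return (abs(sum(alpha) - 1.0) < 1.e-15) && (a1norm >= 1.0)
end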
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 995 | #
# The functions in this file look at the status of the iteration at the
# end and set idid and errcode.
#
# Nothing exciting in here, but it must be done.
#
"""
NewtonOK: Figure out idid and errcode for Newton's method
"""
function NewtonOK(resnorm, iline, tol, toosoon, itc, ItRules)
maxit = ItRules.maxit
armmax = ItRules.armmax
printerr = ItRules.printerr
resfail = (resnorm > tol)
idid = ~(resfail || toosoon)
errcode = 0
if ~idid
errcode =
NewtonError(resfail, iline, resnorm, toosoon, tol, itc, maxit, armmax, printerr)
end
return (idid, errcode)
end
"""
PTCOK: Figure out idid and errcode
"""
function PTCOK(resnorm, tol, toosoon, ItRules, printerr)
maxit = ItRules.maxit
delta0 = ItRules.delta0
errcode = 0
resfail = (resnorm > tol)
idid = ~(resfail || toosoon)
errcode = 0
if ~idid
(errcode = PTCError(resnorm, maxit, delta0, toosoon, tol, printerr))
end
return (idid, errcode)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 6611 | #
# The functions in this file manage Jacobian evaluations and
# factorizations and function evaluations. The function evaluation bits
# are used in the Newton-Krylov solvers too.
#
"""
For nsoli I use
PrepareJac!(FPS, FS, x, ItRules)
and for ptcsoli
PrepareJac!(FPS, FS, x, ItRules, dt)
Compute the Jacobian and perform the factorization. If you know something
about the Jacobian, you can tell me what factorization to use.
For example, if your Jacobian is spd, fact=cholesky! would work well.
"""
function PrepareJac!(FPS, FS, x, ItRules)
F! = ItRules.f
J! = ItRules.fp
dx = ItRules.dx
fact = ItRules.fact
pdata = ItRules.pdata
EvalJ!(FPS, FS, x, F!, J!, dx, pdata)
TF = fact(FPS)
return TF
end
function PrepareJac!(FPS, FS, x, ItRules, dt)
dt > 0 || error("dt must be > 0 in PTC")
F! = ItRules.f
J! = ItRules.fp
dx = ItRules.dx
fact = ItRules.fact
jknowsdt = ItRules.jknowsdt
pdata = ItRules.pdata
EvalJ!(FPS, FS, x, F!, J!, dx, dt, pdata, jknowsdt)
TF = fact(FPS)
return TF
end
"""
PrepareJac!(fc, fm::Real, x, xm, ItRules, dt=0)
Scalar equations
"""
function PrepareJac!(fps::Real, fc, x, ItRules, dt = 0)
newjac = 0
newfun = 0
fp = ItRules.fp
f = ItRules.f
dx = ItRules.dx
pdata = ItRules.pdata
solver = ItRules.solver
df = fpeval_newton(x, f, fc, fp, dx, pdata)
dt == 0 || (df += 1.0 / dt)
newjac = newjac + 1
return df
end
"""
klfact(A)
Returns the default choice for the factorization unless you tell
me to do something else. QR is the default choice for banded because
that works properly with Float32.
"""
function klfact(A::Array{T,2}) where {T<:Real}
TF = lu!(A)
end
# The default for sparse is lu. lu! for sparse matrices is
# too complicated to put in here. You can use lu! if you
# set fact = nofact and manage the factorization in your Jacobian
# evaluation code. You'll also get to manage the storage. There's
# a project in chapter 2 about that.
#
function klfact(A::SparseMatrixCSC{Float64,Int64})
TF = lu(A)
end
# The default for banded matrices is qr, because I do not trust
# you to allocate the extra two upper bands so I cannot use qr!.
# I'm using qr! in the example in Chapter 2. Look at the source
# to see how I did that.
#
function klfact(A::BandedMatrix)
TF = qr(A)
end
# Default: do nothing.
function klfact(A)
TF = nofact(A)
end
function nofact(A)
TF = A
end
"""
EvalF!(F!, FS, x, pdata)
This is a wrapper for the function evaluation that figures out if
you are using precomputed data or not. No reason to get excited
about this.
"""
function EvalF!(F!, FS, x, q::Nothing)
FS = F!(FS, x)
return FS
end
function EvalF!(F!, FS, x, pdata)
FS = F!(FS, x, pdata)
return FS
end
function EvalF!(F!, FS::Real, x::Real, q::Nothing)
FS = F!(x)
return FS
end
function EvalF!(F!, FS::Real, x::Real, pdata)
FS = F!(x, pdata)
return FS
end
"""
If you let me handle dt in PTC
JV!(FPS, FS, x, J!, pdata)
If you put the (1/dt) * I in the Jacobian yourself
JV!(FPS, FS, x, dt, J!, pdata)
This is a wrapper for the Jacobian evaluation that figures out if
you are using precomputed data or not. No reason to get excited
about this.
"""
function JV!(FPS, FS, x, J!, pdata)
J!(FPS, FS, x, pdata)
end
function JV!(FPS, FS, x, dt, J!, pdata)
J!(FPS, FS, x, dt, pdata)
end
function JV!(FPS, FS, x, dt, J!, q::Nothing)
J!(FPS, FS, x, dt)
end
function JV!(FPS, FS, x, J!, q::Nothing)
J!(FPS, FS, x)
end
"""
for Newton
EvalJ!(FPS, FS, x, F!, J!, dx, pdata)
for PTC
EvalJ!(FPS, FS, x, F!, J!, dx, dt, pdata)
evaluates the Jacobian before the factorization in PrepareJac!
"""
function EvalJ!(FPS, FS, x, F!, J!, dx, dt, pdata, jknowsdt)
# if J! != diffjac!
# JV!(FPS, FS, x, J!, pdata)
# else
# diffjac!(FPS, FS, F!, x, dx, pdata)
# end
if jknowsdt
FPS = JV!(FPS, FS, x, dt, J!, pdata)
else
EvalJ!(FPS, FS, x, F!, J!, dx, pdata)
FPS .= FPS + (1.0 / dt) * I
end
return FPS
end
function EvalJ!(FPS, FS, x, F!, J!, dx, pdata)
if J! != diffjac!
JV!(FPS, FS, x, J!, pdata)
else
diffjac!(FPS, FS, F!, x, dx, pdata)
end
return FPS
end
"""
diffjac!(FPS::Array{T,2}, FS, F!, x, dx, pdata) where T <: Real
Computes a finite-difference dense and unstructured Jacobian.
This is not something a user wants to mess with. Look at the
docstrings for nsol to see more details.
Nothing much to see here. Move along.
"""
#function diffjac!(FPS::Array{T,2}, FS, F!, x, dx, pdata) where {T<:Real}
function diffjac!(FPS, FS, F!, x, dx, pdata)
h = dx * norm(x, Inf) + 1.e-8
n = length(x)
y = ones(size(x))
FY = ones(size(x))
for ic = 1:n
y .= x
y[ic] = y[ic] + h
EvalF!(F!, FY, y, pdata)
for ir = 1:n
FPS[ir, ic] = (FY[ir] - FS[ir]) / h
end
end
return FPS
end
"""
UpdateIteration
Take a trial step. Evaluate the function and the residual norm.
"""
function UpdateIteration(xt::Array{T}, x, FS, lambda, step, ItRules) where {T<:Real}
F! = ItRules.f
pdata = ItRules.pdata
copy!(xt, x)
BLAS.axpy!(lambda, step, xt)
EvalF!(F!, FS, xt, pdata)
resnorm = norm(FS)
return (xt, FS, resnorm)
end
function UpdateIteration(xt::T, xm, ft, lambda, d, ItRules) where {T<:Real}
f = ItRules.f
pdata = ItRules.pdata
xt = xm + lambda * d
fc = 0.0
fc = EvalF!(f, fc, xt, pdata)
#fc = f(xt)
residc = norm(fc)
return (xt, fc, residc)
end
"""
fpeval_newton
Evaluates f' by differences or the user's code.
"""
function fpeval_newton(x, f, fc, fp, h, pdata)
fps = string(fp)
df = 0.0
dps = string(difffp)
if fps == dps
df = difffp(x, f, fc, h, pdata)
else
df = EvalF!(fp, df, x, pdata)
end
return df
end
"""
difffp
forward differencing for scalar equations
"""
function difffp(x, f, fc, h, pdata)
fph = 0.0
fph = EvalF!(f, fph, x + h, pdata)
df = (fph - fc) / h
# df = (f(x + h) - fc) / h
return df
end
"""
test_evaljac(ItRules, itc, newiarm, residratio)
Figures out if it's time to reevaluate and refacto the Jacbian in
Newton's method.
"""
function test_evaljac(ItRules, itc, newiarm, residratio)
solver = ItRules.solver
sham = ItRules.sham
resdec = ItRules.resdec
evaljacit = (itc % sham == 0 || newiarm > 0 || residratio > resdec)
chordinit = (solver == "chord") && itc == 0
evaljac = (evaljacit && solver == "newton") || chordinit || solver == "secant"
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3762 | #
# The functions in this file initialize the iterations
#
"""
Newtoninit: set up Newton's method
"""
function Newtoninit(
x0,
dx,
F!,
J!,
solver,
sham,
armmax,
armfix,
resdec,
maxit,
printerr,
pdata,
jfact,
keepsolhist,
)
#
# Initialize the iteration.
#
n = length(x0)
x = copy(x0)
keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
ItRules = (
dx = dx,
f = F!,
fp = J!,
solver = solver,
sham = sham,
armmax = armmax,
armfix = armfix,
resdec = resdec,
maxit = maxit,
printerr = printerr,
pdata = pdata,
fact = jfact,
)
return (ItRules, x, n, solhist)
end
"""
Secantinit(x0, dx, f, solver,
armmax, armfix, maxit, printerr, pdata, jfact)
"""
function Secantinit(x0, dx, f, solver, armmax, armfix, maxit, printerr, pdata, jfact)
n = length(x0)
x = copy(x0)
ItRules = (
f = f,
solver = solver,
armmax = armmax,
armfix = armfix,
maxit = maxit,
printerr = printerr,
pdata = pdata,
fact = jfact,
)
return (ItRules, x, n)
end
"""
PTCinit(x0, dx, F!, J!, delta0, maxit, pdata, jfact, keepsolhist)
PTCinit: get organized for PTC
"""
function PTCinit(x0, dx, F!, J!, delta0, maxit, pdata, jfact, keepsolhist, jknowsdt = false)
#
# Initialize the iteration.
#
n = length(x0)
x = copy(x0)
keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
ItRules = (
dx = dx,
f = F!,
fp = J!,
delta0 = delta0,
maxit = maxit,
pdata = pdata,
fact = jfact,
jknowsdt = jknowsdt,
)
return (ItRules, x, n, solhist)
end
"""
Newton_Krylov_Init( x0, dx, F!, Jvec, Pvec, pside, lsolver, eta,
fixedeta, armmax, armfix, maxit, lmaxit, printerr, pdata,
Krylov_Data, keepsolhist)
Newton_Krylov_Init: set up nsoli
"""
function Newton_Krylov_Init(
x0,
dx,
F!,
Jvec,
Pvec,
pside,
lsolver,
eta,
fixedeta,
armmax,
armfix,
maxit,
lmaxit,
printerr,
pdata,
Krylov_Data,
keepsolhist,
)
#
# Initialize the iteration.
#
eta > 0 || error("eta must be positive")
n = length(x0)
#
# Not for tourists! You have the opportunity, which you should decline,
# to allocate the internal space for gmres in the call to nsoli. Only
# do this for continuation or IVP integration, if at all. You can break
# stuff with this.
#
if Krylov_Data == nothing
Krylov_Data = nkl_init(n, lsolver)
# kl_store = kstore(n,lsolver)
# knl_store = knlstore(n)
end
kl_store = Krylov_Data.kl_store
knl_store = Krylov_Data.knl_store
x = knl_store.xval
x .= x0
keepsolhist ? (solhist = solhistinit(n, maxit, x)) : (solhist = [])
((lmaxit == -1) && (lsolver == "bicgstab")) && (lmaxit = 5)
ItRules = (
dx = dx,
f = F!,
Jvec = Jvec,
Pvec = Pvec,
pside = pside,
lsolver = lsolver,
kl_store = kl_store,
knl_store = knl_store,
eta = eta,
fixedeta = fixedeta,
lmaxit = lmaxit,
armmax = armmax,
armfix = armfix,
maxit = maxit,
printerr = printerr,
pdata = pdata,
)
return (ItRules, x, n, solhist)
end
"""
solhistinit(n, maxit, x)
Am I keeping the solution history? If so, allocate the space.
"""
function solhistinit(n, maxit, x)
#
# If you are keeping a solution history, make some room for it.
#
solhist = zeros(n, maxit + 1)
@views solhist[:, 1] .= x
return solhist
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3412 | #
# The functions and data structures in this file organize the
# iteration statistics and report the results after the iteration
# is complete
#
#
# Keep the books for nsol and secant
#
mutable struct ItStats{T<:Real}
ifun::Array{Int64,1}
ijac::Array{Int64,1}
iarm::Array{Int64,1}
history::Array{T,1}
end
#
# initfun = 1 unless it's the scalar secant method
# then it's 2
#
function ItStats(hist, initfun = 1)
ItStats([initfun], [0], [0], [hist])
end
function updateStats!(ItData::ItStats, newfun, newjac, AOUT)
newiarm = AOUT.aiarm
newfun = newfun + newiarm + 1
resnorm = AOUT.resnorm
append!(ItData.ifun, newfun)
append!(ItData.ijac, newjac)
append!(ItData.iarm, newiarm)
append!(ItData.history, resnorm)
end
function CollectStats(ItData::ItStats)
stats = (ifun = ItData.ifun, ijac = ItData.ijac, iarm = ItData.iarm)
return stats
end
#
# Keep the books for nsoli
#
mutable struct ItStatsK{T<:Real}
ifun::Array{Int64,1}
ijac::Array{Int64,1}
iarm::Array{Int64,1}
ikfail::Array{Int64,1}
history::Array{T,1}
end
function ItStatsK(hist)
ItStatsK([1], [0], [0], [0], [hist])
end
function updateStats!(ItData::ItStatsK, newfun, newjac, AOUT, newikfail)
newiarm = AOUT.aiarm
newfun = newfun + newiarm + 1
resnorm = AOUT.resnorm
append!(ItData.ifun, newfun)
append!(ItData.ijac, newjac)
append!(ItData.iarm, newiarm)
append!(ItData.ikfail, newikfail)
append!(ItData.history, resnorm)
end
function CollectStats(ItData::ItStatsK)
stats =
(ifun = ItData.ifun, ijac = ItData.ijac, iarm = ItData.iarm, ikfail = ItData.ikfail)
return stats
end
#
# Keep stats for PTC
#
mutable struct ItStatsPTC{T<:Real}
history::Array{T,1}
end
function ItStatsPTC(hist)
ItStatsPTC([hist])
end
function updateStats!(ItData::ItStatsPTC, resnorm)
append!(ItData.history, resnorm)
end
function CollectStats(ItData::ItStatsPTC)
stats = []
return stats
end
#
# Keep the books for PTC-Krylov
#
mutable struct ItStatsPTCK{T<:Real}
ifun::Array{Int64,1}
ijac::Array{Int64,1}
ikfail::Array{Int64,1}
history::Array{T,1}
end
function ItStatsPTCK(hist)
ItStatsPTCK([1], [0], [0], [hist])
end
function updateStats!(ItData::ItStatsPTCK, resnorm, newjac, newikfail)
append!(ItData.history, resnorm)
append!(ItData.ifun, 1)
append!(ItData.ijac, newjac)
append!(ItData.ikfail, newikfail)
end
function CollectStats(ItData::ItStatsPTCK)
stats = (ifun = ItData.ifun, ijac = ItData.ijac, ikfail = ItData.ikfail)
return stats
end
"""
CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist = [])
Collect the solution, function value, iteration stats and send them back.
"""
function CloseIteration(x, FS, ItData, idid, errcode, keepsolhist, solhist = [])
stats = CollectStats(ItData)
ithist = ItData.history
if keepsolhist
sizehist = length(ithist)
return (
solution = x,
functionval = FS,
history = ithist,
stats = stats,
idid = idid,
errcode = errcode,
solhist = solhist[:, 1:sizehist],
)
else
return (
solution = x,
functionval = FS,
history = ithist,
stats = stats,
idid = idid,
errcode = errcode,
)
end
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 5770 | #
# The functions in this file manage the Newton-Krylov step and
# the Jacobian/preconditioner - vector products.
#
"""
Krylov_Step!(step, x, FS, FPS, ItRules, etag, delta = 0)
Take a Newton-Krylov step. This function does lots of its work
mapping nonlinear problems to linear solvers. Only then do I get
to deploy the Krylov linear solvers.
"""
function Krylov_Step!(step, x, FS, FPS, ItRules, etag, delta = 0)
#
# Test for too much, too soon.
#
lsolver = ItRules.lsolver
lmaxit = ItRules.lmaxit
T = eltype(FPS)
kstep_test(FPS, step, lsolver)
Jvec = ItRules.Jvec
Pvec = ItRules.Pvec
kl_store = ItRules.kl_store
knl_store = ItRules.knl_store
pdata = ItRules.pdata
dx = ItRules.dx
f = ItRules.f
fixedeta = ItRules.fixedeta
# s0 = zeros(size(step))
#
# Initial iterate for step is zero.
#
s0 = step
s0 .*= 0.0
side = ItRules.pside
#
# map the Jacobian-vector and preconditioner-vector products
# from nsoli format to what the Krylov solvers want to see
#
kdata = (
pdata = pdata,
dx = dx,
xc = x,
f = f,
FS = FS,
Jvec = Jvec,
Pvec = Pvec,
delta = delta,
knl_store = knl_store,
)
Pvecg = Pvec2
Jvecg = Jvec2
Pvec == nothing && (Pvecg = Pvec)
Jvec == dirder && (Jvecg = Jvec)
#
#RHS=FS
#T == Float64 || (RHS=T.(FS))
if lsolver == "gmres"
kout = kl_gmres(
s0,
FS,
Jvecg,
FPS,
etag,
Pvecg;
kl_store = kl_store,
pdata = kdata,
side = side,
lmaxit = lmaxit,
)
else
kout = kl_bicgstab(
s0,
FS,
Jvecg,
FPS,
etag,
Pvecg;
kl_store = kl_store,
pdata = kdata,
side = side,
lmaxit = lmaxit,
)
end
step .= -kout.sol
reshist = kout.reshist
lits = kout.lits
idid = kout.idid
Lstats = (reshist = reshist, lits = lits, idid = idid)
return (step = step, Lstats = Lstats)
end
function Pvec2(v, kdata)
F = kdata.f
FS = kdata.FS
xc = kdata.xc
PV = kdata.Pvec
pdata = kdata.pdata
ptv = EvalPV(PV, v, xc, pdata)
return ptv
end
function Jvec2(v, kdata)
F = kdata.f
FS = kdata.FS
xc = kdata.xc
JV = kdata.Jvec
delta = kdata.delta
pdata = kdata.pdata
atv = EvalJV(JV, v, FS, xc, delta, pdata)
return atv
end
function EvalPV(PV, v, xc, q::Nothing)
ptv = PV(v, xc)
return ptv
end
function EvalPV(PV, v, xc, pdata)
ptv = PV(v, xc, pdata)
return ptv
end
function EvalJV(JV, v, FS, xc, delta, q::Nothing)
atv = JV(v, FS, xc)
ptcmv!(atv, v, delta)
# if delta > 0
# atv .= atv + (1.0 / delta) * v
# end
return atv
end
function EvalJV(JV, v, FS, xc, delta, pdata)
atv = JV(v, FS, xc, pdata)
ptcmv!(atv, v, delta)
# if delta > 0
# atv .= atv + (1.0 / delta) * v
# end
return atv
end
function dirder(v, kdata)
pdata = kdata.pdata
dx = kdata.dx / norm(v)
dxm1 = 1.0 / dx
F = kdata.f
FS = kdata.FS
xc = kdata.xc
delx = kdata.knl_store.delx
delx .= xc
delta = kdata.delta
# delx = copy(xc)
# delx .= xc + dx * v
# delx .+= dx*v
axpy!(dx, v, delx)
FPP = kdata.knl_store.FPP
FPP .= xc
# FPP = copy(xc)
EvalF!(F, FPP, delx, pdata)
axpby!(-dxm1, FS, dxm1, FPP)
ptcmv!(FPP, v, delta)
# atv = (FPP - FS) / dx
# ptcmv!(atv, v, delta)
# return atv
return FPP
end
function ptcmv!(atv, v, delta)
(delta == 0.0) || (atv .= atv + (1.0 / delta) * v)
#return atv
end
"""
forcing(itc, residratio, etag, ItRules, tol, resnorm)
Compute the Eisenstat-Walker forcing term
"""
function forcing(itc, residratio, etag, ItRules, tol, resnorm)
gamma = 0.9
etamax = ItRules.eta
fixedeta = ItRules.fixedeta
if fixedeta || (itc == 0)
etag = etamax
else
etaRes = gamma * (etag^2)
etaA = gamma * (residratio^2)
etaflim = 0.5 * tol / resnorm
if etaRes <= 0.1
etasafe = min(etamax, etaA)
else
etasafe = min(etamax, max(etaA, etaRes))
end
etag = min(etamax, max(etasafe, etaflim))
end
return etag
end
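#
# Sketch (mine, not part of the original file; forcing_example is a
# hypothetical name and the named tuple only carries the two fields forcing
# actually reads): even after a 10x residual reduction, a previous forcing
# term of .9 keeps etag at gamma*etag^2 = .729 rather than
# gamma*(residratio)^2 = .009, so eta cannot collapse in a single step.
#
function forcing_example()
    ItRules = (eta = 0.9, fixedeta = false)
    return forcing(1, 0.1, 0.9, ItRules, 1.e-8, 1.0)
end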
"""
nkl_init(n, lsolver)
Preallocates data for internal stuff in nsoli.
You do not want to mess with this unless you are doing
IVP integration or continuation.
"""
function nkl_init(n, lsolver)
kl_store = kstore(n, lsolver)
knl_store = knlstore(n)
return (kl_store = kl_store, knl_store = knl_store)
end
"""
knlstore(n)
Preallocates the vectors Newton-Krylov uses internally.
"""
function knlstore(n)
xval = zeros(n)
step = zeros(n)
xt = zeros(n)
FT = zeros(n)
delx = zeros(n)
FPP = zeros(n)
return (step = step, xt = xt, FT = FT, delx = delx, FPP = FPP, xval = xval)
end
"""
kstore(n, lsolver)
Preallocates the vectors a Krylov method uses internally.
"""
function kstore(n, lsolver)
tmp1 = zeros(n)
tmp2 = zeros(n)
tmp3 = zeros(n)
tmp4 = zeros(n)
if lsolver == "gmres"
return (tmp1, tmp2, tmp3, tmp4)
else
tmp5 = zeros(n)
tmp6 = zeros(n)
tmp7 = zeros(n)
return (tmp1, tmp2, tmp3, tmp4, tmp5, tmp6, tmp7)
end
end
function kstep_test(FPS, step, lsolver)
solver_ok = (lsolver == "gmres") || (lsolver == "bicgstab")
solver_ok || error(lsolver, " ", "not supported")
# Do a bit of management
(nk,) = size(FPS)
n = length(step)
n == nk || error("Krylov storage vectors wrong length")
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 4365 | """
PTCUpdate(FPS, FS, x, ItRules, step, residm, dt)
Updates the PTC iteration. This is a much simpler algorithm that Newton.
We update the Jacobian every iteration and there is no line search
to manage.
Do not mess with this function!
In particular do not touch the line that calls PrepareJac!.
FPF = PrepareJac!(FPS, FS, x, ItRules,dt)
PrepareJac! builds F'(u) + (1/dt)I, factors it, and sends ptcsol that
factorization.
FPF is not the same as FPS (the storage you allocate for the Jacobian)
for a reason. FPF and FPS do not have the same type, even though they
share storage. So, FPS=PrepareJac!(FPS, FS, ...) will break things.
"""
function PTCUpdate(FPS, FS, x, ItRules, step, residm, dt)
T = eltype(FPS)
F! = ItRules.f
pdata = ItRules.pdata
#
FPF = PrepareJac!(FPS, FS, x, ItRules, dt)
#
# step .= -(FPF \ FS)
T == Float64 ? (step .= -(FPF \ FS)) : (step .= -(FPF \ T.(FS)))
#
# update solution/function value
#
x .= x + step
EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
#
# Update dt
#
dt *= (residm / resnorm)
return (x, dt, FS, resnorm)
end
"""
PTCUpdate(df::Real, fval, x, ItRules, step, residm, dt)
PTC for scalar equations.
"""
function PTCUpdate(df::Real, fval, x, ItRules, step, resnorm, dt)
f = ItRules.f
dx = ItRules.dx
fp = ItRules.fp
pdata = ItRules.pdata
df = fpeval_newton(x, f, fval, fp, dx, pdata)
idt = 1.0 / dt
step = -fval / (idt + df)
x = x + step
# fval = f(x)
fval = EvalF!(f, fval, x, pdata)
# SER
residm = resnorm
resnorm = abs(fval)
dt *= (residm / resnorm)
return (x, dt, fval, resnorm)
end
function PTCKrylovinit(
x0,
dx,
F!,
Jvec,
delta0,
Pvec,
PvecKnowsdelta,
pside,
lsolver,
eta,
fixedeta,
lmaxit,
maxit,
printerr,
pdata,
)
#
# Initialize the PTC-Krylov iteration.
#
n = length(x0)
x = copy(x0)
Krylov_Data = nkl_init(n, lsolver)
kl_store = Krylov_Data.kl_store
knl_store = Krylov_Data.knl_store
ItRules = (
dx = dx,
f = F!,
Jvec = Jvec,
delta0 = delta0,
Pvec = Pvec,
PvecKnowsdelta = PvecKnowsdelta,
pside = pside,
lsolver = lsolver,
kl_store = kl_store,
knl_store = knl_store,
eta = eta,
fixedeta = fixedeta,
lmaxit = lmaxit,
maxit = maxit,
printerr = printerr,
pdata = pdata,
)
return (ItRules, x, n)
end
"""
PTCUpdatei(FPS::AbstractArray, FS, x, ItRules, step, residm, delta)
Updates the PTC-Krylov iteration. This is a much simpler algorithm
than Newton-Krylov. In particular, there is no line search to manage.
Do not mess with this function!
"""
#function PTCUpdatei(FPS::AbstractArray, FS, x, ItRules, step, residm, delta, etag)
function PTCUpdatei(FPS, FS, x, ItRules, step, residm, delta, etag)
T = eltype(FPS)
F! = ItRules.f
pdata = ItRules.pdata
#
#
# step .= -(FPF \ FS)
step .*= 0.0
#
# If the preconditioner can use delta, tell it what delta is.
#
PvecKnowsdelta = ItRules.PvecKnowsdelta
delta2pdata(PvecKnowsdelta, delta, pdata)
#
kout = Krylov_Step!(step, x, FS, FPS, ItRules, etag, delta)
Lstats = kout.Lstats
step = kout.step
#
# update solution/function value
#
x .= x + step
EvalF!(F!, FS, x, pdata)
resnorm = norm(FS)
#
# Update delta
#
delta *= (residm / resnorm)
return (x, delta, FS, resnorm, Lstats)
end
"""
delta2pdata(PvecKnowsdelta, delta, pdata)
If your preconditioner is aware of the pseudo-time step (delta)
put the value where it's supposed to be inside the precomputed data.
I also check that this has been done right and complain if not.
"""
function delta2pdata(PvecKnowsdelta, delta, pdata)
PvecKnowsdelta || return
# Once you're here you've told me that the preconditioner is delta-aware.
# I will look for the array deltaval before I write to it.
T = typeof(pdata)
Pnames = fieldnames(T)
valok = false
for ip in Pnames
valok = valok || :deltaval == ip
end
valok ? (pdata.deltaval[1] = delta) :
error("PvecKnowsdelta is set to true, but there the array
deltaval is not a field of pdata. Check the docstrings.")
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2905 | #
# The functions in this file manage error messages.
# I'm trying to give you useful hints if the iteration fails. These hints
# may become move detailed/verbose/bloviated over time.
#
"""
NewtonError(resfail, iline, resnorm, toosoon, itc, maxit, armmax, printerr)
Figure out what error message to print if the iteration fails.
"""
function NewtonError(resfail, iline, resnorm, toosoon, tol, itc, maxit, armmax, printerr)
errcode = 0
~toosoon || (errcode = Lottery_Winner(resnorm, tol, printerr))
itc < maxit || (errcode = MaxitError(resnorm, maxit, printerr))
~iline || (errcode = LineSearchFailure(maxit, itc, armmax, printerr))
errcode != 0 || println("Unknown Newton error. This is not supposed to happen.")
#
#
if printerr && ~toosoon
println("Give the history array a look to see what's happening.")
println(" ")
end
return errcode
end
"""
PTCError(resnorm, maxit, delta0, toosoon, tol, printerr)
"""
function PTCError(resnorm, maxit, delta0, toosoon, tol, printerr)
~toosoon || (errcode = Lottery_Winner(resnorm, tol, printerr))
#if toosoon
#errcode = Lottery_Winner(resnorm, tol, printerr)
#else
if printerr && ~toosoon
println("PTC failure; increase maxit and/or delta0")
println("Residual norm =", " ", resnorm)
println("Current values: maxit = ", maxit, ", delta0 = ", delta0)
println("Give the history array a look to see what's happening.")
println(" ")
end
toosoon || (errcode = 10)
return errcode
end
"""
LineSearchFailure
"""
function LineSearchFailure(maxit, itc, armmax, printerr)
if printerr
println("The line search failed at iteration", " ", itc)
println("Termination with failure")
println("Current values: maxit = ", maxit, ", armmax = ", armmax)
end
errcode = 1
return errcode
end
"""
MaxitError
"""
function MaxitError(resnorm, maxit, printerr)
if printerr
println("Maximum iterations (maxit) of ", maxit, " exceeded")
println("Convergence failure: residual norm too large ", resnorm)
println("Try increasing maxit and checking your function and
Jacobian for bugs.")
end
errcode = 10
return errcode
end
"""
Lottery_Winner(resnorm, tol)
"""
function Lottery_Winner(resnorm, tol, printerr)
if printerr
println("Congratulations, your initial iterate met the termination criteria.")
println("Residual norm = ", resnorm, " Tolerance = ", tol)
println(" ")
end
errcode = -1
return errcode
end
function Krylov_Error(lmaxit, ke_report)
if ke_report == false
println(
"Newton-Krylov: Linear solver did not meet termination criterion at least once.
This does not mean the nonlinear solver will fail. lmaxit= ",
lmaxit,
)
end
return true
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3992 | """
armijosc(xt, xc, ft, fc, d, residm, ItRules, derivative_is_old)
Line search for Newton's method. Read the notebook or print book
for the explanation. This is an internal function and I did not
design it to be hackable by the novice.
"""
function armijosc(xt, xc, ft, fc, d, residm, ItRules, derivative_is_old)
idid = true
alpha = 1.e-4
iarm = -1
lambda = 1.0
lam0 = 0.0
lamc = lambda
lamm = lamc
f = ItRules.f
# fp=ItRules.fp
# dx=ItRules.dx
ResidC = residm
armmax = ItRules.armmax
armfix = ItRules.armfix
#
if derivative_is_old
armmax = 0
end
#
# Take the full step and, if happy, go home.
#
(xt, ft, residt) = UpdateIteration(xt, xc, ft, lambda, d, ItRules)
armfail = residt > (1 - alpha * lambda) * residm
iarm += 1
#
#
# At this point I've taken a full step. I'll enter the loop only if
# that full step has failed.
#
ffc = residt^2
ff0 = residm^2
ffm = ffc
while armfail && iarm < armmax
#
# At this point the full step has failed. Now it's time to be
# serious about the line search.
#
lambda = update_lambda(iarm, armfix, lambda, lamc, ff0, ffc, ffm)
(xt, ft, residt) = UpdateIteration(xt, xc, ft, lambda, d, ItRules)
ffm = ffc
ffc = residt^2
iarm += 1
armfail = residt > (1 - alpha * lambda) * residm
end
if iarm >= armmax
idid = false
end
return (ax = xt, afc = ft, resnorm = residt, aiarm = iarm, idid = idid)
end
function update_lambda(iarm, armfix, lambda, lamc, ff0, ffc, ffm)
if iarm == 0 || armfix == true
lambda = lambda * 0.5
else
lamm = lamc
lamc = lambda
lambda = parab3p(lamc, lamm, ff0, ffc, ffm)
end
return lambda
end
"""
parab3p(lambdac, lambdam, ff0, ffc, ffm)
Three point parabolic line search.
input:\n
lambdac = current steplength
lambdam = previous steplength
ff0 = value of || F(x_c) ||^2
ffc = value of || F(x_c + lambdac d) ||^2
ffm = value of || F(x_c + lambdam d) ||^2
output:\n
lambdap = new value of lambda
internal parameters:\n
sigma0 = .1, sigma1=.5, safeguarding bounds for the linesearch
You get here if cutting the steplength in half doesn't get you
sufficient decrease. Now you have three points and can build a parabolic
model. I do not like cubic models because they either need four points
or a derivative.
So let's think about how this works. I cheat a bit and check the model
for negative curvature, which I don't want to see.
The polynomial is
p(lambda) = ff0 + (c1 lambda + c2 lambda^2)/d1
d1 = (lambdac - lambdam)*lambdac*lambdam < 0
So if c2 > 0 we have negative curvature and default to
lambdap = sigma0 * lambda
The logic is that negative curvature is telling us that
the polynomial model is not helping much, so it looks better
to take the smallest possible step. This is not what I did in the
matlab code because I did it wrong. I have since fixed it.
So (Students, listen up!) if c2 < 0 then all we gotta do is minimize
(c1 lambda + c2 lambda^2)/d1 over [.1* lambdac, .5*lambdac]
This means to MAXIMIZE c1 lambda + c2 lambda^2 becase d1 < 0.
So I find the zero of the derivative and check the endpoints.
"""
function parab3p(lambdac, lambdam, ff0, ffc, ffm)
#
# internal parameters
#
sigma0 = 0.1
sigma1 = 0.5
#
c2 = lambdam * (ffc - ff0) - lambdac * (ffm - ff0)
if c2 >= 0
#
# Sanity check for negative curvature
#
lambdap = sigma0 * lambdac
else
#
# It's a convex parabola, so use calculus!
#
c1 = lambdac * lambdac * (ffm - ff0) - lambdam * lambdam * (ffc - ff0)
lambdap = -c1 * 0.5 / c2
#
lambdaup = sigma1 * lambdac
lambdadown = sigma0 * lambdac
lambdap = max(lambdadown, min(lambdaup, lambdap))
end
end
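#
# Worked example (mine, not part of the original file; parab3p_example is a
# hypothetical name). Take ff0 = 1, a half step with ffc = 2, and a full
# step with ffm = 9. The parabola through (0,1), (.5,2), (1,9) is
# 1 - 4 lambda + 12 lambda^2, with minimizer lambda = 1/6, which is inside
# the safeguard interval [.1*lambdac, .5*lambdac] = [.05, .25], so parab3p
# returns it unchanged.
#
function parab3p_example()
    lambdap = parab3p(0.5, 1.0, 1.0, 2.0, 9.0)
    return abs(lambdap - 1.0 / 6.0) < 1.e-14
end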
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2205 | using SIAMFANLEquations
using SIAMFANLEquations.TestProblems
using SIAMFANLEquations.Examples
using Test
using LinearAlgebra: LinearAlgebra, Diagonal, I, diagm, mul!, norm, qr, qr!
#using LinearAlgebra.BLAS
import SIAMFANLEquations.Orthogonalize!
include("Chapter1/nsolsc_solution_test.jl")
include("Chapter1/ptcsolsc_test.jl")
include("Chapter2/basic2d_test.jl")
include("Chapter2/heq_lu_test.jl")
include("Chapter2/bvp_test.jl")
include("Chapter2/beam_test.jl")
include("Chapter2/pde_lin_test.jl")
include("Chapter2/nsolpde_test.jl")
include("Chapter2/knowsdt_test.jl")
include("Chapter3/gmres_test.jl")
include("Chapter3/mgs_test.jl")
include("Chapter3/bicgstab_test.jl")
include("Chapter3/Krylov_pde_test.jl")
include("Chapter3/ptcKrylovTest.jl")
include("Chapter3/ptcKrylovTestB.jl")
include("Chapter3/ptcKrylovTestC.jl")
include("Chapter3/nk_test.jl")
include("Chapter3/nk_pde.jl")
include("Chapter3/nk_heq.jl")
include("Chapter4/reldiff.jl")
include("Chapter4/alex_test.jl")
include("Chapter4/ci_pde_aa.jl")
include("Chapter4/heq_aa.jl")
include("Chapter4/linear_aa.jl")
include("Chapter5/transport_test.jl")
include("Chapter5/heat_test.jl")
include("Chapter5/heat_test2.jl")
include("Chapter5/continue_test.jl")
@testset "Scalar Equations: Chapter 1" begin
@test nsolsc_solution_test()
@test ptcsolsc_test()
end
@testset "nsol and ptcsol: Chapter 2" begin
@test basic2d_test()
@test bvp_test(201)
@test beam_test()
@test heq_lu_test()
@test pde_lin_test(31)
@test nsolpde_test(31)
@test knowsdt_test()
end
@testset "Newton-Krylov solvers: Chapter 3" begin
@test nk_test()
@test nk_pde()
@test nk_heq()
@test ptcKrylovTest()
@test ptcKrylovTestB()
@test ptcKrylovTestC()
end
@testset "Krylov solvers: Chapter 3" begin
@test gmres_test()
@test mgs_test()
@test bicgstab_test()
@test gmres_test_pde(31)
@test bicgstab_test_pde(31)
end
@testset "Anderson Acceleration: Chapter 4" begin
@test ci_pde_aa()
@test heq_aa()
@test linear_aa()
@test alex_test()
end
@testset "Case Studies: Chapter 5" begin
@test transport_test()
@test heat_test()
@test heat_test2()
@test continue_test()
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 5102 | """
nsolsc_solution_test
Test nsolsc with the atan function. Check answers and iteration stats.
"""
function nsolsc_solution_test()
#
# Local convergence with forward difference derivative
#
sdatal = nsolsc(atan, 1.0)
solok = (abs(sdatal.solution) < 1.e-8)
funok = (abs(sdatal.functionval) < 1.e-8)
hs = size(sdatal.history)
histok = (hs[1] == 5)
locok = funok && solok && histok
if ~locok
println("local FD fails")
end
#
# Local convergence with analytic derivative
#
sdataa = nsolsc(atan, 1.0, x -> 1.0 / (1.0 + x * x))
solok = (abs(sdataa.solution) < 1.e-8)
funok = (abs(sdataa.functionval) < 1.e-8)
hs = size(sdataa.history)
histok = (hs[1] == 5)
analyticok = funok && solok && histok
if ~analyticok
println("failure with analytic derivative ")
println(sdataa)
end
#
# Global convergence
#
sdatag = nsolsc(atan, 10.0; maxit = 11, armfix = true)
solok = (abs(sdatag.solution) < 1.e-8)
funok = (abs(sdatag.functionval) < 1.e-8)
hs = size(sdatag.history)
histok = (hs[1] == 12)
globok = funok && solok && histok
if ~globok
println("global FD fails")
end
#
# Global convergence with parab3p
#
sdatap3p = nsolsc(atan, 30.0; rtol = 1.e-10, maxit = 11)
solok = (abs(sdatap3p.solution) < 1.e-8)
funok = (abs(sdatap3p.functionval) < 1.e-8)
hs = size(sdatap3p.history)
histok = (hs[1] == 12)
p3pok = funok && solok && histok
if ~p3pok
println("parab3p fails")
end
#
# Local convergence with secant method
#
#sdatas=nsolsc(atan,1.0; solver="secant")
sdatas = secant(atan, 1.0)
solok = (abs(sdatas.solution) < 1.e-10)
funok = (abs(sdatas.functionval) < 1.e-10)
hs = size(sdatas.history)
histok = (hs[1] == 6)
secantok = funok && solok && histok
if ~secantok
println("secant failure")
end
#
# Initialize secant method when x0=0
#
#zedata=nsolsc(x -> cos(x) - x, 0.0;solver="secant",rtol=1.e-9)
zedata = secant(x -> cos(x) - x, 0.0; rtol = 1.e-9)
solution = 7.390851333858823e-01
solok = (abs(zedata.solution - solution) < 1.e-9)
funok = (abs(zedata.functionval) < 1.e-9)
hs = size(zedata.history)
histok = (hs[1] == 7)
zecok = funok && solok && histok
if ~zecok
println("local FD fixup at zero fails")
end
#
# Tricky line search problem.
# The line search will fail in the middle of the iteration
# and demand a recompute of the derivative.
#
sdatal = nsolsc(
x -> (1.0 + 0.01 * x) * atan(x),
200.0;
sham = 5,
maxit = 20,
armmax = 10,
armfix = true,
rtol = 1.e-10,
)
solution = -100.0
solok = (abs(sdatal.solution - solution) < 1.e-8)
funok = (abs(sdatal.functionval) < 1.e-8)
hs = size(sdatal.history)
histok = (hs[1] == 6)
shamfastok = funok && solok && histok
if ~shamfastok
println("Fast Shamanskii response FAILURE")
end
#
# Test linesearch failure complaints.
#
armfail = nsolsc(atan, 10.0; armmax = 1, armfix = true, printerr = false)
afok = false
if armfail.idid == false && armfail.errcode == 1
afok = true
else
println("Armijo failure test FAILED.")
end
#
# Test residual failure mode and no history.
#
resok = false
resfail = nsolsc(atan, 10.0; maxit = 3, armfix = true, keepsolhist = false)
if resfail.idid == false && resfail.errcode == 10
resok = true
else
println("Residual failure test FAILED.")
end
#
# Test stagnation mode
#
stagdatan = nsolsc(
x -> tan(x) - x,
4.5,
x -> sec(x)^2 - 1.0;
rtol = 1.e-17,
atol = 1.e-17,
armfix = true,
maxit = 14,
)
fvals = stagdatan.history
avals = stagdatan.stats.iarm
ifvals = stagdatan.stats.ifun
jvals = stagdatan.stats.ijac
stagl = (length(fvals) == 6)
stagf = (fvals[5] < 1.e-15)
stags = (avals[6] == 5) && (ifvals[6] == 6) && (jvals[6] == 1)
stagok = stagl && stags && stagf
if ~stagok
println("Stagnation test FAILED")
end
#
#
# Test chord method
#
lttest = nsolsc(atan, 0.5; solver = "chord")
fvals = lttest.history
chordl = (length(fvals) == 11)
ratl = fvals[11] / fvals[10]
chordr = (abs(ratl - 0.25) < 1.e-7)
solok = (fvals[11] < 1.e-6)
chordok = chordl && chordr && solok
if ~chordok
println("Chord test failed")
end
#
# Make sure nsolsc knows if the solution and the initial iterate are
# the same
#
lotout = nsolsc(x -> x * exp(x), 0.0)
lotok = (lotout.errcode == -1)
if ~lotok
println("Lottery test failed")
end
return locok &&
globok &&
secantok &&
analyticok &&
zecok &&
shamfastok &&
afok &&
resok &&
p3pok &&
chordok &&
lotok
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1477 | """
ptcsolsc_test
Make sure ptcsolsc finds the stable steady state sqrt(2)/2
f(u) = u^3 - lambda u = 0 with lambda = 1/2
The answer(s!!) are u=0, -sqrt(2)/2, and sqrt(2)/2.
The initial iterate is u0=.1, so the correct answer is sqrt(2)/2.
I'm testing for correctness and a match of the iterations statistics
"""
function ptcsolsc_test()
u0 = 0.1
ustable = 0.5 * sqrt(2.0)
uunstable = 0.0
lambda = 0.5
#
# Convergence to the right solution
#
ptcdata1 = ptcsolsc(sptest, u0, sptestp; delta0 = 1.0, rtol = 1.e-12)
ptcdata2 = ptcsolsc(
spitchfork,
u0;
delta0 = 1.0,
rtol = 1.e-12,
pdata = lambda,
keepsolhist = false,
)
ptcdatasec = secant(spitchfork, u0; rtol = 1.e-12, pdata = lambda, keepsolhist = false)
dh = ptcdata1.history - ptcdata2.history
ndh = norm(dh[:, 1], Inf)
fdok = (ndh < 1.e-7)
ptcerr = ptcdata1.solhist .- ustable
ptcfun = ptcdata1.history
secok = abs(ptcdatasec.solution) < 1.e-10
solok = (abs(ptcdata1.solution - ustable) < 1.e-10)
funok = (abs(ptcdata1.functionval) < 1.e-12)
histok = (length(ptcfun) == 18)
ptcdataf = ptcsolsc(sptest, u0; delta0 = 0.1, rtol = 1.e-12)
errcode = ptcdataf.errcode
failok = ~ptcdataf.idid && (errcode == 10)
ptcok = fdok && solok && funok && histok && failok && secok
if ~ptcok
println("Failure in Scalar PTC")
println(ptcdata1)
end
return ptcok
end
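#
# Stability sketch (illustrative, not part of the tests): for the dynamics
# u' = -f(u) with f(u) = u^3 - u/2, a steady state is stable when f'(u) > 0.
# Since f'(u) = 3u^2 - 1/2, the roots +/- sqrt(2)/2 give f' = 1 > 0 (stable)
# and the root 0 gives f' = -1/2 < 0 (unstable), which is why PTC started
# from u0 = 0.1 should land on sqrt(2)/2.
#
fprime_pitchfork(u) = 3.0 * u * u - 0.5
# fprime_pitchfork(0.5 * sqrt(2.0)) ≈ 1.0 (stable), fprime_pitchfork(0.0) == -0.5 (unstable)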
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2834 | """
basic2d_test()
Test nsol with the simple 2D problem.
"""
function basic2d_test()
x0 = ones(2, 1)
fv = zeros(2, 1)
jv = zeros(2, 2)
jsv = zeros(Float32, 2, 2)
# local convergence testing
#
# single vs double Jacobian
# jfact=nofact should have no effect on the nonlinear iteration
#
nout = nsol(basic2d!, x0, fv, jv; rtol = 1.e-10, sham = 1)
sout = nsol(basic2d!, x0, fv, jsv; sham = 1, jfact = nofact)
dss = norm(nout.solution - sout.solution)
hss = norm(nout.history - sout.history)
singleok = (norm(dss) < 1.e-7) && (norm(hss) < 1.e-7)
if ~singleok
println("Single/Double test fails.")
end
#
# chord vs Newton
#
cout = nsol(basic2d!, x0, fv, jv; solver = "chord")
dsc = norm(nout.solution - cout.solution)
lch = length(cout.history)
lnh = length(nout.history)
jevals = sum(cout.stats.ijac)
chordok = (dsc < 1.e-6) && (lch == 17) && (lnh == 5) && (jevals == 1)
if ~chordok
println("Chord/Newton test fails")
end
#
# Analytic vs finite-difference Jacobian
#
eout = nsol(basic2d!, x0, fv, jv, jbasic2d!; sham = 1)
fdok =
(norm(eout.history - nout.history) < 1.e-6) &&
(norm(eout.solution - nout.solution) < 1.e-10)
if ~fdok
println("FD/Analytic test fails.")
end
#
# Shamanskii
#
s1out = nsol(basic2d!, x0, fv, jv; sham = 2, rtol = 1.e-10)
dout1 = norm(s1out.solution - nout.solution)
jevals1 = sum(s1out.stats.ijac)
s2out = nsol(basic2d!, x0, fv, jv; sham = 2, rtol = 1.e-10, resdec = 0.5)
jevals2 = sum(s2out.stats.ijac)
dout2 = norm(s2out.solution - nout.solution)
shamok = (dout1 < 1.e-10) && (dout2 < 1.e-10) && (jevals1 == 4) && (jevals2 == 3)
if ~shamok
println("Shamanskii test fails.")
end
#
# Global convergence
#
x0a = [2.0, 0.5]
FS = zeros(2)
FPS = zeros(2, 2)
FPSS = zeros(Float32, 2, 2)
nouta = nsol(simple!, x0a, FS, FPS; keepsolhist = true, sham = 1)
noutb = nsol(simple!, x0a, FS, FPSS, jsimple!; keepsolhist = true, sham = 1)
noutc = nsol(simple!, x0a, FS, FPSS, jsimple!; armmax = 0, sham = 1)
iarm = nouta.stats.iarm
iarm2 = noutc.idid
armok = (iarm[2] == 2) && ~iarm2
preok = (norm(noutb.solhist - nouta.solhist, Inf) < 1.e-6)
solok = (norm(noutb.solution - nouta.solution, Inf) < 1.e-10)
globok = armok && preok && solok
if ~globok
println("Global test fails.")
end
return chordok && singleok && fdok && shamok && globok
end
function basic2d!(FV, x)
FV[1] = x[1] * x[1] - 2.0
FV[2] = exp(x[1] - 1) + x[2] * x[2] - 2.0
return FV
end
function jbasic2d!(JV, FV, x)
JV[1, 1] = 2 * x[1]
JV[1, 2] = 0.0
JV[2, 1] = exp(x[1] - 1)
JV[2, 2] = 2 * x[2]
return JV
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 959 | """
function beam_test()
Test the time-dependent and steady state beam problem.
"""
function beam_test()
#
dt = 0.02
n = 20
stepnum = 5
(t, se, xe, fhist, fhistt) = ivpBeam(n, dt, stepnum)
beamtdout = (length(fhist) == 6) && (norm(fhistt, Inf) < 5.e-5)
beamtdout || println("error in beam_test beamtdout")
(pout, nout) = ptcBeam(10, 100)
pout2 = ptcBeam(10, 100; jknowsdt = true)
kdtdiff =
norm(pout.solution - pout2.solution, Inf) + norm(pout2.history - pout.history, Inf)
kdtok = (kdtdiff < 1.e-14)
kdtok || println("error in knowsdt test")
nsolp = norm(pout.solution)
nsoln = norm(nout.solution)
itp = length(pout.history)
pnormok = (nsolp > 5.0) && (nsoln < 1.e-15)
pnormok || println("error in beam_test pnormok")
presok = (itp < 100) && (pout.history[itp] < 1.e-10)
presok || println("error in beam_test presok")
return beamtdout && pnormok && presok && kdtok
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 764 | """
bvp_test()
Test nsol on the boundary value problem from Chapter 2 with
the LAPACK band solver.
Compare two small grids.
"""
function bvp_test(nsmall = 101)
#
smallout = BVP_solve(nsmall; bfact = qr)
smallout2 = BVP_solve(nsmall)
hsmall = 20.0 / (nsmall - 1)
statss = smallout.bvpout.stats
hs = smallout.bvpout.history ./ sqrt(hsmall)
hs2 = smallout2.bvpout.history ./ sqrt(hsmall)
smok = norm(hs - hs2, Inf) < 1.e-13
#
nbig = 2 * nsmall
bigout = BVP_solve(nbig; bfact = qr!)
hbig = 20.0 / (nbig - 1)
statsb = bigout.bvpout.stats
bs = bigout.bvpout.history ./ sqrt(hbig)
#
armok = norm(statss.iarm - statsb.iarm) == 0
outok = norm(hs - bs, Inf) < 0.05
bvpok = outok && armok && smok
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1433 | """
heq_lu_test()
Does the H-equation module do what it's supposed to?
"""
function heq_lu_test()
n = 32
c = 0.5
FS = ones(n)
x0 = ones(n)
FPS = ones(n, n)
FPSS = ones(Float32, n, n)
hdata = heqinit(x0, c)
nsoloutfd = nsolheq(x0, FS, FPS, hdata)
nsoloutbos = nsol(heqbos!, x0, FS, FPS; pdata = c, sham = 1)
dbos = norm(nsoloutbos.solution - nsoloutfd.solution)
bosok = dbos < 1.e-7
if ~bosok
println("Bosma and DeRooij test fails in H-equation")
end
nsoloutsp = nsolheq(x0, FS, FPSS, hdata; diff = :exact)
nsoloutdp = nsolheq(x0, FS, FPS, hdata; diff = :exact)
dsp = norm(nsoloutsp.history - nsoloutfd.history)
ddp = norm(nsoloutdp.history - nsoloutfd.history)
dsolsp = norm(nsoloutsp.solution - nsoloutfd.solution)
dsoldp = norm(nsoloutsp.solution - nsoloutdp.solution)
spok = (dsp < 1.e-7) && (dsolsp < 1.e-9) && (ddp < 1.e-7) && (dsoldp < 1.e-9)
if ~spok
println("Mixed precision test fails in H-equation")
end
#
# change c and use old solution as initial iterate
#
h5 = nsoloutfd.solution
setc!(hdata, 0.7)
nsoloutfd7 = nsolheq(h5, FS, FPS, hdata)
contok = (nsoloutfd7.history[4] < 1.e-12)
if ~contok
println("Update c test fails in H-equation")
end
heqok = spok && bosok && contok
if ~heqok
println("H-equation Chapter 2 test fails")
end
return heqok
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1751 | """
knowsdt_test()
Test the jknowsdt keyword with a simple problem
u' = u (mu - u^2)
v' = u (mu - u^2) - 7 v
Remember that PTC thinks in terms of x' = - F(x)
Here mu=4 and the initial data are (u0, v0)=(.1, 10) so the correct
stable solution is (u,v)=(2,0).
"""
function knowsdt_test()
u0 = [0.1; 10.0]
FU = zeros(2)
JV = zeros(2, 2)
# Jacobian does not know about dt + finite difference
pout = ptcsol(Fode!, u0, FU, JV; delta0 = 0.01, maxit = 100)
# Analytic Jacobian does not know about dt
pout2 = ptcsol(Fode!, u0, FU, JV, Jval!; delta0 = 0.01, maxit = 100)
# Analytic Jacobian knows about dt
pout3 = ptcsol(Fode!, u0, FU, JV, Jval2!; delta0 = 0.01, maxit = 100, jknowsdt = true)
#
# Collect the output and figure out if you did things right.
#
hist = pout.history
hist2 = pout2.history
hist3 = pout3.history
sol = pout.solution
sol2 = pout2.solution
sol3 = pout3.solution
ustar = [2.0; 0.0]
dtdiff = norm(sol2 - sol3, Inf) + norm(hist2 - hist3, Inf)
soldiff = norm(sol - sol2, Inf)
stardiff = norm(sol3 - ustar, Inf)
hdiff = norm(hist - hist2, Inf)
dtpass = (soldiff < 1.e-9) && (hdiff < 1.e-6) && (dtdiff < 1.e-15)
dtpass || println("knowsdt_test fails")
return dtpass
end
function Fode!(FS, x)
#
# Dynamics are x' = -FS(x), so the zero solution is unstable for mu > 0
#
mu = 4.0
FS[1] = -x[1] * (mu - x[1] * x[1])
FS[2] = FS[1] + 7.0 * x[2]
return FS
end
function Jval2!(JV, FU, x, dt)
JV .= Jval!(JV, FU, x)
JV .= JV + (1.0 / dt) * I
end
function Jval!(JV, FU, x)
mu = 4.0
JV[1, 1] = -(mu - 3.0 * x[1] * x[1])
JV[1, 2] = 0.0
JV[2, 1] = JV[1, 1]
JV[2, 2] = 7.0
return JV
end
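#
# Consistency sketch (illustrative, not part of the tests): the stable steady
# state quoted in the docstring, (u, v) = (2, 0), should make Fode! vanish,
# since u(mu - u^2) = 0 for u = 2 when mu = 4 and the v equation then gives 0.
#
function knowsdt_steady_check()
FS = Fode!(zeros(2), [2.0; 0.0])
return norm(FS, Inf) == 0.0
end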
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 865 | """
nsolpde_test(n)
Test elliptic pde with nsol. Newton and Shamanskii. Query
convergence history, accuracy, agreement.
"""
function nsolpde_test(n)
h = 1 / (n + 1)
x = collect(h:h:1.0-h)
uexact = solexact(x)
ue = reshape(uexact, (n * n,))
houtn = NsolPDE(n)
histn = houtn.history
npass =
(length(histn) == 7) &&
(sum(houtn.stats.iarm) == 2) &&
(histn[7] / histn[1] < 1.e-13)
houts = NsolPDE(n; sham = Inf, resdec = 0.1)
hists = houts.history
spass =
(length(hists) == 8) &&
(sum(houts.stats.iarm) == 2) &&
(hists[8] / hists[1] < 1.e-7)
errn = norm(houtn.solution - ue, Inf)
errs = norm(houts.solution - ue, Inf)
delsol = norm(houts.solution - houtn.solution, Inf)
accpass = (errn < 1.e-3) && (errs < 1.e-3) && (delsol < 1.e-8)
return npass && accpass
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1873 | """
pde_lin_test(n)
Test the linear operators.
"""
function pde_lin_test(n)
h = 1.0 / (n + 1)
x = collect(h:h:1.0-h)
fdata = fishinit(n)
z = rand(n, n)
L2d = Lap2d(n)
DX = Dx2d(n)
DY = Dy2d(n)
lapok = lap_test(n, x, z, L2d, fdata)
discok = disc_test(n, x, L2d, DX, DY, fdata)
lapok && discok
end
"""
disc_test()
Have I broken the discretizations?
"""
function disc_test(n, x, L2d, DX, DY, fdata)
n2 = n * n
ue = solexact(x)
ux = dxexact(x)
uy = dyexact(x)
D2u = l2dexact(x)
ue1 = reshape(ue, (n2,))
uex1 = reshape(ux, (n2,))
uey1 = reshape(uy, (n2,))
ued21 = reshape(D2u, (n2,))
# test DX
dx21 = DX * ue1
dxerr = norm(dx21 - uex1, Inf)
# test DY
dy21 = DY * ue1
dyerr = norm(dy21 - uey1, Inf)
# test Laplacian
du21 = L2d * ue1
d2err = norm(du21 - ued21, Inf)
pass = (d2err < 0.75) && (dxerr < 0.1) && (dyerr < 1.e-12)
pass || println("Discretization test fails")
return pass
end
"""
lap_test()
Does the FFT invert the discrete Laplacian?
Does the discrete Laplacian pass a simple eigenvalue test?
"""
function lap_test(n, x, z, L2d, fdata)
randok = rand_test(z, n, L2d, fdata)
eigok = eig_test(n, x, fdata)
pass = randok && eigok
return pass
end
function rand_test(z, n, L2d, fdata)
n2 = n * n
z1 = reshape(z, (n2,))
y1 = L2d * z1
y = reshape(y1, (n, n))
mz = fish2d(y, fdata)
q = reshape(mz, (n2,))
pass = (norm(q - z1, Inf) < 1.e-12)
pass || println("rand_test fails, norm =", norm(q - z1))
return pass
end
function eig_test(n, x, fdata)
lambda = pi * pi * 5
efunx = sin.(pi * x)
efuny = sin.(2 * pi * x)
efunu = efunx * efuny'
vfun = fish2d(efunu, fdata)
pass = (norm(lambda * vfun - efunu, Inf) < 1.e-2)
pass || println("eig test fails")
return pass
end
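#
# Why lambda = 5*pi^2 above (sketch, not part of the tests): for
# u(x, y) = sin(pi x) sin(2 pi y) on the unit square,
# -Laplacian u = (pi^2 + (2 pi)^2) u = 5 pi^2 u,
# so applying the approximate inverse Laplacian fish2d to u should return
# roughly u / (5 pi^2), which is what the check lambda * vfun ≈ efunu tests.
#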
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 6916 | """
gmres_test_pde(n)
PDE test from FR16. Test of kl_gmres with all kinds of preconditioning.
"""
function gmres_test_pde(n; orth = "cgs2", write = false, eta = 9.8 * 1.e-4)
pdata = pdegminit(n)
fdata = pdata.fdata
RHS = pdata.RHS
ue = pdata.ue
b = Pfish2d(RHS, fdata)
u0 = zeros(n * n)
V = zeros(n * n, 20)
# Solve with left preconditioning hard-wired in
goutp = kl_gmres(u0, b, pdelpatv, V, eta; pdata = pdata, orth = orth)
pcres = goutp.reshist
pcres /= pcres[1]
sollhw = goutp.sol
# Solve with right preconditioning hard-wired in
goutrp = kl_gmres(u0, RHS, pderatv, V, eta; pdata = pdata, orth = orth)
pcresr = goutrp.reshist
pcresr /= pcresr[1]
solrhw = pdeptv(goutrp.sol, pdata)
# Put left preconditioning in the argument list.
goutpl2 =
kl_gmres(u0, RHS, pdeatv, V, eta, pdeptv; pdata = pdata, orth = orth, side = "left")
pcresl2 = goutpl2.reshist
pcresl2 /= pcresl2[1]
soll = goutpl2.sol
# Put right preconditioning in the argument list.
goutp2 = kl_gmres(
u0,
RHS,
pdeatv,
V,
eta,
pdeptv;
pdata = pdata,
orth = orth,
side = "right",
)
pcres2 = goutp2.reshist
pcres2 /= pcres2[1]
solr = goutp2.sol
soldel = norm(solrhw - solr, Inf)
solrdel = norm(sollhw - soll, Inf)
solerr = norm(soll - ue, Inf)
solerr2 = norm(solr - ue, Inf)
passfull = (
(soldel == 0) &&
(solrdel == 0) &&
(solerr < 1.e-2) &&
(solerr2 < 1.e-2) &&
(length(pcresr) == 12) &&
(length(pcres) == 9)
)
if write
println(soldel, " ", solrdel, " ", solerr, " ", solerr2)
end
passfull || println("Linear pde test for GMRES fails.")
# Now for some restarts ...
V = zeros(n * n, 5)
goutpl = kl_gmres(
u0,
RHS,
pdeatv,
V,
eta,
pdeptv;
pdata = pdata,
orth = orth,
side = "left",
lmaxit = 20,
)
goutpr = kl_gmres(
u0,
RHS,
pdeatv,
V,
eta,
pdeptv;
pdata = pdata,
orth = orth,
side = "right",
lmaxit = 20,
)
soldelr = norm(goutpl.sol - goutpr.sol, Inf)
solerrr = norm(goutpr.sol - ue)
solerrl = norm(goutpl.sol - ue)
numitsr = length(goutpr.reshist)
numitsl = length(goutpl.reshist)
pass_res = (
(soldelr < 1.e-3) &&
(solerr < 1.e-2) &&
(solerr2 < 1.e-2) &&
(numitsr == 16) &&
(numitsl == 13)
)
pass_res || println("Linear pde test for GMRES(m) fails.")
pass = passfull && pass_res
return pass
end
"""
bicgstab_test_pde(n)
PDE test from FR16. Test of kl_bicgstab with all kinds of preconditioning.
"""
function bicgstab_test_pde(n; write = false, eta = 9.8 * 1.e-4)
pdata = pdegminit(n)
RHS = pdata.RHS
ue = pdata.ue
V = zeros(n * n)
u0 = zeros(n * n)
#
# Solve with left preconditioning hard-wired in
#
fdata = pdata.fdata
b = Pfish2d(RHS, fdata)
goutp = kl_bicgstab(u0, b, pdelpatv, V, eta; pdata = pdata, lmaxit = 200)
pcres = goutp.reshist
pcres /= pcres[1]
sollhw = goutp.sol
#
# Solve with right preconditioning hard-wired in
#
goutrp = kl_bicgstab(u0, RHS, pderatv, V, eta; pdata = pdata, lmaxit = 200)
pcresr = goutrp.reshist
pcresr /= pcresr[1]
solrhw = copy(u0)
solrhw .= pdeptv(goutrp.sol, pdata)
solldiff = norm(solrhw - sollhw, Inf)
#
# Solve with right preconditioning
#
goutrp1 = kl_bicgstab(u0, RHS, pdeatv, V, eta, pdeptv; pdata = pdata, lmaxit = 200)
pcresr1 = goutrp1.reshist
pcresr1 /= pcresr1[1]
solr = goutrp1.sol
# solldiff += norm(solr-sollhw,Inf)
solldiff = max(solldiff, norm(solr - sollhw, Inf))
#
# Solve with left preconditioning
#
goutl1 = kl_bicgstab(
u0,
RHS,
pdeatv,
V,
eta,
pdeptv;
pdata = pdata,
lmaxit = 200,
side = "left",
)
pcresl1 = goutl1.reshist
pcresl1 /= pcresl1[1]
soll = goutl1.sol
# solldiff += norm(soll-sollhw,Inf)
solldiff = max(solldiff, norm(soll - sollhw, Inf))
#
# Hardwired and normal give same results?
#
leftdel = norm(soll - sollhw, Inf) + norm(pcres - pcresl1, Inf)
leftpass = (leftdel < 1.e-15)
rightdel = norm(solr - solrhw, Inf) + norm(pcresr1 - pcresr, Inf)
rightpass = (rightdel < 1.e-15)
#
# Solve with no preconditioning to duplicate fig 3.4 in red book
#
goutnp = kl_bicgstab(u0, RHS, pdeatv, V, eta; lmaxit = 200, pdata = pdata)
pcresnp = goutnp.reshist
pcresnp /= pcresnp[1]
solnone = goutnp.sol
solldiff = max(solldiff, norm(solnone - sollhw, Inf))
# solldiff += norm(solnone-sollhw,Inf)
#
# Are the answers close enough?
#
sollpass = (solldiff < 2.0 * eta)
#
# Are the iteration counts correct?
#
ll = length(pcres)
lr = length(pcresr1)
ln = length(pcresnp)
countok = ((ll == 7) && (lr == 8) && (ln == 37))
pass = sollpass && rightpass && leftpass & countok
return pass
end
function pdelpatv(u, pdata)
L = pdata.L
fdata = pdata.fdata
au = L * u
pau = Pfish2d(au, fdata)
return pau
end
function pderatv(u, pdata)
L = pdata.L
fdata = pdata.fdata
pau = Pfish2d(u, fdata)
au = L * pau
return au
end
function pdeptv(u, pdata)
fdata = pdata.fdata
ptv = Pfish2d(u, fdata)
end
function pdeatv(u, pdata)
xc = pdata.xc
L = pdata.L
mul!(xc, L, u)
return xc
end
"""
pdegminit(n)
collects the precomputed data for the linear elliptic pde example.
This is the example on page 54-55 of FR16.
This
includes
- the sparse matrix representation of the operators,
- the right side of the equation,
- the exact solution,
- the data that the fft-based fast Poisson solver (fish2d) needs
"""
function pdegminit(n)
# Make the grids
n2 = n * n
h = 1.0 / (n + 1.0)
x = collect(h:h:1.0-h)
o = ones(n)
Y = o * x'
y20 = 20.0 * reshape(Y, (n2,))
DiagY = Diagonal(y20)
# collect the operators
D2 = Lap2d(n)
DX = Dx2d(n)
DY = Dy2d(n)
L = D2 + I
L .+= DX
LY = copy(DY)
mul!(LY, DiagY, DY)
L .+= LY
# Exact solution and its derivatives
uexact = solexact(x)
dxe = dxexact(x)
dye = dyexact(x)
d2e = l2dexact(x)
dxv = reshape(dxe, (n2,))
dyv = reshape(dye, (n2,))
d2v = reshape(d2e, (n2,))
uv = reshape(uexact, (n2,))
# Preallocate a copy of the unknown for the function
# and preconditioner evaluation.
xc = copy(uv)
fdata = fishinit(n)
# The right side of the equation
RHS = d2v + dxv + y20 .* dyv + uv
# Pack it and ship it.
pdedata = (L, RHS = RHS, ue = uv, xc = xc, fdata = fdata)
end
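#
# Usage sketch (illustrative, not part of the tests): the tuple from
# pdegminit carries the discrete operator L, the right-hand side built from
# the exact derivatives, and the sampled exact solution ue, so RHS - L*ue is
# the truncation error of the discretization and should be small (but not
# zero) on a reasonable grid.
#
function pdegminit_demo(n = 31)
pdata = pdegminit(n)
return norm(pdata.RHS - pdata.L * pdata.ue, Inf)
end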
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2221 | """
bicgstab_test.jl
Tests the linear BiCGSTAB code, kl_bicgstab.
This is for CI only. Nothing to see here. Move along.
"""
function bicgstab_test()
pass3 = test3x3()
passint = test_integop()
passr1 = testR1()
passbicgs = pass3 && passint && passr1
return passbicgs
end
function test3x3()
A = [0.001 0 0; 0 0.0011 0; 0 0 1.e4]
V = zeros(3)
b = [1.0; 1.0; 1.0]
x0 = zeros(3)
eta = 1.e-10
gout = kl_bicgstab(x0, b, atv, V, 1.e-10; pdata = A)
pass =
(length(gout.reshist) == 5) &&
(norm(A * gout.sol - b, Inf) < 1.e-12) &&
gout.idid &&
(gout.lits == 4)
# return (gout=gout, pass=pass)
pass || println("3x3 test fails")
return pass
end
function testR1()
A = Float64.([1 2 3 4 5])
E = A' * A
A = I + E
b = ones(5)
x0 = zeros(5)
V = zeros(5)
gout = kl_bicgstab(x0, b, atv, V, 1.e-7; pdata = A)
pass = (length(gout.reshist) == 3) && (norm(A * gout.sol - b, Inf) < 1.e-12)
# return (gout=gout, pass=pass)
pass || println("R1 test fails")
return pass
end
function test_integop(n = 100)
pdata = integopinit(n)
f = pdata.f
ue = pdata.xe
u0 = zeros(size(f))
V = zeros(size(f))
gout = kl_bicgstab(u0, f, integop, V, 1.e-10; pdata = pdata)
realres = (I - pdata.K) * gout.sol - f
pass = ((norm(realres, Inf) < 1.e-12) && (length(gout.reshist) == 4))
# return (gout=gout, pass=pass)
pass || println("integop test fails")
return pass
end
function atv(x, A)
return A * x
end
function integop(u, pdata)
K = pdata.K
# f = pdata.f
return u - K * u
end
function integopinit(n)
h = 1 / n
X = collect(0.5*h:h:1.0-0.5*h)
K = [ker(x, y) for x in X, y in X]
# K = zeros(n, n)
# for j = 1:n
# for i = 1:n
# K[i, j] = ker(x[i], x[j])
# end
# end
K .*= h
# sol = exp.(x) .* log.(2.0 * x .+ 1.0)
# sol = usol.(X)
sol = [usol(x) for x in X]
f = sol - K * sol
pdata = (K = K, xe = sol, f = f)
return pdata
end
function usol(x)
return exp.(x) .* log.(2.0 * x .+ 1.0)
end
function ker(x, y)
ker = 0.1 * sin(x + exp(y))
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2476 | """
bicgstab_test_pde(n)
PDE test from FR16. Test of kl_bicgstab with all kinds of preconditioning.
"""
function bicgstab_test_pde(n; write = false, eta = 9.8 * 1.e-4)
pdata = pdegminit(n)
RHS = pdata.RHS
ue = pdata.ue
u0 = zeros(n * n)
#
# Solve with left preconditioning hard-wired in
#
fdata = pdata.fdata
b = Pfish2d(RHS, fdata)
goutp = kl_bicgstab(u0, b, pdelpatv, eta; pdata = pdata, lmaxit = 200)
pcres = goutp.reshist
pcres /= pcres[1]
sollhw = goutp.sol
#
# Solve with right preconditioning hard-wired in
#
goutrp = kl_bicgstab(u0, RHS, pderatv, eta; pdata = pdata, lmaxit = 200)
pcresr = goutrp.reshist
pcresr /= pcresr[1]
solrhw = copy(u0)
solrhw .= pdeptv(goutrp.sol, pdata)
solldiff = norm(solrhw - sollhw, Inf)
#
# Solve with right preconditioning
#
goutrp1 = kl_bicgstab(u0, RHS, pdeatv, eta, pdeptv; pdata = pdata, lmaxit = 200)
pcresr1 = goutrp1.reshist
pcresr1 /= pcresr1[1]
solr = goutrp1.sol
# solldiff += norm(solr-sollhw,Inf)
solldiff = max(solldiff, norm(solr - sollhw, Inf))
#
# Solve with left preconditioning
#
goutl1 = kl_bicgstab(
u0,
RHS,
pdeatv,
eta,
pdeptv;
pdata = pdata,
lmaxit = 200,
side = "left",
)
pcresl1 = goutl1.reshist
pcresl1 /= pcresl1[1]
soll = goutl1.sol
# solldiff += norm(soll-sollhw,Inf)
solldiff = max(solldiff, norm(soll - sollhw, Inf))
#
# Hardwired and normal give same results?
#
leftdel = norm(soll - sollhw, Inf) + norm(pcres - pcresl1, Inf)
leftpass = (leftdel < 1.e-15)
rightdel = norm(solr - solrhw, Inf) + norm(pcresr1 - pcresr, Inf)
rightpass = (rightdel < 1.e-15)
#
# Solve with no preconditioning to duplicate fig 3.4 in red book
#
goutnp = kl_bicgstab(u0, RHS, pdeatv, eta; lmaxit = 200, pdata = pdata)
pcresnp = goutnp.reshist
pcresnp /= pcresnp[1]
solnone = goutnp.sol
solldiff = max(solldiff, norm(solnone - sollhw, Inf))
#
# Solve with no preconditioning to get a failure
#
goutnf = kl_bicgstab(u0, RHS, pdeatv, eta; lmaxit = 20, pdata = pdata)
failpass = ~goutnf.idid && (goutnf.lits == 20)
#
# Are the answers close enough?
#
println(solldiff / eta)
sollpass = (solldiff < 2.0 * eta)
#
pass = sollpass && rightpass && leftpass && failpass
return pass
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 6921 | """
gmres_test.jl
Tests the linear GMRES code, kl_gmres.
This is for CI only. Nothing to see here. Move along.
"""
function gmres_test()
pass3 = test3x3()
passint = test_integop(40)
passint_rs = test_integop_restart(40)
passr1 = testR1()
passorth = orth_test()
passqr = qr_test()
passgm = pass3 && passint && passr1 && passint_rs && passorth && passqr
return passgm
end
"""
test3x3()
Do the nasty problem in Float64 or Float32. When you orthogonalize in Float32,
the iteration thinks it's ok and is wrong. This also tests the internal
function kstore.
"""
function test3x3()
A = [0.001 0 0; 0 0.0011 0; 0 0 1.e4]
V = zeros(3, 10)
V32 = zeros(Float32, 3, 10)
b = [1.0; 1.0; 1.0]
x0 = zeros(3)
eta = 1.e-10
passgm = true
rightsize = [10, 6, 5, 4]
rightsize32 = [10, 7, 7, 3]
Methods = ("cgs1", "mgs1", "mgs2", "cgs2")
TestC = (false, true, true, true)
i = 1
kl_store = kstore(3, "gmres")
kl_store32 = kstore(3, "gmres")
tol = 1.e-10
tol32 = 1.e-7
lhistpass = true
ididpass = true
locpass = true
for orth in Methods
gout = kl_gmres(x0, b, atv, V, tol; pdata = A, orth = orth, kl_store = kl_store)
resnorm = norm(A * gout.sol - b)
# If I don't have separate kl_stores then the gout.sol is overwritten.
# I will fix this at some point. For now, only the nonlinear solvers really
# use kl_store.
gout32 =
kl_gmres(x0, b, atv, V32, tol32; pdata = A, orth = orth, kl_store = kl_store32)
ithist = gout.reshist
ithist32 = gout32.reshist
lhist = length(ithist)
lhist32 = length(ithist32)
lhistpass = lhistpass && (lhist == rightsize[i])
lhistpass = lhistpass && (lhist32 == rightsize32[i])
ididpass = ididpass && (gout.idid == TestC[i])
ididpass = ididpass && (gout32.idid == TestC[i])
resnormx = norm(A * gout.sol - b)
resnorm32 = norm(A * gout32.sol - b)
locpass = (resnorm < 1.e-8) && (resnorm32 > 0.1)
c = A \ b
println(lhistpass, " ", ididpass, " ", locpass)
println(resnorm, " ", resnorm32, " ", resnormx, " ", norm(c - gout.sol))
# For the Float32 computation, the iteration terminates with success, but
# the real residual is bad.
locpass || println(
"failure at orth = ",
orth,
", lhist = ",
lhist,
", Res norm = ",
resnorm,
)
passgm = passgm && locpass && lhistpass && ididpass
i += 1
end
# return (pass = passgm, RH = R)
return passgm
end
function testR1()
A = Float64.([1 2 3 4 5])
E = A' * A
A = I + E
b = ones(5)
x0 = zeros(5)
V = zeros(5, 4)
gout = kl_gmres(x0, b, atv, V, 1.e-7; pdata = A)
lhist = length(gout.reshist)
nerr = norm(A * gout.sol - b, Inf)
pass = (lhist == 3) && (nerr < 1.e-14)
pass || println("Rank one test fails")
return pass
end
function test_integop(n)
pdata = integopinit(n)
f = pdata.f
ue = pdata.xe
u0 = zeros(size(f))
V = zeros(n, 20)
Methods = ("cgs1", "mgs1", "mgs2", "cgs2")
pass = true
#
# run through the orthogonalizers
#
for orth in Methods
goutinteg = kl_gmres(u0, f, integop, V, 1.e-10; pdata = pdata, orth = orth)
errn = norm(goutinteg.sol - ue, Inf)
rhist = goutinteg.reshist
lhist = length(rhist)
rred = rhist[4] ./ rhist[1]
lpass = (errn < 1.e-14) && (rred < 1.e-14) && (lhist == 4)
lpass || println("Failure with orth = ", orth)
pass = pass && lpass
end
#
# force a failure
#
failout = kl_gmres(u0, f, integop, V, 1.e-10; pdata = pdata, lmaxit = 2)
pass = pass && ~failout.idid
pass || println("Integral operator test fails.")
return pass
end
#
# Test integral equation with restarted GMRES
#
function test_integop_restart(n)
pdata = integopinit(n)
f = pdata.f
ue = pdata.xe
u0 = zeros(size(f))
V = zeros(n, 3)
V32 = zeros(Float32, n, 3)
gout = kl_gmres(u0, f, integop, V, 1.e-10; pdata = pdata, lmaxit = 20)
gout32 = kl_gmres(u0, f, integop, V32, 1.e-10; pdata = pdata, lmaxit = 20)
dhist = norm(gout.reshist - gout32.reshist, Inf)
lhist = length(gout.reshist)
gerr = norm(gout.sol - pdata.xe, Inf)
g32err = norm(gout32.sol - pdata.xe, Inf)
histpass = dhist < 3.e-7
histpass || println("restart history wrong size = ", dhist)
errpass = (gerr < 1.e-10) && (g32err < 1.e-10)
errpass || println("restart error too large")
lenpass = (lhist == 6)
lenpass || println("restart history wrong length")
return histpass && errpass && lenpass
end
function atv(x, A)
return A * x
end
function integop(u, pdata)
K = pdata.K
# f = pdata.f
return u - K * u
end
function integopinit(n)
h = 1 / n
X = collect(0.5*h:h:1.0-0.5*h)
K = [ker(x, y) for x in X, y in X]
# K = zeros(n, n)
# for j = 1:n
# for i = 1:n
# K[i, j] = ker(x[i], x[j])
# end
# end
K .*= h
# sol = exp.(x) .* log.(2.0 * x .+ 1.0)
# sol = usol.(X)
sol = [usol(x) for x in X]
f = sol - K * sol
pdata = (K = K, xe = sol, f = f)
return pdata
end
function usol(x)
return exp.(x) .* log.(2.0 * x .+ 1.0)
end
function ker(x, y)
ker = 0.1 * sin(x + exp(y))
end
"""
orth_test()
Used for CI to make sure the orthogonalizers do what I expect.
"""
function orth_test()
A = collect(0.01:0.01:0.25)
A = reshape(A, 5, 5)
A = I - A
B = Float32.(A)
C = Float16.(A)
pass64 = qr_test(A, 4.e-16)
pass32 = qr_test(B, 2.e-7)
pass16 = qr_test(C, 2.e-3)
return pass64 && pass32 && pass16
end
function qr_test(A = rand(3, 3), tol = 1.e-13)
OM = ("mgs1", "mgs2", "cgs1", "cgs2")
T = eltype(A)
passqr = true
for orth in OM
C = copy(A)
(Q, R) = qrctk!(C, orth)
fres = norm(Q * R - A, Inf) / norm(A, Inf)
ores = norm(Q' * Q - I, Inf)
npass = fres + ores
#println(eltype(Q)," ",eltype(R)," ",typeof(npass)," ",
# orth, " ", npass)
pass = (npass < tol)
pass || println(
"qr_test fails with precision = ",
T,
", method = ",
orth,
"error = ",
npass,
)
passqr = passqr && pass
end
passqr
end
function qrctk!(A, orth = "cgs2")
T = typeof(A[1, 1])
(m, n) = size(A)
R = zeros(T, n, n)
@views R[1, 1] = norm(A[:, 1])
@views A[:, 1] /= R[1, 1]
@views for k = 2:n
hv = vec(R[1:k, k])
Qkm = view(A, :, 1:k-1)
vv = vec(A[:, k])
Orthogonalize!(Qkm, hv, vv, orth)
end
return (Q = A, R = R)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 844 | #
# Get into the MGS reorthogonalization loop and see if it
# does its job.
#
function mgs_test(cond = 1.e6)
(A, x0, b) = data_cook(cond)
V = zeros(3, 20)
gout = kl_gmres(x0, b, matvec, V, 1.e-9; orth = "mgs1", pdata = A)
gout2 = kl_gmres(x0, b, matvec, V, 1.e-9; orth = "mgs2", pdata = A)
del = gout.reshist - gout2.reshist
mgs2ok = (norm(del, Inf) > 1.e-12) && gout.idid && gout2.idid
mgs2ok || println("mgs_test fails")
return mgs2ok
#return(gout, gout2, mgs2ok)
end
function matvec(x, A)
return A * x
end
function data_cook(cond)
u1 = [1, -2, 0] / sqrt(5.0)
u2 = [0, 0, 1]
u3 = [2, 1, 0] / sqrt(5.0)
U = [u1 u2 u3]
V = [u3 u1 u2]
D = diagm([1, cond, sqrt(cond)])
A = U * D * V'
xstar = ones(3)
b = A * xstar
x0 = [10.0, 10.0, 10.0]
return (A, x0, b)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1719 | """
nk_heq()
CI for nsoli and H-equation.
"""
function nk_heq()
n = 32
u0 = zeros(n)
FS = zeros(n)
FPS = zeros(n, 20)
FPJ = zeros(n, n)
FPV = zeros(n)
c = 0.999
atol = 1.e-9
rtol = 1.e-9
hdata = heqinit(u0, c)
dout =
nsol(heqf!, u0, FS, FPJ, heqJ!; rtol = rtol, atol = atol, pdata = hdata, sham = 1)
kout = nsoli(
heqf!,
u0,
FS,
FPS;
pdata = hdata,
rtol = rtol,
atol = atol,
lmaxit = -1,
eta = 0.1,
fixedeta = false,
)
kout2 = nsoli(
heqf!,
u0,
FS,
FPS;
pdata = hdata,
rtol = rtol,
atol = atol,
lmaxit = 2,
eta = 0.01,
)
kout3 = nsoli(
heqf!,
u0,
FS,
FPV;
pdata = hdata,
rtol = rtol,
atol = atol,
lmaxit = 40,
eta = 0.1,
fixedeta = true,
lsolver = "bicgstab",
)
ksol = kout.solution
dsol = dout.solution
ksol2 = kout2.solution
ksol3 = kout3.solution
soltest = norm(ksol - dsol, Inf) + norm(ksol - ksol2, Inf) + norm(ksol3 - dsol, Inf)
solpass = (soltest < 1.e-7)
solpass || println("solpass fails")
kfpass = (sum(kout2.stats.ikfail) == 9)
kfpass || println("kfpass fails")
histdiff = (dout.history - kout.history[1:8]) ./ dout.history[1]
histpass = (norm(histdiff, Inf) < 1.e-2)
histpass || println("histpass fails")
histdiffb = (kout.history - kout3.history) ./ kout.history[1]
histpassb = (norm(histdiffb, Inf) < 1.e-2)
histpassb || println("histpassb fails")
nkhpass = solpass && kfpass && histpass && histpassb
return nkhpass
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1876 | """
nk_pde(n)
Solve the Elliptic PDE using nsoli.jl on an n x n grid.
"""
function nk_pde(n = 15)
# Get some room for the residual
rtol = 1.e-7
atol = 1.e-10
u0 = zeros(n * n)
FV = copy(u0)
FVS = copy(u0)
# Get the precomputed data from pdeinit
pdata = pdeinit(n)
# Storage for the Jacobian-vector products
JV = zeros(n * n, 100)
# Call the solver with a finite-difference Jac-Vec
hout = nsoli(
pdeF!,
u0,
FV,
JV;
rtol = rtol,
atol = atol,
pdata = pdata,
eta = 0.1,
fixedeta = false,
maxit = 20,
)
houtb = nsoli(
pdeF!,
u0,
FV,
FVS;
rtol = rtol,
atol = atol,
pdata = pdata,
eta = 0.1,
fixedeta = false,
maxit = 20,
lmaxit = 20,
lsolver = "bicgstab",
)
# Call the solver a few times with an analytic Jac-Vec
hout2 = NsoliPDE(n; fixedeta = false)
hout3 = NsoliPDE(n; fixedeta = true)
hout4 = NsoliPDE(n; fixedeta = false, lsolver = "bicgstab")
soldiff = (
norm(hout2.solution - hout.solution, Inf) +
norm(hout3.solution - hout.solution, Inf) +
norm(houtb.solution - hout.solution, Inf) +
norm(hout4.solution - hout.solution, Inf)
)
solpass = (soldiff < 1.e-6)
solpass || println("solution compare fails in nk_pde, ", soldiff)
histdiffv = (hout.history - hout2.history) ./ hout.history[1]
histdiff = norm(histdiffv, Inf)
histpass = (histdiff < 0.1)
histpass || println("history compare fails in nk_pde, ", histdiff)
cost1 = sum(hout.stats.ijac)
cost2 = sum(hout2.stats.ijac)
cost3 = sum(hout3.stats.ijac)
costpass = (cost1 > 80) && (cost2 > 30) && (cost3 > cost2)
costpass || println(cost1, " ", cost2, " ", cost3)
return costpass && solpass && histpass
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 5033 | """
nk_test()
CI for nsoli
Testing Eisenstat-Walker and functions without precomputed data
"""
function nk_test()
passsimple = nksimple()
passsimple || println("nksimple fails")
jvpass = jacvec2d()
jvpass || println("jacvec2d fails")
nkpass = passsimple && jvpass
return nkpass
end
"""
nksimple()
Test nsoli with the simple 2D problem and line search failure and success.
"""
function nksimple()
x0 = [2.0; 0.5]
FPS = zeros(2, 3)
FPJ = zeros(2, 2)
FS = copy(x0)
#
# For the easy problem we will do analytic Jacobians for
# Newton and forward difference directional derivatives for Newton-GMRES
#
dout = nsol(simple!, x0, FS, FPJ, jsimple!; sham = 1, keepsolhist = true)
koutx = nsoli(simple!, x0, FS, FPS; eta = 1.e-10, keepsolhist = true, fixedeta = false)
dsolhist = norm(koutx.solhist - dout.solhist, Inf)
shpass = (dsolhist < 1.e-7)
shpass || println("solhist compare fails in easy nksimple", dsolhist)
vconverge = krstest(dout, koutx, "nksimple")
#
# For the stagnating problem we will do analytic Jacobians for
# Newton and analytic Jacobian-vector products for Newton-GMRES
# This is also a test of the internal function nkl_init
#
KData = nkl_init(2, "gmres")
x0 = [3.0; 5.0]
dout = nsol(simple!, x0, FS, FPJ, jsimple!; sham = 1)
kout = nsoli(simple!, x0, FS, FPS, JVsimple; fixedeta = true, eta = 1.e-10)
kout2 = nsoli(
simple!,
x0,
FS,
FPS,
JVsimple;
fixedeta = true,
Krylov_Data = KData,
eta = 1.e-10,
)
KD_ok = krstest(kout2, kout, "KDtest")
KD_ok || println("Krylov_Data test fails")
vdiverge = krstest(dout, kout, "hard nksimple problem")
vdiverge || println("failure hard nksimple problem")
#
# Combine the results.
#
return vconverge && vdiverge && shpass && KD_ok
end
function krstest(dout, kout, tname)
hdiff = norm(kout.history - dout.history, Inf)
hpass = (hdiff < 5.e-7)
hpass || println("history compare fails in $tname")
#
adiff = kout.stats.iarm - dout.stats.iarm
apass = (sum(adiff) == 0)
apass || println("line search compare fails in $tname")
#
fdiff = kout.stats.ifun - dout.stats.ifun
fpass = (sum(fdiff) == 0)
fpass || println("function value compare fails in $tname")
#
soldiff = kout.solution - dout.solution
solpass = (norm(soldiff, Inf) < 1.e-9)
solpass || println("solution compare fails in $tname", norm(soldiff, Inf))
krpass = (fpass && apass && hpass && solpass)
end
"""
jacvec2d()
Analytic Jacobian-vector product. Compare Eisenstat-Walker to
fixed eta. Test precomputed data support.
"""
function jacvec2d()
x0 = ones(2)
fv = zeros(2)
jv = zeros(2, 2)
jvs = zeros(2, 3)
pdata = zeros(2)
nout = nsol(f!, x0, fv, jv; sham = 1, pdata = pdata)
kout =
nsoli(f!, x0, fv, jvs, JVec; fixedeta = false, eta = 0.9, lmaxit = 2, pdata = pdata)
kout2 = nsoli(
fv2!,
x0,
fv,
jvs,
JVecv2;
fixedeta = true,
eta = 0.1,
lmaxit = 2,
Pvec = PVecv2,
)
histdiff = norm(nout.history - kout2.history)
histpass = (histdiff < 1.e-5)
histpass || println("hist test fails in jacvec2d")
ncost = funcost(nout)
nplot = acost(nout)
kcost = funcost(kout)
kplot = acost(kout)
kcost2 = funcost(kout2)
kplot2 = acost(kout2)
costpass = (ncost == 10) && (kcost == 15) && (kcost2 == 14)
costpass || println("cost compare fails in jacvec2d")
costpass || println(ncost, " ", kcost, " ", kcost2)
soldiff = (
norm(kout.solution - nout.solution, Inf) +
norm(kout2.solution - nout.solution, Inf)
)
solpass = (soldiff < 1.e-7)
solpass || println("solution compare fails in jacvec2d")
jvpass = histpass && costpass && solpass
return jvpass
end
function f!(fv, x, pdata)
fv[1] = x[1] + sin(x[2])
fv[2] = cos(x[1] + x[2])
return fv
end
"""
fv2!(fv, x)
Function evaluation without precomputed data for testing.
"""
function fv2!(fv, x)
fv[1] = x[1] + sin(x[2])
fv[2] = cos(x[1] + x[2])
return fv
end
"""
PVecv2(v, x)
Here's a preconditioner that does not need precomputed data and
does not do anything.
"""
function PVecv2(v, x)
return v
end
"""
JVecv2(v, fv, x)
Jacobian-vector product without precomputed/stored data
"""
function JVecv2(v, fv, x)
jvec = zeros(2)
p = -sin(x[1] + x[2])
jvec[1] = v[1] + cos(x[2]) * v[2]
jvec[2] = p * (v[1] + v[2])
return jvec
end
"""
JVec(v, fv, x, pdata)
Jacobian-vector product with precomputed/stored data
"""
function JVec(v, fv, x, pdata)
jvec = zeros(2)
p = -sin(x[1] + x[2])
pdata[1] = v[1] + cos(x[2]) * v[2]
pdata[2] = p * (v[1] + v[2])
return pdata
end
function funcost(itout)
netcost = itout.stats.ifun + itout.stats.ijac
cost = sum(netcost)
end
function acost(itout)
netcost = itout.stats.ifun + itout.stats.ijac
cost = cumsum(netcost)
end
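#
# Sketch (not part of the tests): sanity-check the analytic Jacobian-vector
# product JVecv2 against a forward difference of fv2!, where
# F(x) = [x1 + sin(x2); cos(x1 + x2)]. The returned value should be O(h).
#
function jacvec_fd_check(x = [0.3; 0.7], v = [1.0; -2.0], h = 1.e-7)
f0 = fv2!(zeros(2), x)
f1 = fv2!(zeros(2), x + h * v)
jv_fd = (f1 - f0) / h
jv_an = JVecv2(v, zeros(2), x)
return norm(jv_fd - jv_an, Inf)
end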
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 769 | function ptcKrylovTest(n = 63)
delta0 = 0.01
pout1 = ptciBeam()
pout2 = ptciBeam(n, delta0, false)
pout3 = ptciBeam(n, delta0, false, "left")
sol1 = pout1.solution
sol2 = pout2.solution
sol3 = pout3.solution
#
# sol3 is the wrong stable branch. Left preconditioning bites you!
#
solpass1a = (norm(sol1 - sol2, Inf) < 1.e-9)
solpass1b = (norm(sol1 + sol3, Inf) < 1.e-9)
solpass1 = solpass1a && solpass1b
solpass1 || println("solpass1 fails for ptcsoli")
histpass = (length(pout1.history) == 25)
histpass || println("histpass fails for ptcsoli")
solpass2 = (abs(norm(sol1, Inf) - 2.191) < 1.e-3)
solpass2 || println("solpass2 fails for ptcsoli")
ptcipass = solpass1 && histpass && solpass2
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 863 | function ptcKrylovTestB(n = 63)
delta0 = 0.01
pout1 = ptciBeam(; lsolver = "bicgstab")
pout2 = ptciBeam(n, delta0, false; lsolver = "bicgstab")
pout3 = ptciBeam(n, delta0, false, "left"; lsolver = "bicgstab")
sol1 = pout1.solution
sol2 = pout2.solution
sol3 = pout3.solution
#
# sol3 is the wrong stable branch. Left preconditioning bites you!
#
solpass1a = (norm(sol1 - sol2, Inf) < 1.e-9)
solpass1b = (norm(sol1 - sol3, Inf) < 1.e-9)
solpass1 = solpass1a && solpass1b
solpass1 || println("solpass1 fails for ptcsoli-bicgstab")
histpass = (length(pout1.history) == 25)
histpass || println("histpass fails for ptcsoli-bicgstab")
solpass2 = (abs(norm(sol1, Inf) - 2.191) < 1.e-3)
solpass2 || println("solpass2 fails for ptcsoli-bicgstab")
ptcipass = solpass1 && histpass && solpass2
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 596 | function ptcKrylovTestC(n = 63)
maxit = 100
delta0 = 0.01
lambda = 20.0
pout1 = ptciBeam()
bdata = beaminit(n, 0.0, lambda)
x = bdata.x
u0 = x .* (1.0 .- x) .* (2.0 .- x)
u0 .*= exp.(-10.0 * u0)
FS = copy(u0)
FPJV = zeros(n, 20)
pout = ptcsoli(FBeam!, u0, FS, FPJV;
delta0 = delta0, pdata = bdata, eta = 1.e-2,
rtol = 1.e-10, maxit = maxit, Pvec = PreCondBeam)
delsol=norm(pout.solution-pout1.solution,Inf)
hpass=(length(pout.history) == 25)
solpass=(delsol < 1.e-9)
ptciok = hpass && solpass
end
function PreCondBeam(v, x, bdata)
J = bdata.D2
ptv = J\v
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2605 | """
alex_test()
Test that aasol reproduces the reference iteration history, condition
numbers, and coefficient norms stored in vtst().
"""
function alex_test()
(historye, condhiste, alphanorme) = vtst()
u0 = ones(2)
maxit = 20
maxm = 2
vdim = 3 * maxm + 3
Vstore = zeros(2, vdim)
VstoreS = zeros(2, 2 * maxm + 4)
m = 2
aout = aasol(alexfp!, u0, m, Vstore; rtol = 1.e-10)
alexerr = (
reldiff(aout.history, historye) +
reldiff(aout.stats.condhist, condhiste) +
reldiff(aout.stats.alphanorm, alphanorme)
)
aoutS = aasol(alexfp!, u0, m, VstoreS; rtol = 1.e-10)
alexerrS = (
reldiff(aoutS.history, aout.history) +
reldiff(aoutS.stats.condhist, aout.stats.condhist) +
reldiff(aoutS.stats.alphanorm, aout.stats.alphanorm)
)
#
# Something funny about these tests with 1.7.0 and MKL.
#
alexok2 = (alexerrS < 1.e-15)
lenh = length(aout.history)
solerr = reldiff(aout.history[1:lenh-2], historye[1:lenh-2])
# put this back to reldiff and solerr < 1.e-5 once 1.7 is fixed
# solerr = norm(aout.history-historye)
solok = (solerr < 1.e-5)
solok || println("alex solution error", " ", solerr)
conderr = reldiff(aout.stats.condhist[1:lenh-2], condhiste[1:lenh-2])
# Something's broken with 1.7 in windoze/linux
# put this back to reldiff and conderr < 1.e-5 once 1.7 is fixed
condok = (conderr < 1.e-1)
condok || println("alex condition error", " ", conderr)
aerr = reldiff(aout.stats.alphanorm[1:lenh-2], alphanorme[1:lenh-2])
# put this back to aerr < 1.e-5 once 1.7 is fixed
aok = (aerr < 1.e-2)
aok || println("alex coefficient error $aerr")
aout.idid || println("idid is wrong for m=2")
alexok2 = alexok2 && solok && condok && aok
aout = aasol(alexfp!, u0, 0, Vstore; rtol = 1.e-10)
aout.idid && println("idid is wrong for m=0")
alexok0 = ~aout.idid && (aout.errcode == 10)
alexok = alexok2 && alexok0
return alexok
end
function vtst()
historye = [
6.50111e-01
4.48661e-01
2.61480e-02
7.25389e-02
1.53107e-04
1.18512e-05
1.82476e-08
1.04804e-13
]
condhiste = [
1.00000e+00
2.01556e+10
1.37776e+09
3.61344e+10
2.54947e+11
3.67672e+10
]
alphanorme = [
1.00000e+00
4.61720e+00
2.15749e+00
1.18377e+00
1.00000e+00
1.00171e+00
]
return (historye, condhiste, alphanorme)
end
function alexfp!(G, u)
G[1] = cos(0.5 * (u[1] + u[2]))
G[2] = G[1] + 1.e-8 * sin(u[1] * u[1])
return G
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1043 | """
ci_pde_aa()
Duplicate part of the data for Figure 4.2 in the book.
"""
function ci_pde_aa()
n = 63
m = 10
pdata = pdeinit(n)
Vstore = zeros(n * n, 3 * m + 3)
VstoreS = zeros(n * n, 2 * m + 4)
aout = PDE_aa(n, m; Vstore = Vstore, pdata = pdata)
aoutS = PDE_aa(n, m; Vstore = VstoreS, pdata = pdata)
# Same results with low storage mode?
alphaS = reldiff(aout.stats.alphanorm, aoutS.stats.alphanorm)
condS = reldiff(aout.stats.condhist, aoutS.stats.condhist)
histS = norm(aoutS.history - aout.history, Inf)
pdeerrS = condS + alphaS + histS
aout.idid || println("pde solver failed")
(aout.errcode == 0) || println("wrong error code in pde")
(pdeerrS < 1.e-8) || println("different stats ", condS, " ", alphaS, " ", histS)
(length(aout.history) == 21) || println("history length wrong")
aa_ok =
aout.idid &&
(aout.errcode == 0) &&
(length(aout.history) == 21) &&
(pdeerrS < 1.e-8)
aa_ok && println("pde succeeds")
return aa_ok
end
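#
# Storage-size sketch (inferred from these tests, not an authoritative
# statement of the aasol API): the tests allocate Vstore with either
# 3m + 3 columns or 2m + 4 columns and expect identical statistics, which is
# how the standard and low-storage workspaces are exercised here.
#
# m = 10; n = 63
# Vstore = zeros(n * n, 3 * m + 3)   # standard workspace
# VstoreS = zeros(n * n, 2 * m + 4)  # low-storage workspace
#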
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2078 | """
heq_aa()
Duplicate the part of Table 4 in Toth-Kelley for l^2 optimization
with c=.99.
Compare solution with one from nsoli.
"""
function heq_aa()
c = 0.99
tol = 1.e-8
fcount = [11, 10, 10, 11, 12, 12]
anormref = [4.0, 5.4, 5.4, 5.4, 5.4, 5.4]
condref = [1.0, 2.e2, 1.9e5, 1.9e7, 5.5e9, 6.5e10]
n = 500
u0 = ones(n)
hdata = heqinit(u0, c)
FS = zeros(n)
FPS = zeros(n, 20)
rtol = tol
atol = tol
maxit = 100
mmax = 6
Vstore = zeros(n, 3 * mmax + 2)
itrecords = zeros(6, 4)
houtn = nsoli(
heqf!,
u0,
FS,
FPS;
pdata = hdata,
rtol = rtol,
atol = atol,
lmaxit = 10,
eta = 0.01,
)
#
# Solve H-equation with Anderson(m) for several values of m
#
for m = 1:6
houta = aasol(
HeqFix!,
u0,
m,
Vstore;
maxit = maxit,
pdata = hdata,
rtol = rtol,
atol = atol,
)
#
# Keep the books.
#
itrecords[m, 1] = hresults(houta.solution, houtn.solution)
itrecords[m, 2] = length(houta.history)
itrecords[m, 3] = norm(houta.stats.condhist, Inf)
itrecords[m, 4] = norm(houta.stats.alphanorm, Inf)
end
#
# Grade the results. I only use the coefficient norm and the condition
# numbers for my own research.
#
solok = (norm(itrecords[:, 1], Inf) < 1.e-7)
solok || println("Solution error in Anderson H solve")
histok = (norm(itrecords[:, 2] - fcount, Inf) < 1.e-5)
histok || println("History error in Anderson H solve")
condok = (hresults(itrecords[:, 3], condref) < 1.e-1)
condok || println("Condition error in Anderson H solve")
normok = (hresults(itrecords[:, 4], anormref) < 1.e-1)
normok || println("Coefficient norm error in Anderson H solve")
#return (itrecords = itrecords)
return solok && histok && condok && normok
end
function hresults(x, y)
vdiff = (x - y) ./ abs.(x)
return norm(vdiff, Inf)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2980 | """
linear_aa()
Test aasol.jl for a two-dimensional linear problem
"""
function linear_aa()
maxit = 10
maxm = 2
vdim = 3 * maxm + 3
Vstore = zeros(2, vdim)
vdimS = 2 * maxm + 4
VstoreS = zeros(2, vdimS)
eigs = [0.1, 0.5]
xstar = ones(2)
pdata = makeLinpdata(eigs)
m = 0
#
# Test for termination on entry
#
x0 = [1.0, 1.0]
m = 0
aout = aasol(
GLin!,
x0,
m,
Vstore;
rtol = 1.e-10,
pdata = pdata,
maxit = maxit,
keepsolhist = true,
)
# Same results with low storage method?
aoutS = aasol(
GLin!,
x0,
m,
VstoreS;
rtol = 1.e-10,
pdata = pdata,
maxit = maxit,
keepsolhist = true,
)
linerrS = (
norm(aoutS.history - aout.history, Inf) +
norm(aoutS.stats.condhist - aout.stats.condhist, Inf) +
norm(aoutS.stats.alphanorm - aout.stats.alphanorm, Inf)
)
tflag = (aout.errcode === -1) && aout.idid && (linerrS < 1.e-15)
tflag || println("Failure in aasol terminate on entry test.")
#
# Test for failure to converge
#
x0 = [2.0, 10.0]
m = 0
maxit = 10
aout = aasol(
GLin!,
x0,
m,
Vstore;
rtol = 1.e-10,
pdata = pdata,
maxit = maxit,
keepsolhist = true,
)
failflag = ~aout.idid && (aout.errcode == 10)
failflag || println("Linear iteration failure in aasol test fails.")
#
# Test for convergence is two iterations.
#
m = 2
aout = aasol(
GLin!,
x0,
m,
Vstore;
rtol = 1.e-10,
pdata = pdata,
maxit = maxit,
keepsolhist = true,
)
termflag = (length(aout.history) == 4) && (norm(aout.solution - xstar, Inf) < 1.e-14)
termflag || println("Terminate in two aa iterations test fails.")
#
# Now set the eigenvalues to [2.0, 10.0] and beta=-1/9
#
eigs = [2.0, 10.0]
pdata = makeLinpdata(eigs)
beta = -1.0 / 9.0
maxit = 10
m = 1
aout1 = aasol(GLin!, x0, m, Vstore; rtol = 1.e-10, pdata = pdata, maxit = maxit)
aout2 = aasol(
GLin!,
x0,
m,
Vstore;
rtol = 1.e-10,
pdata = pdata,
maxit = maxit,
beta = beta,
)
bflag = ~aout1.idid && aout2.idid && (length(aout2.history) == 8)
return tflag && failflag && termflag && bflag
end
function GLin!(gout, xin, pdata)
M = pdata.M
b = pdata.b
gout = M * xin + b
return gout
end
function GLinBeta!(gout, xin, pdata)
M = pdata.M
b = pdata.b
beta = pdata.beta
gout = GLin!(gout, xin, pdata)
gout .*= beta
gout .+= (1.0 - beta) * xin
return gout
end
function makeLinpdata(eigs, beta = 1.0)
U = [1 -1; 1 1] ./ sqrt(2.0)
V = [3 -4; 4 3] ./ 5.0
S = diagm(eigs)
M = U * S * V'
b = (I - M) * ones(2)
return (M = M, b = b, beta = beta)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 78 | function reldiff(x, y)
p = (x - y) ./ abs.(x)
return norm(p, Inf)
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1010 | function continue_test()
v1ok = test_v1()
v2ok = test_PAC()
continueok = v1ok && v2ok
end
function test_v1()
n = 100
version = "orig"
(pval, nval, x, lambda) = heq_continue(n; version = version)
dpath = path_test.(pval, nval)
del1 = dpath[1:end-1]
del2 = dpath[end]
v1_pass = (norm(del1, Inf) < 4.e-9) && (del2 < 1.5e-4)
return v1_pass
end
function test_PAC()
n = 100
version = "pac"
(pval, nval, x, lambda) = heq_continue(n; version = version)
dpath = path_test.(pval, nval)
nsingular = argmax(dpath)
del2 = dpath[nsingular]
del1 = [dpath[1:nsingular-1]; dpath[nsingular+1:end]]
v2_pass = (norm(del1, Inf) < 1.e-5) && (del2 < 1.e-4)
return v2_pass
end
function path_test(pval, nval)
if pval > 0
rp = (1.0 + sqrt.(1.0 .- pval)) / (0.5 .* pval)
rm = (1.0 - sqrt.(1.0 .- pval)) / (0.5 .* pval)
else
rm = 1.0
end
(nval .<= 2) ? dp = abs.(nval - rm) : dp = abs.(nval - rp)
return dp
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 3871 | #
# Test results and performance for the conductive-radiative heat
# transfer problems. We compare results against column 2 of tables
# 2 and 3 in
#
# author="C. E. Siewert and J. R. Thomas",
# title="A Computational Method for Solving a Class of Coupled
# Conductive-Radiative Heat Transfer Problems",
# journal="J. Quant. Spectrosc. Radiat. Transfer",
# year=1991,
# volume=45,
# pages="273--281"
#
#
function heat_test()
P1ok = heat_test_examples(2, 1.0, 0.0)
P2ok = heat_test_examples(2, 1.0, 0.5)
return P1ok && P2ok
end
#
function heat_test_examples(p = 2, thetal = 1.0, thetar = 0.0)
nx = (10^p) + 1
dout = 10^(p - 1)
na = 40
# thetal = 1.0
# thetar = 0.5
aa_it_len = [7, 7, 7, 7, 7]
(thetar == 0.0) || (aa_it_len = [10, 8, 7, 8, 8])
omega = 0.9
tau = 1.0
Nc = 0.05
hn_data = heat_init(nx, na, thetal, thetar, omega, tau, Nc)
theta0 = hn_data.bcfix
mmax = 10
Vstore = zeros(nx, 3 * mmax + 3)
tol = 1.e-10
#
# Anderson acceleration test
#
aout = aasol(
heat_fixed!,
theta0,
0,
Vstore;
maxit = 40,
rtol = tol,
atol = tol,
pdata = hn_data,
)
thetabase = aout.solution
test_out = thetabase[1:dout:nx]
bench_heat = ces_heat(thetar)
del_heat = norm(test_out - bench_heat, Inf)
heatokaa = (del_heat < 1.e-4)
heatokaa || println("Wrong results for xferheat: error = $del_heat")
for m = 1:5
aout =
aasol(heat_fixed!, theta0, m, Vstore; rtol = tol, atol = tol, pdata = hn_data)
delsol = norm(aout.solution - thetabase, Inf)
lhist = length(aout.history)
heatmok = (delsol < 1.e-6) && (lhist == aa_it_len[m])
heatmok || println("xferheat: aa fails for m=$m and thetar=$thetar")
heatmok || println("lhist for AA($m) = $lhist")
heatokaa = heatokaa && heatmok
#println("m=$m. solution difference = $delsol. Iterations = $lhist")
end
chist = aout.stats.condhist
ahist = aout.stats.alphanorm
heatokaa || println("aa test for heat fails")
#
# Newton-GMRES
#
FS = copy(theta0)
gout = nsoli(
FCR_heat!,
theta0,
FS,
Vstore;
pdata = hn_data,
rtol = tol,
atol = tol,
dx = 1.e-5,
eta = 0.1,
fixedeta = false,
lsolver = "gmres",
)
ndiffg = norm(gout.solution - aout.solution, Inf)
lghist = length(gout.history)
heatnkgok = (ndiffg < 1.e-10) && (lghist == 4)
heatnkgok || println("xferheat: gmres fails for thetar=$thetar")
#
# Newton-BiCGSTAB
#
bout = nsoli(
FCR_heat!,
theta0,
FS,
Vstore;
pdata = hn_data,
rtol = tol,
atol = tol,
dx = 1.e-5,
eta = 0.1,
fixedeta = false,
lsolver = "bicgstab",
)
ndiffb = norm(bout.solution - aout.solution, Inf)
lbhist = length(bout.history)
heatnkbok = (ndiffb < 1.e-10) && (lbhist == 4)
heatnkbok || println("xferheat: bicgstab fails for thetar=$thetar")
#
return heatokaa && heatnkgok && heatnkbok
end
function ces_heat(thetar)
if thetar == 0.0
bench_heat = [
1.00000e+00,
9.18027e-01,
8.36956e-01,
7.53557e-01,
6.65558e-01,
5.71475e-01,
4.70505e-01,
3.62437e-01,
2.47544e-01,
1.26449e-01,
0.00000e+00,
]
else
bench_heat = [
1.00000e+00,
9.54270e-01,
9.11008e-01,
8.68433e-01,
8.25127e-01,
7.79940e-01,
7.31936e-01,
6.80375e-01,
6.24709e-01,
5.64610e-01,
5.00000e-01,
]
end
return bench_heat
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 1981 | #
# Test results and performance for the conductive-radiative heat
# transfer problems.
#
# This test makes sure the traps and error codes for failure
# do what I want.
#
function heat_test2()
P1ok = heat_test2_examples()
return P1ok
end
#
function heat_test2_examples(p = 2, thetal = 1.0, thetar = 2.0, omega = 0.5, tau = 4.0)
nx = (10^p) + 1
na = 40
Nc = 0.05
hn_data = heat_init(nx, na, thetal, thetar, omega, tau, Nc)
theta0 = hn_data.bcfix
mmax = 50
Vstore = zeros(nx, 3 * mmax + 3)
tol = 1.e-10
errcodes = [-2, 0, 10]
errtarget = [1.e4, 1.e-8, 1.e-5]
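    # The loop below runs aasol with m = 5, 10, 20 (in that order); errcodes and
    # errtarget hold the expected return code and solution-error bound for each m.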
Pok = true
#
# Newton-GMRES to obtain a converged result
#
FS = copy(theta0)
gout = nsoli(
FCR_heat!,
theta0,
FS,
Vstore;
pdata = hn_data,
rtol = tol,
atol = tol,
dx = 1.e-5,
eta = 0.1,
fixedeta = false,
lsolver = "gmres",
)
thetabase = gout.solution
gmhistok = (length(gout.history) == 7)
gmjacok = (sum(gout.stats.ijac) == 28)
gmconvok = (gout.errcode == 0)
gmresok = gmhistok && gmjacok && gmconvok
gmresok || println("nsoli fails in heat_test2")
Pok = Pok && gmresok
#
# Anderson acceleration test
#
iec = 1
for m in [5, 10, 20]
aout = aasol(
heat_fixed!,
theta0,
m,
Vstore;
rtol = tol,
atol = tol,
pdata = hn_data,
maxit = 50,
)
delsol = norm(aout.solution - thetabase, Inf)
errc = aout.errcode
ecodeok = (aout.errcode == errcodes[iec])
ecodeok || println("ecode test fails, heat_test2, m=$m")
Pok = Pok && ecodeok
delok = (delsol < errtarget[iec])
delok || println("sol err test fails, heat_test2, m=$m")
Pok = Pok && delok
iec += 1
println("For m=$m: error=$delsol, errcode = $errc")
end
return Pok
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | code | 2473 | #
# Test the transport solve with s=infty against the data
# from Tables 1 and 2 of
#
# author="R.D.M. Garcia and C.E. Siewert",
# title = "Radiative transfer in finite inhomogeneous plane-parallel
# atmospheres",
# journal="J. Quant. Spectrosc. Radiat. Transfer",
# year = 1982,
# volume=27,
# pages="141--148"
#
function transport_test()
nx = 2^8
na2 = 40
s = Inf
vleft = 1.0
vright = 0.0
sn_data = sn_init(nx, na2, x -> exp(-x / s), 5.0, vleft, vright)
source = zeros(nx)
#
tol = 1.e-5
kout = find_flux(source, sn_data, tol)
#
(sn_left, sn_right) = sn_tabulate(s, nx, kout.sol, source)
(out_left, out_right) = ces_data()
diff = norm(out_left - sn_left, Inf) + norm(out_right - sn_right, Inf)
kynum = length(kout.reshist)
transok = (diff < 1.e-4) && (kynum <= 13)
transok || println("Transport test fails: dataerr = $diff; itcount = $kynum")
return transok
end
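#
# find_flux builds the right-hand side from the source and boundary data with
# getrhs and then solves the linear fixed-point equation for the scalar flux
# with kl_gmres, using the matrix-free operator AxB.
#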
function find_flux(source, sn_data, tol)
b = getrhs(source, sn_data)
kout = kl_gmres(sn_data.phi0, b, AxB, sn_data.V, tol; pdata = sn_data)
return kout
end
function ces_data()
out_left = [
8.97797e-01,
8.87836e-01,
8.69581e-01,
8.52299e-01,
8.35503e-01,
8.18996e-01,
8.02676e-01,
7.86493e-01,
7.70429e-01,
7.54496e-01,
7.38721e-01,
]
out_right = [
1.02202e-01,
1.12164e-01,
1.30419e-01,
1.47701e-01,
1.64497e-01,
1.81004e-01,
1.97324e-01,
2.13507e-01,
2.29571e-01,
2.45504e-01,
2.61279e-01,
]
return (out_left, out_right)
end
"""
    sn_tabulate(s, nx, flux, source)

Make the tables to compare with Garcia/Siewert.
Uses the converged flux from the solve.
"""
function sn_tabulate(s, nx, flux, source)
angleout = [-0.05; collect(-0.1:-0.1:-1.0); 0.05; collect(0.1:0.1:1.0)]
#
# I don't really need the weights, but sn_init expects some
weights = angleout
#
na2 = length(angleout)
na = floor(Int, na2 / 2)
vleft = 1.0
vright = 0.0
np = nx
tsn_data = sn_init(nx, na2, x -> exp(-x / s), 5.0, vleft, vright; siewert = true)
psi_right = tsn_data.psi_right
psi_left = tsn_data.psi_left
psi = tsn_data.psi
psi = transport_sweep!(psi, flux, psi_left, psi_right, source, tsn_data)
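    # Exiting angular fluxes for the Garcia-Siewert tables: the first na entries
    # of angleout are the negative directions, so psi[1:na, 1] is the flux leaving
    # the left boundary and psi[na+1:na2, np] the flux leaving the right boundary.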
return (left = psi[1:na, 1], right = psi[na+1:na2, np])
end
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |
|
[
"MIT"
] | 1.0.2 | 1c7ffc244c458bb52e2b311dd6e0902b2b13fc14 | docs | 15313 | | **Documentation** | **Build Status** | **DOI** |
|:-------------------------------------------------------------------------------:|:-----------------------------------------------------------------------------------------------: | :----------------------------------------------------------------------------------------------- |
| [![][docs-stable-img]][docs-stable-url] [![][docs-dev-img]][docs-dev-url] | [![][build-status-img]][build-status-url] [![][codecov-img]][codecov-url] | [](https://zenodo.org/badge/latestdoi/256312455) |
[](https://pkgs.genieframework.com?packages=SIAMFANLEquations)
# SIAMFANLEquations
## The archival version 1.0 from the date of publication is in the [FA20 branch](https://github.com/ctkelley/SIAMFANLEquations.jl/tree/FA20).
## The current version is 1.0.2.
- This version eliminates most implicit imports. Look at [MultiPrecisionArrays.jl](https://github.com/ctkelley/MultiPrecisionArrays.jl) for the story on implicit imports.
- I fixed a bug that, amazingly, it took Julia 1.10.2 to find. I've put something in CI to make sure it's really fixed.
- I've updated/corrected a few docstrings in the solvers.
## This is the Julia package for my shiny new orange book
<img width = 400, src="https://user-images.githubusercontent.com/10243067/184647769-d9d51ee9-79f0-48ba-96a4-b9ed2a66cdfa.jpg">
# [Solving Nonlinear Equations with Iterative Methods: <br> Solvers and Examples in Julia](https://my.siam.org/Store/Product/viewproduct/?ProductId=44313635)
# NEW: the print book is now a [SIAM e-book](https://epubs.siam.org/action/showPublications?pubType=book&notConceptID=115968&startPage=&ContribAuthorFirstLetter=k)
This means that if your organization subscribes to the SIAM E-Book
series, you can download the pdf for free. Ask your librarian about this.
## [C. T. Kelley](https://ctk.math.ncsu.edu)
The book is finished and this project is __DONE__. So I take the sacred book author oath ...
- I will only make updates to the package and notebooks to fix bugs or typos.
- I will not be adding new functionality to this package or new material to the notebooks.
- I will make no changes to the user interface for the codes in the package.
This is a sequel to my book
(Kel03) C. T. Kelley, [***Solving Nonlinear Equations with Newton's Method***](https://my.siam.org/Store/Product/viewproduct/?ProductId=841), Fundamentals of Algorithms 1, SIAM, Philadelphia, 2003.
Hence the notebook and this package all have SIAMFANL in their names.
The new book has a different algorithm mix and the solvers and examples are in Julia. The project has three parts.
1. A print book: (Kel22) C. T. Kelley, [***Solving Nonlinear Equations with Iterative Methods: Solvers and Examples in Julia***](https://my.siam.org/Store/Product/viewproduct/?ProductId=44313635), Fundamentals of Algorithms 20, SIAM, Philadelphia, 2022.
__NEW: the print book is now a [SIAM e-book](https://epubs.siam.org/action/showPublications?pubType=book&notConceptID=115968&startPage=&ContribAuthorFirstLetter=k)__ This means that if your organization subscribes to the SIAM E-Book series, you can download the pdf for free. Ask your librarian about this.
2. [A suite of IJulia notebooks](https://github.com/ctkelley/NotebookSIAMFANL) (open source, MIT License, Creative Commons License)
The latest releases of the notebook suite and package run correctly. The notebooks and package from the master branches also run correctly
together. Bug fixes prior to 1.0 may, with an absurdly low probability, break things in older releases.
3. This package (MIT License)<br>
Content changes from (Kel03):
- New solvers: __pseudo-transient continuation__ and __Anderson acceleration__
- Deletions: __Broyden's method__
- Quasi-Newton methods are not used much for nonlinear equations any more. Newton-Krylov has taken over.
- New Case Studies chapter
## Readme Contents:
- [Mission](#package-mission)
- [Installation](#installation)
- [Reporting bugs: __Please__ No Pull Requests](#pull-requests)
- [Core References and Documentation](#core-references-and-documentation)
- [Algorithms and Solvers](#algorithms-and-solvers)
- [About the test problems](#test-problems-and-the-notebook)
- [How to cite this stuff](#citations)
- [Book FAQs](#faqs)
- [Funding](#funding)
## Package Mission
This package is designed and built to support a book project. So the solvers and examples reinforce the algorithmic discussion in the book. General purpose packages have a different mission.
## Installation:
- Your best bet is to __use the latest version of Julia__ (currently 1.10.0) with the notebooks and the package.
- If you must use old stuff, use LTS 1.6.7 and up with this thing!!!
- Please do not use any non-LTS version earlier than 1.8. The notebook kernel is now 1.10.0.
Type this
```
] add SIAMFANLEquations
```
or this
```
import Pkg; Pkg.add("SIAMFANLEquations")
```
in the REPL to install the package.
Then, as usual
```
using SIAMFANLEquations
```
enables you to use the codes. You'll need
```
using SIAMFANLEquations.TestProblems
```
to run the test problems. Then there are the examples you get with
```
using SIAMFANLEquations.Examples
```
for the unit tests, the examples in the book, and the notebook.
## Pull Requests
My favorite thing about book projects is that they are not open-ended. They get finished. For example, take
[this book](https://my.siam.org/Store/Product/viewproduct/?ProductId=44313635) ... please.
__Even after publication, I like bug reports; I need bug reports__, but ...
__Please, please__, do not send me PRs. If you find
1. a bug (programming or performance) in the codes,
2. errors and/or typos in the notebooks/docstrings/readme
3. confusion, lack of clarity, or __errors in the installation instructions__,
1. I would __really like__ some Windows users to try this stuff, especially the notebooks.
4. something I could do in the codes to help you do your work ...
1. that won't break other stuff, which includes the connection between the book and the package,
2. or eat up lots of time,
Please ...
- tell me the old fashioned way with email to [email protected]
- or open an issue.
This is a book project and I need to put all changes in by hand so I'll have muscle memory about what's going on. If there is a second printing I can fix things in the print/pdf books and will fix things in real time (more or less) in the codes and notebooks.
I have limited bandwidth, __so please do not send me email or open issues about__ ...
1. Julia programming style, with the exception of correctness and performance. I know this is not fully idiomatic Julia. I got somewhat better as the project progressed. As I said in the introduction, I have traded a lot of abstraction for clarity. That means clarity for the novice.
1. I am also an old guy and the final product will reflect the Fortran __66__ I was raised on. That's show biz.
1. Fortran + Julia = __Foolia__
3. Questions like "Why isn't Trotsky's method in here?" If you object to an algorithmic choice, you'll have to be content to know that I thought about the algorithm mix pretty carefully, had a clear vision for this project, and understand this field fairly well.
4. Questions like "Why doesn't SIAMFANLEquations.jl look/work/smell like and/or use DasKapital.jl?" The reasons are that
1. I am neither Karl nor Groucho,
2. this project has a different mission, and
3. __I worked hard to limit dependencies__.
5. Philosophy, politics, opinions, invitations to debates, ...
6. Organization of the repo, names of functions, API, or anything else that is now __frozen for the book__.
## Core References and Documentation
The best documentation for this package lives in the [notebook](https://github.com/ctkelley/NotebookSIAMFANL) and the print book. They have detailed algorithmic descriptions, examples for you to play with, and guidance on tweaking the algorithmic parameters to solve your problems. The notebook was built in parallel with the print book and the content is __roughly__ the same. The differences are mostly to accommodate the two formats. For example, docstrings need some work after the map from notebook to print and the notebook has to make sense as an interactive resource.
I've also used [Documenter.jl](https://github.com/JuliaDocs/Documenter.jl) with this package. Click the badge
[](https://ctkelley.github.io/SIAMFANLEquations.jl/stable)
to get the documentation from the latest release. The documenter files have the headers for the solvers and some of the test problems. I continue to work on the docs and they will get better, but will never be as good as the notebook.
This book will not cover theory in detail (ie no proofs). My two books on nonlinear equations
(Kel95) C. T. Kelley, [***Iterative Methods for Linear and Nonlinear Equations***](https://my.siam.org/Store/Product/viewproduct/?ProductId=862) , Frontiers in Applied Mathematics 16, SIAM, Philadelphia, 1995
and
(Kel03) C. T. Kelley, [***Solving Nonlinear Equations with Newton's Method***](https://my.siam.org/Store/Product/viewproduct/?ProductId=841) , Fundamentals of Algorithms 1, SIAM, Philadelphia, 2003
describe the classic Newton and Newton-Krylov algorithms. Kel95 has the theory. This project is a sequel to Kel03. Kel03 is Matlab-centric
and will remain in print.
A recent Acta Numerica paper has everything
(Kel18) C. T. Kelley, ***Numerical Methods for Nonlinear Equations***, Acta Numerica 27 (2018), pp 207--287. https://doi.org/10.1017/S0962492917000113
The references I use for theory of pseudo-transient continuation and Anderson acceleration are
(KK98) C. T. Kelley and D. E. Keyes, ***Convergence Analysis of Pseudo-Transient Continuation***, SIAM Journal on Numerical Analysis 35 (1998), pp 508-523. https://doi.org/10.1137/S0036142996304796
(TK15) A. Toth and C. T. Kelley, ***Convergence Analysis for Anderson Acceleration***, SIAM Journal on Numerical Analysis 53, (2015), pp 805-819. https://doi.org/10.1137/130919398
## Algorithms and Solvers
The solvers are designed to be stand-alone codes. The reason for this is the education mission of the project. I want the codes to be as easy to understand as possible. I have deliberately sacrificed a lot of abstraction and some performance in this effort. The reward for the reader (ie you) is that the algorithmic parameters are completely exposed so you can play with them. Someday, not soon, I may write a wrapper for all this that hides the parameters as a separate package. However, the stand-alone, keyword-infested codes are what you need if you want to really understand how these methods work. My students became experts in this field by fiddling with the Matlab version of these solvers.
The linear solvers are tuned to communicate well with nonlinear solvers. My old Matlab codes are a good illustration of this idea. My [new Matlab codes](https://ctk.math.ncsu.edu/knl.html) were designed in response to the need to do this better than I had been. In particular, the linear solver and the matrix-vector/preconditioner-vector product function need information on the nonlinear iteration and any precomputed data. While I could use global variables (and did in Kel95) and put these things in a module to simplify the interface, I won't do that anymore. Global variables make debugging harder and break parallelism. I like to avoid them.
The algorithms, listed by book chapter are
- Chapter 1: Newton-Armijo and Pseudo-transient continuation for scalar equations: __nsolsc.jl__ and __ptcsolsc.jl__
- Chapter 2: Newton-Armijo and Pseudo-transient continuation for systems with direct linear solvers: __nsol.jl__ and __ptcsol.jl__
- Chapter 3: Newton-Armijo and Pseudo-transient continuation for systems with iterative linear solvers: __nsoli.jl__ and __ptcsoli.jl__
- Chapter 4: Anderson acceleration: __aasol.jl__
- Chapter 5: Case studies: __Conductive-Radiative heat transfer__ and __Continuation for H-equation.__
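To give a feel for the calling conventions and for the `pdata` idea described above, here is a minimal sketch patterned on the calls in this repository's `/test` directory. The toy map, residual, and `pdata` object are hypothetical stand-ins for your own problem setup, and the exact signatures of the user-supplied functions are an assumption on my part; the docstrings, the print book, and the notebook are the authoritative reference.
```
using SIAMFANLEquations
# Hypothetical problem setup (not part of the package): fixed-point map
# G(x) = c*cos.(x) and residual F(x) = x - G(x). The in-place,
# pdata-carrying signatures are assumptions modeled on the test suite.
toy_gfix!(gout, x, pdata) = (gout .= pdata.c .* cos.(x); gout)
toy_fres!(fout, x, pdata) = (fout .= x .- pdata.c .* cos.(x); fout)
toy_pdata = (c = 0.5,)            # stands in for precomputed data
n = 101
x0 = zeros(n)
mmax = 10
Vstore = zeros(n, 3 * mmax + 3)   # workspace, sized as in the tests
tol = 1.e-10
# Anderson acceleration with depth m = 2
aout = aasol(toy_gfix!, x0, 2, Vstore; maxit = 40, rtol = tol, atol = tol, pdata = toy_pdata)
# Newton-GMRES on F(x) = 0; FS holds the nonlinear residual
FS = copy(x0)
gout = nsoli(toy_fres!, x0, FS, Vstore; rtol = tol, atol = tol, lsolver = "gmres", pdata = toy_pdata)
println((aout.errcode, length(gout.history)))   # convergence flag, iteration count
```
As in the tests, the solvers report their work through the returned structure (`solution`, `history`, `errcode`, ...), so there is no hidden global state.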
## Test Problems and the notebook
You'll need the TestProblems and Examples submodules to run the notebook. To get those type
```using SIAMFANLEquations.TestProblems```
and
```using SIAMFANLEquations.Examples```
in the REPL or run the first code cell in the notebook
```include("fanote_init.jl")```
There are two kinds of test problems. The ones you care about are the ones that I use in the print book and notebook to demonstrate the algorithms. The "inside baseball" problems are the ones I __only__ use for CI. They only appear in the /test directory. If you don't know or care about what CI is, be happy.
## Citations
Cite the package, print book and notebook like this.
```
@misc{ctk:siamfanl,
title="{SIAMFANLEquations.jl}",
author="C. T. Kelley",
year=2022,
note="Julia Package",
doi="10.5281/zenodo.4284807",
url="https://github.com/ctkelley/SIAMFANLEquations.jl"
}
@book{ctk:fajulia,
author="C. T. Kelley",
title="{Solving Nonlinear Equations with Iterative Methods:
Solvers and Examples in Julia}",
year=2022,
publisher="SIAM",
address="Philadelphia",
series="Fundamentals of Algorithms",
number=20
}
@misc{ctk:notebooknl,
title="{Notebook for Solving Nonlinear Equations with Iterative Methods:
Solvers and Examples in Julia}",
author="C. T. Kelley",
year=2022,
note="IJulia Notebook",
url="https://github.com/ctkelley/NotebookSIAMFANL",
doi="10.5281/zenodo.4284687"
}
```
## FAQs
1. What kind of book is this?
- It's an orange book.
2. What is this book about?
- It's about 200 pages.
3. Have you written any other amazing books?
- [Yes.](https://ctk.math.ncsu.edu/lv/books.html)
## Funding
This project was partially supported by
1. National Science Foundation Grants
1. OAC-1740309
2. DMS-1745654
3. DMS-1906446
2. Department of Energy grant DE-NA003967
3. Army Research Office grant W911NF-16-1-0504
Any opinions, findings, and conclusions or
recommendations expressed in this material are those of the author and
do not necessarily reflect the views of the National
Science Foundation, the Department of Energy,
or the Army Research Office.
[docs-dev-img]: https://img.shields.io/badge/docs-dev-blue.svg
[docs-dev-url]: https://ctkelley.github.io/SIAMFANLEquations.jl/dev
[docs-stable-img]: https://img.shields.io/badge/docs-stable-blue.svg
[docs-stable-url]: https://ctkelley.github.io/SIAMFANLEquations.jl/stable
[build-status-img]: https://github.com/ctkelley/SIAMFANLEquations.jl/workflows/CI/badge.svg
[build-status-url]: https://github.com/ctkelley/SIAMFANLEquations.jl/actions
[codecov-img]: https://codecov.io/gh/ctkelley/SIAMFANLEquations.jl/branch/master/graph/badge.svg
[codecov-url]: https://codecov.io/gh/ctkelley/SIAMFANLEquations.jl
| SIAMFANLEquations | https://github.com/ctkelley/SIAMFANLEquations.jl.git |