# ClimaAnalysis.jl v0.5.10 (Apache-2.0), tree 1f6c4859eafc66f1b6df4932bd7040747581d816
# https://github.com/CliMA/ClimaAnalysis.jl.git

# ===== src: dimension-name utilities (module ClimaAnalysis.Var) =====
# Customize these variables to allow other names
LONGITUDE_NAMES = ["long", "lon", "longitude"]
LATITUDE_NAMES = ["lat", "latitude"]
TIME_NAMES = ["t", "time"]
DATE_NAMES = ["date"]
ALTITUDE_NAMES = ["z", "z_reference", "z_physical"]
export times,
dates,
longitudes,
latitudes,
altitudes,
time_name,
date_name,
longitude_name,
latitude_name,
altitude_name,
has_time,
has_date,
has_longitude,
has_latitude,
has_altitude,
conventional_dim_name
"""
_dim_name(dim_names, allowed_names)
Return the index of the first element of `allowed_names` that is also among `dim_names`.
Return `nothing` if there is no such element.
Example
==========
```jldoctest
julia> ClimaAnalysis.Var._dim_name(["z", "lat", "lon"], ["lon", "long"])
1
```
"""
function _dim_name(dim_names, allowed_names)
return findfirst(possible_name -> possible_name in dim_names, allowed_names)
end
"""
has_time(var::OutputVar)
Return whether `var` has a `time` dimension.
"""
has_time(var::OutputVar) = !isnothing(_dim_name(keys(var.dims), TIME_NAMES))
"""
has_date(var::OutputVar)
Return whether `var` has a `date` dimension.
"""
has_date(var::OutputVar) = !isnothing(_dim_name(keys(var.dims), DATE_NAMES))
"""
has_longitude(var::OutputVar)
Return whether `var` has a `longitude` dimension.
"""
has_longitude(var::OutputVar) =
!isnothing(_dim_name(keys(var.dims), LONGITUDE_NAMES))
"""
has_latitude(var::OutputVar)
Return whether `var` has a `latitude` dimension.
"""
has_latitude(var::OutputVar) =
!isnothing(_dim_name(keys(var.dims), LATITUDE_NAMES))
"""
has_altitude(var::OutputVar)
Return whether `var` has an `altitude` dimension.
"""
has_altitude(var::OutputVar) =
!isnothing(_dim_name(keys(var.dims), ALTITUDE_NAMES))
"""
find_dim_name(dim_names::Iterable, allowed_names::Iterable)
Return the first name in `allowed_names` that occurs in `dim_names`; error if there is none.
Example
==========
```jldoctest
julia> ClimaAnalysis.Var.find_dim_name(["z", "lat", "lon"], ["lon", "long"])
"lon"
```
"""
function find_dim_name(dim_names, allowed_names)
dim_name = _dim_name(dim_names, allowed_names)
isnothing(dim_name) &&
error("var does not have $(first(allowed_names)) among its dimensions")
return allowed_names[dim_name]
end
"""
time_name(var::OutputVar)
Return the name of the `time` dimension in `var`.
"""
time_name(var::OutputVar) = find_dim_name(keys(var.dims), TIME_NAMES)
"""
times(var::OutputVar)
Return the `time` dimension in `var`.
"""
times(var::OutputVar) = var.dims[time_name(var)]
"""
date_name(var::OutputVar)
Return the name of the `date` dimension in `var`.
"""
date_name(var::OutputVar) = find_dim_name(keys(var.dims), DATE_NAMES)
"""
dates(var::OutputVar)
Return the `date` dimension in `var`.
"""
dates(var::OutputVar) = var.dims[date_name(var)]
"""
longitude_name(var::OutputVar)
Return the name of the `longitude` dimension in `var`.
"""
longitude_name(var::OutputVar) = find_dim_name(keys(var.dims), LONGITUDE_NAMES)
"""
longitudes(var::OutputVar)
Return the `longitude` dimension in `var`.
"""
longitudes(var::OutputVar) = var.dims[longitude_name(var)]
"""
latitude_name(var::OutputVar)
Return the name of the `latitude` dimension in `var`.
"""
latitude_name(var::OutputVar) = find_dim_name(keys(var.dims), LATITUDE_NAMES)
"""
latitudes(var::OutputVar)
Return the `latitude` dimension in `var`.
"""
latitudes(var::OutputVar) = var.dims[latitude_name(var)]
"""
altitude_name(var::OutputVar)
Return the name of the `altitude` dimension in `var`.
"""
altitude_name(var::OutputVar) = find_dim_name(keys(var.dims), ALTITUDE_NAMES)
"""
altitudes(var::OutputVar)
Return the `altitude` dimension in `var`.
"""
altitudes(var::OutputVar) = var.dims[altitude_name(var)]
"""
conventional_dim_name(dim_name::AbstractString)
Return the conventional name for the dimension ("longitude", "latitude", "time", "date",
or "altitude") if `dim_name` is a recognized alias, and `dim_name` itself otherwise.
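Example
==========
```jldoctest
julia> ClimaAnalysis.Var.conventional_dim_name("long")
"longitude"
```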
"""
function conventional_dim_name(dim_name::AbstractString)
dim_name in LONGITUDE_NAMES && return "longitude"
dim_name in LATITUDE_NAMES && return "latitude"
dim_name in TIME_NAMES && return "time"
dim_name in DATE_NAMES && return "date"
dim_name in ALTITUDE_NAMES && return "altitude"
return dim_name
end
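# A minimal usage sketch (illustrative, not part of the module). It assumes the
# four-argument OutputVar constructor exercised throughout the test suite; the
# variable names below are hypothetical.
#
#   import ClimaAnalysis
#   import OrderedCollections: OrderedDict
#
#   lat = collect(-90.0:90.0)
#   var = ClimaAnalysis.OutputVar(
#       Dict("long_name" => "example"),      # attributes
#       OrderedDict("lat" => lat),           # dimensions
#       Dict{String, Any}(),                 # dimension attributes
#       ones(length(lat)),                   # data
#   )
#
#   ClimaAnalysis.Var.has_latitude(var)             # true
#   ClimaAnalysis.Var.latitude_name(var)            # "lat"
#   ClimaAnalysis.Var.latitudes(var) == lat         # true
#   ClimaAnalysis.Var.conventional_dim_name("lat")  # "latitude"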

# ===== test/aqua.jl =====
using Test
using ClimaAnalysis
using Aqua
import ExplicitImports
@testset "Aqua tests" begin
Aqua.test_undefined_exports(ClimaAnalysis)
Aqua.test_stale_deps(ClimaAnalysis)
Aqua.test_deps_compat(ClimaAnalysis)
Aqua.detect_ambiguities(ClimaAnalysis; recursive = true)
Aqua.test_piracies(ClimaAnalysis)
end
@testset "Explicit Imports" begin
@test isnothing(ExplicitImports.check_no_implicit_imports(ClimaAnalysis))
@test isnothing(
ExplicitImports.check_no_stale_explicit_imports(ClimaAnalysis),
)
@test isnothing(
ExplicitImports.check_all_qualified_accesses_via_owners(ClimaAnalysis),
)
@test isnothing(
ExplicitImports.check_no_self_qualified_accesses(ClimaAnalysis),
)
end

# ===== test/doctest.jl =====
using Test
using Documenter
import ClimaAnalysis
@testset "Test docstrings" begin
DocMeta.setdocmeta!(
ClimaAnalysis,
:DocTestSetup,
:(using ClimaAnalysis;
using ClimaAnalysis.Utils;
using ClimaAnalysis.Var;
using ClimaAnalysis.Sim);
recursive = true,
)
doctest(ClimaAnalysis; manual = false)
end

# ===== test/format.jl =====
using Test
import JuliaFormatter
import ClimaAnalysis
@testset "Formatting" begin
@test JuliaFormatter.format(
ClimaAnalysis;
verbose = false,
overwrite = false,
)
end

# ===== test/runtests.jl =====
using SafeTestsets
using Test
#! format: off
@safetestset "Aqua" begin @time include("aqua.jl") end
@safetestset "Docstrings" begin @time include("doctest.jl") end
@safetestset "Format" begin @time include("format.jl") end
@safetestset "Utils" begin @time include("test_Utils.jl") end
@safetestset "Numerics" begin @time include("test_Numerics.jl") end
@safetestset "SimDir" begin @time include("test_Sim.jl") end
@safetestset "Atmos" begin @time include("test_Atmos.jl") end
@safetestset "Leaderboard" begin @time include("test_Leaderboard.jl") end
@safetestset "OutputVar" begin @time include("test_Var.jl") end
@safetestset "MakieExt" begin @time include("test_MakieExt.jl") end
@safetestset "GeoMakieExt" begin @time include("test_GeoMakieExt.jl") end
#! format: on
nothing

# ===== test/test_Atmos.jl =====
using Test
import ClimaAnalysis
import OrderedCollections: OrderedDict
@testset "To pressure coordinates" begin
# Let's start by testing a single column
z_alt = 0:100.0 |> collect
data = copy(z_alt)
zvar = ClimaAnalysis.OutputVar(Dict("z" => z_alt), data)
# Fake pressure, linearly decreasing, so that we can check precisely
pressure = 300.0:-2.0:100.0 |> collect
pdata = copy(pressure)
attribs = Dict("short_name" => "pfull")
dim_attribs = Dict{String, Any}()
pressure_var =
ClimaAnalysis.OutputVar(attribs, Dict("z" => z_alt), dim_attribs, pdata)
pressure_in_pressure_coordinates =
ClimaAnalysis.Atmos.to_pressure_coordinates(pressure_var, pressure_var)
@test collect(keys(pressure_in_pressure_coordinates.dims)) == ["pfull"]
# reverse because we go from min to max for pressure (to have it increasing
# for Interpolations.jl)
@test pressure_in_pressure_coordinates.dims["pfull"] == reverse(pdata)
@test pressure_in_pressure_coordinates.data == reverse(pdata)
# Fake var, again linear. When everything is linear we should obtain the
# input variable back
var_func(z) = 500 + 10 * z
myvardata = var_func.(z_alt)
mydata = copy(myvardata)
attribs = Dict("short_name" => "myvar")
dim_attribs = Dict{String, Any}()
myvar = ClimaAnalysis.OutputVar(
attribs,
Dict("z" => z_alt),
dim_attribs,
mydata,
)
myvar_in_pressure_coordinates =
ClimaAnalysis.Atmos.to_pressure_coordinates(myvar, pressure_var)
@test collect(keys(myvar_in_pressure_coordinates.dims)) == ["pfull"]
@test myvar_in_pressure_coordinates.dims["pfull"] == reverse(pdata)
@test myvar_in_pressure_coordinates.data == reverse(mydata)
exp_pressure = exp.(-0.01 .* z_alt |> collect)
exp_pdata = copy(exp_pressure)
attribs = Dict("short_name" => "pfull")
dim_attribs = Dict{String, Any}()
exp_pressure_var = ClimaAnalysis.OutputVar(
attribs,
Dict("z" => z_alt),
dim_attribs,
exp_pdata,
)
myvar_in_exp_pressure_coordinates =
ClimaAnalysis.Atmos.to_pressure_coordinates(myvar, exp_pressure_var)
# Linear range from min to max
expected_range = collect(
range(
minimum(exp_pressure),
maximum(exp_pressure),
length = length(exp_pdata),
),
)
@test myvar_in_exp_pressure_coordinates.dims["pfull"] == expected_range
# Specify pressure levels
@test ClimaAnalysis.Atmos.to_pressure_coordinates(
myvar,
exp_pressure_var,
target_pressure = [1.0],
).data[1] == mydata[1]
# From the pressure range we can compute the corresponding altitudes
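# (inverting p = exp(-0.01 z) gives z = -100 log(p))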
corresponding_z = -100 .* log.(expected_range)
@test myvar_in_exp_pressure_coordinates.data β var_func.(corresponding_z) rtol =
1e-5
# 3D test
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
zzz = 0.0:10.0 |> collect
var3D_func(lg, lt, zz) = 2lg + 3lt + 4zz
data = [var3D_func(lg, lt, zz) for lg in long, lt in lat, zz in zzz]
dims = OrderedDict(["lon" => long, "lat" => lat, "z" => zzz])
dim_attribs = Dict{String, Any}()
var3D = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
altitude = [zz for lg in long, lt in lat, zz in zzz]
pdata3D = exp.(-0.01 .* altitude)
attribs = Dict("short_name" => "pfull")
dim_attribs = Dict{String, Any}()
pressure3D = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, pdata3D)
my3Dvar_in_exp_pressure_coordinates =
ClimaAnalysis.Atmos.to_pressure_coordinates(var3D, pressure3D)
# Linear range from min to max
overall_range =
collect(range(minimum(pdata3D), maximum(pdata3D), length = length(zzz)))
@test my3Dvar_in_exp_pressure_coordinates.dims["pfull"] == overall_range
# From the pressure range we can compute the corresponding altitudes
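# (again inverting p = exp(-0.01 z), so z = -100 log(p))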
expected_output = [
var3D_func.(lg, lt, -100 .* log(p)) for lg in long, lt in lat,
p in overall_range
]
@test my3Dvar_in_exp_pressure_coordinates.data β expected_output rtol = 1e-5
# Error checking
@test_throws ErrorException ClimaAnalysis.Atmos.to_pressure_coordinates(
var3D,
pressure_var,
)
# This will fail because there is no recognized altitude dimension
Zvar = ClimaAnalysis.OutputVar(Dict("Z" => z_alt), z_alt)
@test_throws ErrorException ClimaAnalysis.Atmos.to_pressure_coordinates(
Zvar,
Zvar,
)
end

# ===== test/test_GeoMakieExt.jl =====
using Test
import ClimaAnalysis
import Makie
import CairoMakie
import GeoMakie
using OrderedCollections
@testset "MakieExt" begin
tmp_dir = mktempdir(cleanup = false)
@info "Tempdir", tmp_dir
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
data2D = reshape(1.0:(91 * 181), (181, 91))
dims2D = OrderedDict(["lon" => long, "lat" => lat])
attribs = Dict([
"long_name" => "The quick brown fox jumps over the lazy dog. The quick brown fox.",
"short_name" => "name",
"units" => "bob",
])
path = "a/b/c"
dim_attributes2D = OrderedDict([
"lon" => Dict(["units" => "degrees"]),
"lat" => Dict(["units" => "degrees"]),
])
var2D = ClimaAnalysis.OutputVar(attribs, dims2D, dim_attributes2D, data2D)
fig = Makie.Figure()
ClimaAnalysis.Visualize.heatmap2D_on_globe!(fig, var2D)
output_name = joinpath(tmp_dir, "test2D_globe.png")
Makie.save(output_name, fig)
fig2 = Makie.Figure()
ClimaAnalysis.Visualize.contour2D_on_globe!(
fig2,
var2D,
more_kwargs = Dict(
:plot => ClimaAnalysis.Utils.kwargs(colormap = :vik),
),
)
output_name = joinpath(tmp_dir, "test_contours2D_globe.png")
Makie.save(output_name, fig2)
# Test cmap
test_cmap = ClimaAnalysis.Visualize._constrained_cmap(
Makie.colorschemes[:vik],
0.0,
15000.0 + (5000.0 / 3.0),
mid = 5000.0,
categorical = true,
)
fig3 = Makie.Figure()
ClimaAnalysis.Visualize.contour2D_on_globe!(
fig3,
var2D,
more_kwargs = Dict(
:plot => ClimaAnalysis.Utils.kwargs(colormap = test_cmap),
),
)
output_name = joinpath(tmp_dir, "test_contours2D_globe_with_test_cmap.png")
Makie.save(output_name, fig3)
test_cmap = ClimaAnalysis.Visualize._constrained_cmap(
range(Makie.colorant"red", stop = Makie.colorant"green", length = 15),
0.0,
15000.0 + (5000.0 / 3.0),
)
fig4 = Makie.Figure()
ClimaAnalysis.Visualize.contour2D_on_globe!(
fig4,
var2D,
more_kwargs = Dict(
:plot => ClimaAnalysis.Utils.kwargs(colormap = test_cmap),
),
)
output_name = joinpath(tmp_dir, "test_contours2D_globe_with_test_cmap2.png")
Makie.save(output_name, fig4)
# Test with oceanmask
fig5 = Makie.Figure()
ClimaAnalysis.Visualize.heatmap2D_on_globe!(
fig5,
var2D,
mask = ClimaAnalysis.Visualize.oceanmask(),
more_kwargs = Dict(:mask => ClimaAnalysis.Utils.kwargs(color = :blue)),
)
output_name = joinpath(tmp_dir, "test_contours2D_globe_with_oceanmask.png")
Makie.save(output_name, fig5)
# Test plot_bias
fig6 = Makie.Figure()
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = collect(reshape(-32400:32399, (360, 180))) ./ (32399.0 / 5.0)
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
data_zero = zeros(length(lon), length(lat))
var_zero = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_zero)
ClimaAnalysis.Visualize.plot_bias_on_globe!(fig6, var, var_zero)
output_name = joinpath(tmp_dir, "plot_bias.png")
Makie.save(output_name, fig6)
# Test plot bias with keyword arguments
fig7 = Makie.Figure()
ClimaAnalysis.Visualize.plot_bias_on_globe!(
fig7,
var,
var_zero,
more_kwargs = Dict(
:axis => Dict(:title => "no title"),
:plot => Dict(:extendhigh => nothing),
),
)
output_name = joinpath(tmp_dir, "plot_bias_kwargs.png")
Makie.save(output_name, fig7)
end

# ===== test/test_Leaderboard.jl =====
using Test
import ClimaAnalysis
@testset "Constructors and helper functions" begin
# Testing constructor using short name and model names
rmse_var = ClimaAnalysis.RMSEVariable("ta", ["model1", "model2"])
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2"]
@test ClimaAnalysis.category_names(rmse_var) ==
["ANN", "DJF", "MAM", "JJA", "SON"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "", "model2" => "")
@test rmse_var.short_name == "ta"
@test isnan(rmse_var.RMSEs[1, 1])
# Testing constructor using short name, model names, and provided units
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
Dict("model1" => "units1", "model2" => "units2"),
)
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2"]
@test ClimaAnalysis.category_names(rmse_var) ==
["ANN", "DJF", "MAM", "JJA", "SON"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "units1", "model2" => "units2")
@test rmse_var.short_name == "ta"
@test isnan(rmse_var.RMSEs[1, 1])
# Testing constructor using short name, model names, category_names, and provided units
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
["cat1", "cat2"],
Dict("model1" => "units1", "model2" => "units2"),
)
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2"]
@test ClimaAnalysis.category_names(rmse_var) == ["cat1", "cat2"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "units1", "model2" => "units2")
@test rmse_var.short_name == "ta"
@test isnan(rmse_var.RMSEs[1, 1])
# Testing constructor using short name, model names, category names, RMSEs, and units
rmse_var = ClimaAnalysis.RMSEVariable(
"short_name",
["model1", "model2", "model3"],
["cat1", "cat2"],
ones(3, 2),
Dict("model1" => "units"),
)
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2", "model3"]
@test ClimaAnalysis.category_names(rmse_var) == ["cat1", "cat2"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "units", "model2" => "", "model3" => "")
@test rmse_var.short_name == "short_name"
@test rmse_var.RMSEs == ones(3, 2)
# Testing constructors for passing units as a string
rmse_var =
ClimaAnalysis.RMSEVariable("ta", ["model1", "model2"], "same_unit")
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2"]
@test ClimaAnalysis.category_names(rmse_var) ==
["ANN", "DJF", "MAM", "JJA", "SON"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "same_unit", "model2" => "same_unit")
@test rmse_var.short_name == "ta"
@test isnan(rmse_var.RMSEs[1, 1])
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
["cat1", "cat2"],
"same_unit",
)
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2"]
@test ClimaAnalysis.category_names(rmse_var) == ["cat1", "cat2"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "same_unit", "model2" => "same_unit")
@test rmse_var.short_name == "ta"
@test isnan(rmse_var.RMSEs[1, 1])
rmse_var = ClimaAnalysis.RMSEVariable(
"short_name",
["model1", "model2", "model3"],
["cat1", "cat2"],
ones(3, 2),
"units",
)
@test ClimaAnalysis.model_names(rmse_var) == ["model1", "model2", "model3"]
@test ClimaAnalysis.category_names(rmse_var) == ["cat1", "cat2"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("model1" => "units", "model2" => "units", "model3" => "units")
@test rmse_var.short_name == "short_name"
@test rmse_var.RMSEs == ones(3, 2)
# Error handling
# Duplicate model names and category names
@test_throws ErrorException ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model1"],
["cat1"],
ones(2, 1),
Dict("model1" => ""),
)
@test_throws ErrorException ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
["cat1", "cat1"],
ones(2, 2),
Dict("model1" => ""),
)
# Negative RMSE
@test_throws ErrorException ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
["cat1", "cat1"],
-1.0 .* ones(2, 2),
Dict("model1" => ""),
)
# Shape of RMSE array
@test_throws ErrorException ClimaAnalysis.RMSEVariable(
"ta",
["model1", "model2"],
["cat1", "cat1"],
ones(1, 2),
Dict("model1" => ""),
)
end
@testset "Reading RMSEs from CSV file" begin
# Testing constructor using CSV file
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
@test ClimaAnalysis.model_names(rmse_var) == ["ACCESS-CM2", "ACCESS-ESM1-5"]
@test ClimaAnalysis.category_names(rmse_var) ==
["DJF", "MAM", "JJA", "SON", "ANN"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("ACCESS-CM2" => "", "ACCESS-ESM1-5" => "")
@test rmse_var.short_name == "ta"
@test rmse_var.RMSEs[1, 1] == 11.941
@test isnan(rmse_var.RMSEs[2, 5])
# Testing constructor using CSV file with units provided
rmse_var = ClimaAnalysis.read_rmses(
csv_file_path,
"ta",
units = Dict("ACCESS-ESM1-5" => "m", "wacky" => "weird"),
)
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("ACCESS-CM2" => "", "ACCESS-ESM1-5" => "m")
# Testing constructor using CSV file with units being a string
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta", units = "m")
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("ACCESS-CM2" => "m", "ACCESS-ESM1-5" => "m")
end
@testset "Indexing" begin
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var[1, 1] = 100.0
@test rmse_var.RMSEs[1, 1] == 100.0
rmse_var[1, 1:3] = [100.0, 110.0, 120.0]
@test rmse_var.RMSEs[1, 1:3] == [100.0, 110.0, 120.0]
rmse_var[1:2, 1] = [100.0, 110.0]'
@test rmse_var.RMSEs[1:2, 1] == [100.0, 110.0]
rmse_var["ACCESS-ESM1-5", "DJF"] = 200.0
@test rmse_var["ACCESS-ESM1-5", "DJF"] == 200.0
rmse_var["ACCESS-ESM1-5", ["DJF", "MAM", "ANN"]] = [200.0, 210.0, 220.0]
@test rmse_var["ACCESS-ESM1-5", [1, 2, 5]] == [200.0, 210.0, 220.0]
rmse_var["ACCESS-ESM1-5"] = [120.0, 130.0, 140.0, 150.0, 160.0]
@test rmse_var["ACCESS-ESM1-5"] == [120.0, 130.0, 140.0, 150.0, 160.0]
# Check error handling
@test_throws ErrorException rmse_var[5, 5] = 100.0
@test_throws ErrorException rmse_var["do not exist"] = 100.0
@test_throws ErrorException rmse_var["do not exist", "test"] = 100.0
@test_throws ErrorException rmse_var["ACCESS-ESM1-5", "test"] = 100.0
@test_throws ErrorException rmse_var["model1", "ANN"] = 100.0
@test_throws DimensionMismatch rmse_var["ACCESS-ESM1-5"] =
[120.0, 130.0, 140.0]
@test_throws DimensionMismatch rmse_var[1, :] = [120.0, 130.0, 140.0]
end
@testset "Adding model and category" begin
# Add single model
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "new model")
@test ClimaAnalysis.model_names(rmse_var) ==
["ACCESS-CM2", "ACCESS-ESM1-5", "new model"]
@test ClimaAnalysis.category_names(rmse_var) ==
["DJF", "MAM", "JJA", "SON", "ANN"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("ACCESS-CM2" => "", "ACCESS-ESM1-5" => "", "new model" => "")
@test all(isnan.(rmse_var["new model"]))
# Add single category
rmse_var = ClimaAnalysis.add_category(rmse_var, "new cat")
@test ClimaAnalysis.model_names(rmse_var) ==
["ACCESS-CM2", "ACCESS-ESM1-5", "new model"]
@test ClimaAnalysis.category_names(rmse_var) ==
["DJF", "MAM", "JJA", "SON", "ANN", "new cat"]
@test ClimaAnalysis.rmse_units(rmse_var) ==
Dict("ACCESS-CM2" => "", "ACCESS-ESM1-5" => "", "new model" => "")
@test all(isnan.(rmse_var[:, "new cat"]))
# Add multiple models
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "new model", "new model 2")
@test ClimaAnalysis.model_names(rmse_var) ==
["ACCESS-CM2", "ACCESS-ESM1-5", "new model", "new model 2"]
@test ClimaAnalysis.category_names(rmse_var) ==
["DJF", "MAM", "JJA", "SON", "ANN"]
@test ClimaAnalysis.rmse_units(rmse_var) == Dict(
"ACCESS-CM2" => "",
"ACCESS-ESM1-5" => "",
"new model" => "",
"new model 2" => "",
)
@test all(isnan.(rmse_var["new model"]))
@test all(isnan.(rmse_var["new model 2"]))
# Add multiple categories
rmse_var = ClimaAnalysis.add_category(rmse_var, "new cat", "new cat 2")
@test ClimaAnalysis.model_names(rmse_var) ==
["ACCESS-CM2", "ACCESS-ESM1-5", "new model", "new model 2"]
@test ClimaAnalysis.category_names(rmse_var) ==
["DJF", "MAM", "JJA", "SON", "ANN", "new cat", "new cat 2"]
@test ClimaAnalysis.rmse_units(rmse_var) == Dict(
"ACCESS-CM2" => "",
"ACCESS-ESM1-5" => "",
"new model" => "",
"new model 2" => "",
)
@test all(isnan.(rmse_var[:, "new cat"]))
@test all(isnan.(rmse_var[:, "new cat 2"]))
end
@testset "Removing model" begin
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "new model", "new model 2")
rmse_var[2, 5] = 10.0
rmse_var_delete = ClimaAnalysis.Leaderboard._delete_model(
rmse_var,
"ACCESS-CM2",
"new model",
)
@test rmse_var_delete.short_name == "ta"
@test ClimaAnalysis.model_names(rmse_var_delete) ==
["ACCESS-ESM1-5", "new model 2"]
@test ClimaAnalysis.category_names(rmse_var_delete) ==
ClimaAnalysis.category_names(rmse_var)
@test rmse_var_delete[1, :] == rmse_var[2, :]
@test all(isnan.(rmse_var_delete[2, :]))
# Delete all models
rmse_var_delete = ClimaAnalysis.Leaderboard._delete_model(
rmse_var_delete,
"ACCESS-ESM1-5",
"new model 2",
)
@test rmse_var_delete.short_name == "ta"
@test isempty(ClimaAnalysis.model_names(rmse_var_delete))
@test ClimaAnalysis.category_names(rmse_var_delete) ==
ClimaAnalysis.category_names(rmse_var)
@test rmse_var_delete[:, :] |> size == (0, 5)
end
@testset "Adding units" begin
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
# Adding a single unit
rmse_var = ClimaAnalysis.add_model(rmse_var, "hi")
ClimaAnalysis.add_unit!(rmse_var, "hi", "test")
@test ClimaAnalysis.rmse_units(rmse_var)["hi"] == "test"
# Adding multiple units
rmse_var = ClimaAnalysis.add_model(rmse_var, "hello1", "hello2")
ClimaAnalysis.add_unit!(
rmse_var,
Dict("hello1" => "units1", "hello2" => "units2"),
)
@test ClimaAnalysis.rmse_units(rmse_var)["hello1"] == "units1"
@test ClimaAnalysis.rmse_units(rmse_var)["hello2"] == "units2"
end
@testset "Finding best, worst, and median model" begin
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var[:, :] = [[1.0 2.0 3.0 4.0 5.0]; [6.0 7.0 8.0 9.0 10.0]]
ClimaAnalysis.add_unit!(rmse_var, "ACCESS-CM2", "units")
ClimaAnalysis.add_unit!(rmse_var, "ACCESS-ESM1-5", "units")
val, model_name =
ClimaAnalysis.find_best_single_model(rmse_var, category_name = "ANN")
@test model_name == "ACCESS-CM2"
@test val == [1.0, 2.0, 3.0, 4.0, 5.0]
@test val |> size == (5,)
val, model_name =
ClimaAnalysis.find_worst_single_model(rmse_var, category_name = "ANN")
@test model_name == "ACCESS-ESM1-5"
@test val == [6.0, 7.0, 8.0, 9.0, 10.0]
@test val |> size == (5,)
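# With exactly two models, the median of each category is the midpoint of the two RMSEs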
val = ClimaAnalysis.median(rmse_var)
@test val == [7.0, 9.0, 11.0, 13.0, 15.0] ./ 2.0
@test val |> size == (5,)
# Test with NaN in RMSE array
rmse_var = ClimaAnalysis.add_model(rmse_var, "for adding NaN")
ClimaAnalysis.add_unit!(rmse_var, "for adding NaN", "units")
val, model_name =
ClimaAnalysis.find_best_single_model(rmse_var, category_name = "ANN")
@test model_name == "ACCESS-CM2"
@test val == [1.0, 2.0, 3.0, 4.0, 5.0]
@test val |> size == (5,)
val, model_name =
ClimaAnalysis.find_worst_single_model(rmse_var, category_name = "ANN")
@test model_name == "ACCESS-ESM1-5"
@test val == [6.0, 7.0, 8.0, 9.0, 10.0]
@test val |> size == (5,)
val = ClimaAnalysis.median(rmse_var)
@test val == [7.0, 9.0, 11.0, 13.0, 15.0] ./ 2.0
@test val |> size == (5,)
end

# ===== test/test_MakieExt.jl =====
using Test
import ClimaAnalysis
import Makie
import CairoMakie
using OrderedCollections
@testset "MakieExt" begin
tmp_dir = mktempdir(cleanup = false)
@info "Tempdir", tmp_dir
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data3D = reshape(1.0:(91 * 181 * 11), (11, 181, 91))
dims3D = OrderedDict(["time" => time, "lon" => long, "lat" => lat])
attribs = Dict([
"long_name" => "The quick brown fox jumps over the lazy dog. The quick brown fox.",
"short_name" => "name",
"units" => "bob",
])
path = "a/b/c"
dim_attributes3D = OrderedDict([
"time" => Dict(["units" => "s"]),
"lon" => Dict(["units" => "degrees"]),
"lat" => Dict(["units" => "degrees"]),
])
var3D = ClimaAnalysis.OutputVar(attribs, dims3D, dim_attributes3D, data3D)
fig = Makie.Figure()
@test_throws ErrorException ClimaAnalysis.Visualize.heatmap2D!(fig, var3D)
data2D = reshape(1.0:(91 * 181), (181, 91))
dims2D = OrderedDict(["lon" => long, "lat" => lat])
attribs = Dict([
"long_name" => "The quick brown fox jumps over the lazy dog. The quick brown fox.",
"short_name" => "name",
"units" => "bob",
])
path = "a/b/c"
dim_attributes2D = OrderedDict([
"lon" => Dict(["units" => "degrees"]),
"lat" => Dict(["units" => "degrees"]),
])
var2D = ClimaAnalysis.OutputVar(attribs, dims2D, dim_attributes2D, data2D)
ClimaAnalysis.Visualize.heatmap2D!(fig, var2D)
output_name = joinpath(tmp_dir, "test2D.png")
Makie.save(output_name, fig)
# Test with a GridLayout
fig = Makie.Figure()
layout = fig[1, 1] = Makie.GridLayout()
ClimaAnalysis.Visualize.heatmap2D!(layout, var2D)
output_name = joinpath(tmp_dir, "test2D_gd.png")
Makie.save(output_name, fig)
# New figure
fig = Makie.Figure()
@test_throws ErrorException ClimaAnalysis.Visualize.sliced_heatmap!(
fig,
var3D,
)
cut = Dict("time" => 1)
ClimaAnalysis.Visualize.sliced_heatmap!(fig, var3D, cut)
output_name = joinpath(tmp_dir, "test3D_sliced.png")
Makie.save(output_name, fig)
# New figure
fig = Makie.Figure()
ClimaAnalysis.Visualize.heatmap!(fig, var3D; time = 1)
output_name = joinpath(tmp_dir, "test3D_sliced_kwargs.png")
Makie.save(output_name, fig)
@test_throws ErrorException ClimaAnalysis.Visualize.line_plot1D!(fig, var3D)
data1D = reshape(1.0:(91), (91))
dims1D = OrderedDict(["lat" => lat])
attribs = Dict([
"long_name" => "The quick brown fox jumps over the lazy dog. The quick brown fox.",
"short_name" => "name",
"units" => "bob",
])
path = "a/b/c"
dim_attributes1D = OrderedDict(["lat" => Dict(["units" => "degrees"])])
var1D = ClimaAnalysis.OutputVar(attribs, dims1D, dim_attributes1D, data1D)
fig = Makie.Figure()
ClimaAnalysis.Visualize.line_plot1D!(fig, var1D)
output_name = joinpath(tmp_dir, "test1D.png")
Makie.save(output_name, fig)
fig = Makie.Figure()
cut = Dict("lon" => 30)
ClimaAnalysis.Visualize.sliced_line_plot!(fig, var2D, cut)
output_name = joinpath(tmp_dir, "test2D_sliced.png")
Makie.save(output_name, fig)
fig = Makie.Figure()
ClimaAnalysis.Visualize.line_plot!(fig, var2D; lon = 30)
output_name = joinpath(tmp_dir, "test2D_sliced_kwargs.png")
Makie.save(output_name, fig)
# Test plot!
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(fig, var2D; lon = 30)
output_name = joinpath(tmp_dir, "test_plot2D_sliced.png")
Makie.save(output_name, fig)
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(fig, var3D; lon = 30, time = 1)
output_name = joinpath(tmp_dir, "test_plot3D_sliced.png")
Makie.save(output_name, fig)
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(fig, var3D; time = 1)
output_name = joinpath(tmp_dir, "test_plot3D_sliced_once.png")
Makie.save(output_name, fig)
# Test passing more_kwargs
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(
fig,
var3D;
time = 1,
more_kwargs = Dict(:cb => [:vertical => :false]),
)
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(
fig,
var3D;
time = 1,
more_kwargs = Dict(
:plot => ClimaAnalysis.Utils.kwargs(colormap = :inferno),
),
)
output_name = joinpath(tmp_dir, "test_plot3D_sliced_once.png")
Makie.save(output_name, fig)
# Test dim_on_y
fig = Makie.Figure()
ClimaAnalysis.Visualize.plot!(
fig,
var3D;
time = 1,
lon = 30,
more_kwargs = Dict(
:plot => ClimaAnalysis.Utils.kwargs(colormap = :inferno),
:axis => ClimaAnalysis.Utils.kwargs(dim_on_y = true),
),
)
output_name = joinpath(tmp_dir, "test_plot3D_sliced_swapped.png")
Makie.save(output_name, fig)
# Test overriding title, xlabel, and ylabel
fig = Makie.Figure()
ClimaAnalysis.Visualize.heatmap2D!(
fig,
var2D,
more_kwargs = Dict(
:axis => ClimaAnalysis.Utils.kwargs(
title = "My title: The quick brown fox jumps over the lazy dog. The quick brown fox.",
xlabel = "My xlabel",
ylabel = "My ylabel",
),
),
)
output_name = joinpath(tmp_dir, "test2D_title.png")
Makie.save(output_name, fig)
# Plotting box plot
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "CliMA")
rmse_var["CliMA", :] = [12.0, 12.0, 11.0, 14.0, 6.0]
ClimaAnalysis.add_unit!(
rmse_var,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
),
)
rmse_var[2, 5] = 4.0
rmse_var1 = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var1 = ClimaAnalysis.add_model(rmse_var1, "CliMA", "test_model")
rmse_var1["CliMA", :] = [12.0, 12.0, 11.0, 14.0, 6.0]
rmse_var1["test_model", :] = [12.0, 12.0, 11.0, 14.0, 6.0]
ClimaAnalysis.add_unit!(
rmse_var1,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
"test_model" => "units",
),
)
rmse_var1[2, 5] = 4.0
fig = Makie.Figure(; size = (800, 300 * 3 + 400), fontsize = 20)
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_var,
model_names = ["CliMA"],
best_and_worst_category_name = "ANN",
)
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_var,
model_names = ["CliMA"],
ploc = (2, 1),
best_and_worst_category_name = "ANN",
)
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_var1,
model_names = ["CliMA", "ACCESS-ESM1-5"],
ploc = (3, 1),
best_and_worst_category_name = "ANN",
)
output_name = joinpath(tmp_dir, "test_boxplots.png")
Makie.save(output_name, fig)
# Plotting leaderboard
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "CliMA")
rmse_var[:, :] = [
[10.0 11.0 12.0 13.0 14.0]
[36.0 37.0 38.0 39.0 30.0]
[11.0 12.0 13.0 14.0 15.0]
]
ClimaAnalysis.add_unit!(
rmse_var,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
),
)
rmse_var1 = ClimaAnalysis.read_rmses(csv_file_path, "ta1")
rmse_var1 = ClimaAnalysis.add_model(rmse_var1, "CliMA")
rmse_var1[:, :] = [
[6.0 7.0 8.0 9.0 10.0]
[11.0 12.0 13.0 14.0 15.0]
[1.0 2.0 3.0 4.0 5.0]
]
ClimaAnalysis.add_unit!(
rmse_var1,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
),
)
rmse_var2 = ClimaAnalysis.read_rmses(csv_file_path, "ta2")
rmse_var2 = ClimaAnalysis.add_model(rmse_var2, "CliMA")
rmse_var2[:, :] = [
[0.5 1.0 1.5 2.0 2.5]
[6.0 7.0 8.0 9.0 10.0]
[11.0 12.0 13.0 14.0 15.0]
]
ClimaAnalysis.add_unit!(
rmse_var2,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
),
)
# Normalized RMSEs should improve going from ta to ta1 to ta2 for CliMA model
# Colors for ta of both models should be similar (close to 1)
# Colors for ta2 of best model should be greener (improve) from ta1 to ta2
# for CliMA, the normalized RMSEs from greatest to least should be ta1, ta, and ta2
fig = Makie.Figure(; fontsize = 20)
ClimaAnalysis.Visualize.plot_leaderboard!(
fig,
rmse_var,
rmse_var1,
rmse_var2,
best_category_name = "ANN",
)
output_name = joinpath(tmp_dir, "test_leaderboard.png")
Makie.save(output_name, fig)
# Plot box plots and leaderboard in one plot
rmse_vars = (rmse_var, rmse_var1, rmse_var2)
fig = Makie.Figure(; size = (800, 300 * 3 + 400), fontsize = 20)
for i in 1:3
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_vars[i],
ploc = (i, 1),
best_and_worst_category_name = "ANN",
)
end
ClimaAnalysis.Visualize.plot_leaderboard!(
fig,
rmse_vars...,
best_category_name = "ANN",
ploc = (4, 1),
)
output_name = joinpath(tmp_dir, "test_boxplot_and_leaderboard.png")
Makie.save(output_name, fig)
# Plotting box plot with NaN
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(rmse_var, "CliMA")
ClimaAnalysis.add_unit!(
rmse_var,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"CliMA" => "units",
),
)
rmse_var[2, 5] = 10.0
fig = Makie.Figure(; fontsize = 20)
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_var,
model_names = ["CliMA"],
best_and_worst_category_name = "ANN",
)
output_name = joinpath(tmp_dir, "test_boxplot_nan.png")
Makie.save(output_name, fig)
fig = Makie.Figure(; fontsize = 20)
ClimaAnalysis.Visualize.plot_leaderboard!(
fig,
rmse_var,
model_names = ["CliMA"],
best_category_name = "ANN",
)
output_name = joinpath(tmp_dir, "test_leaderboard_nan.png")
Makie.save(output_name, fig)
# Testing with long name
rmse_var = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var = ClimaAnalysis.add_model(
rmse_var,
"long_name_name_name_name_name_name_name",
)
ClimaAnalysis.add_unit!(
rmse_var,
Dict(
"ACCESS-ESM1-5" => "units",
"ACCESS-CM2" => "units",
"long_name_name_name_name_name_name_name" => "units",
),
)
rmse_var[2, 5] = 10.0
fig = Makie.Figure(; fontsize = 20)
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_var,
model_names = ["long_name_name_name_name_name_name_name"],
best_and_worst_category_name = "ANN",
)
output_name = joinpath(tmp_dir, "test_boxplot_long_name.png")
Makie.save(output_name, fig)
# Test error handling for plot_leaderboard
csv_file_path = joinpath(@__DIR__, "sample_data/test_csv.csv")
rmse_var1 = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var1 = ClimaAnalysis.add_category(rmse_var1, "hi")
rmse_var2 = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var2 = ClimaAnalysis.add_category(rmse_var2, "hello")
@test_throws ErrorException ClimaAnalysis.Visualize.plot_leaderboard!(
fig,
rmse_var1,
rmse_var2,
model_names = ["CliMA"],
best_category_name = "ANN",
)
end

# ===== test/test_Numerics.jl =====
using Test
import ClimaAnalysis
@testset "integration weights for lon and lat" begin
# Integration weights for lon (not equispaced)
lon = [-180.0, -45.0, 100.0, 180.0]
lon_weights = [135.0, 145.0, 80.0, 0.0] .* (Ο / 180.0)
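# Left-endpoint rule: each point is weighted by the gap to the next one (in
# radians), so the last point gets zero weight (135 = -45 - (-180), and so on)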
@test all(
isapprox.(
lon_weights,
ClimaAnalysis.Numerics._integration_weights_lon_left(lon),
),
)
# Integration weights for lat (not equispaced)
lat = [-90.0, 20.0, 45.0, 90.0]
lat_weights = [110.0, 25.0, 45.0, 0.0] .* (Ο / 180.0) .* cosd.(lat)
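# Latitude weights carry an extra cos(lat) area factor on top of the left-endpoint gaps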
@test all(
isapprox.(
lat_weights,
ClimaAnalysis.Numerics._integration_weights_lat_left(lat),
),
)
# Integration weights for lon (equispaced)
lon = collect(range(-180.0, 180.0, 5))
lon_weights = [90.0 for _ in lon] .* (Ο / 180.0)
@test all(
isapprox.(
lon_weights,
ClimaAnalysis.Numerics._integration_weights_lon_equispaced(lon),
),
)
lat = collect(range(-90.0, 90.0, 5))
lat_weights = [45.0 for _ in lat] .* (Ο / 180.0) .* cosd.(lat)
@test all(
isapprox.(
lat_weights,
ClimaAnalysis.Numerics._integration_weights_lat_equispaced(lat),
),
)
end
@testset "Integrating on lon and lat" begin
# Integrating only lon (non equispaced)
lon = collect(range(-180.0, 179.0, 100))
# Force array to be non equispaced for testing _integration_weights_lon
push!(lon, 180.0)
lon_data = ones(length(lon))
@test isapprox(
ClimaAnalysis.Numerics._integrate_lon(lon_data, lon, dims = 1)[1],
2.0Ο,
atol = 0.01,
)
# Integrating only lat (non equispaced)
lat = collect(range(-90.0, 89.0, 100))
# Force array to be non equispaced for testing _integration_weights_lat
push!(lat, 90.0)
lat_data = ones(length(lat))
@test isapprox(
ClimaAnalysis.Numerics._integrate_lat(lat_data, lat, dims = 1)[1],
2.0,
atol = 0.01,
)
# Integrating both lon and lat
data = ones(length(lat), length(lon))
integrated_lat = ClimaAnalysis.Numerics._integrate_lat(data, lat, dims = 1)
integrated_latlon =
ClimaAnalysis.Numerics._integrate_lon(integrated_lat, lon, dims = 1)
integrated_lon = ClimaAnalysis.Numerics._integrate_lon(data, lon, dims = 2)
integrated_lonlat =
ClimaAnalysis.Numerics._integrate_lat(integrated_lon, lat, dims = 1)
# Order of integration should not matter
@test isapprox(integrated_latlon[1], integrated_lonlat[1])
@test isapprox(integrated_latlon[1], 4Ο, atol = 0.01)
# Error checking for the lengths of lon and lat
@test_throws "Cannot integrate when latitude is a single point" ClimaAnalysis.Numerics._integrate_lat(
lat_data,
[0.0],
dims = 1,
)
@test_throws "Cannot integrate when latitude is a single point" ClimaAnalysis.Numerics._integrate_lat(
lon_data,
[0.0],
dims = 1,
)
# Integrating only lon (equispaced)
lon = collect(range(-179.5, 179.5, 360))
lon_data = ones(length(lon))
@test isapprox(
ClimaAnalysis.Numerics._integrate_lon(lon_data, lon, dims = 1)[1],
2.0Ο,
atol = 0.01,
)
# Integrating only lat (equispaced)
lat = collect(range(-89.5, 89.5, 180))
lat_data = ones(length(lat))
@test isapprox(
ClimaAnalysis.Numerics._integrate_lat(lat_data, lat, dims = 1)[1],
2.0,
atol = 0.01,
)
end

# ===== test/test_Sim.jl =====
using Test
import ClimaAnalysis
@testset "SimDir" begin
simulation_path = joinpath(@__DIR__, "sample_data")
simdir = ClimaAnalysis.SimDir(simulation_path)
@test ClimaAnalysis.available_vars(simdir) ==
Set(["va", "ua", "orog", "ta", "ts", "pfull"])
@test ClimaAnalysis.available_reductions(simdir, short_name = "ta") ==
Set(["average", "max", "min"])
@test_throws ErrorException ClimaAnalysis.available_reductions(
simdir,
short_name = "bob",
)
@test ClimaAnalysis.available_periods(
simdir,
short_name = "ta",
reduction = "max",
) == Set(["3.0h", "4.0h"])
@test_throws ErrorException ClimaAnalysis.available_periods(
simdir,
short_name = "ta",
reduction = "bob",
)
@test simdir.variable_paths["orog"]["inst"][nothing] ==
joinpath(simulation_path, "orog_inst.nc")
@test simdir.variable_paths["ta"]["max"] == Dict{Any, Any}(
"3.0h" => joinpath(simulation_path, "ta_3.0h_max.nc"),
"4.0h" => joinpath(simulation_path, "ta_4.0h_max.nc"),
)
@test simdir.vars["ta"]["max"] ==
Dict{Any, Any}("3.0h" => nothing, "4.0h" => nothing)
expected_files = Set(
joinpath(simulation_path, f) for
f in readdir(simulation_path) if endswith(f, ".nc")
)
@test simdir.allfiles == expected_files
@test !isempty(simdir)
@test isempty(ClimaAnalysis.SimDir(tempdir()))
end
@testset "OutputVar" begin
simulation_path = joinpath(@__DIR__, "sample_data")
simdir = ClimaAnalysis.SimDir(simulation_path)
ta_max = get(simdir, short_name = "ta", reduction = "max", period = "4.0h")
ts_max = get(simdir, short_name = "ts", reduction = "max", period = "1.0h")
@test ClimaAnalysis.units(ta_max) == "K"
@test collect(keys(ta_max.dims)) == Array(["time", "lon", "lat", "z"])
@test ta_max.dim2index ==
Dict(["time" => 1, "lon" => 2, "lat" => 3, "z" => 4])
@test ta_max.index2dim == ["time", "lon", "lat", "z"]
@test ta_max.dim_attributes["lat"]["units"] == "degrees_north"
@test size(ta_max.data) == (3, 180, 80, 10)
@test_throws ErrorException get(simdir, short_name = "bob")
@test_throws ErrorException get(
simdir,
short_name = "ta",
reduction = "max",
period = "10.0h",
)
# Check that passing a period errors when reduction = "inst"
@test_throws ErrorException orog =
get(simdir, short_name = "orog", reduction = "inst", period = "4.0h")
orog = get(simdir, short_name = "orog", reduction = "inst")
# Check is_z_1d
pfull = get(simdir, short_name = "pfull", reduction = "inst")
@test ClimaAnalysis.is_z_1D(pfull) == false
# Check the shorter forms of `get`
@test_throws ErrorException get(simdir, "ta")
@test_throws ErrorException get(simdir; short_name = "ta")
@test_throws ErrorException get(
simdir;
short_name = "ta",
reduction = "max",
)
@test orog == get(simdir, "orog")
@test orog ==
get(simdir; short_name = "orog", reduction = "inst", period = nothing)
@test ts_max == get(simdir; short_name = "ts", reduction = "max")
# short_name, long_name, units
@test ClimaAnalysis.short_name(orog) == "orog"
@test ClimaAnalysis.long_name(orog) == "Surface Altitude, Instantaneous"
@test ClimaAnalysis.units(orog) == "m"
# The ts_max file bundled with ClimaAnalysis does not have the short_name
# attribute because it was generated before that was a feature. We use that
# to check the empty tring
@test ClimaAnalysis.short_name(ts_max) == ""
end

# ===== test/test_Utils.jl =====
using Test
import ClimaAnalysis: Utils
import Dates
@testset "Regexp" begin
@test Utils.match_nc_filename("bob") === nothing
@test Utils.match_nc_filename("ta_1d_average.nc") ==
Tuple(["ta", "1d", "average"])
@test Utils.match_nc_filename("ta_3.0h_average.nc") ==
Tuple(["ta", "3.0h", "average"])
@test Utils.match_nc_filename("toa_net_flux_1m_40s_inst.nc") ==
Tuple(["toa_net_flux", "1m_40s", "inst"])
@test Utils.match_nc_filename("toa_net_flux_1M_inst.nc") ==
Tuple(["toa_net_flux", "1M", "inst"])
@test Utils.match_nc_filename("p500_1M_inst.nc") ==
Tuple(["p500", "1M", "inst"])
@test Utils.match_nc_filename("pfull_6.0m_max.nc") ==
Tuple(["pfull", "6.0m", "max"])
@test Utils.match_nc_filename("hu_inst.nc") ==
Tuple(["hu", nothing, "inst"])
end
@testset "Squeeze" begin
@test Utils.squeeze([[1 2] [3 4]]) == [1, 2, 3, 4]
end
@testset "nearest_index" begin
@test Utils.nearest_index([10, 20, 30], 0) == 1
@test Utils.nearest_index([10, 20, 30], 100) == 3
@test Utils.nearest_index([10, 20, 30], 20) == 2
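# Out-of-range targets return the index of the nearest endpoint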
@test Utils.nearest_index([10, 20, 30], typemax(Int)) == 3
@test Utils.nearest_index([10, 20, 30], -typemax(Int)) == 1
end
@testset "kwargs" begin
@test values(Utils.kwargs(a = 1, b = 2)) == (; a = 1, b = 2)
end
@testset "seconds_to_prettystr" begin
@test Utils.seconds_to_prettystr(1) == "1s"
@test Utils.seconds_to_prettystr(2) == "2s"
@test Utils.seconds_to_prettystr(60) == "1m"
@test Utils.seconds_to_prettystr(61) == "1m 1s"
@test Utils.seconds_to_prettystr(3600) == "1h"
@test Utils.seconds_to_prettystr(3666) == "1h 1m 6s"
@test Utils.seconds_to_prettystr(86400) == "1d"
@test Utils.seconds_to_prettystr(86401) == "1d 1s"
@test Utils.seconds_to_prettystr(86522) == "1d 2m 2s"
@test Utils.seconds_to_prettystr(24 * 60 * 60 * 365) == "1y"
@test Utils.seconds_to_prettystr(24 * 60 * 60 * 365 + 60) == "1y 1m"
@test Utils.seconds_to_prettystr(24 * 60 * 60 * 365 + 68) == "1y 1m 8s"
@test Utils.seconds_to_prettystr(24 * 60 * 60 * 365 + 68.5) == "1y 1m 8.5s"
end
@testset "format_title" begin
@test Utils.warp_string("") == ""
@test Utils.warp_string("test", max_width = 4) == "test"
@test Utils.warp_string(" test ", max_width = 4) == "test"
@test Utils.warp_string(" test") == "test"
@test Utils.warp_string("test1", max_width = 4) == "test1"
@test Utils.warp_string(" test1 ", max_width = 4) == "test1"
@test Utils.warp_string("test blah", max_width = 4) == "test\nblah"
@test Utils.warp_string("test1 test2 test3", max_width = 4) ==
"test1\ntest2\ntest3"
@test Utils.warp_string("abc def", max_width = 3) == "abc\ndef"
@test Utils.warp_string("is a test", max_width = 4) == "is a\ntest"
@test Utils.warp_string("a b c d", max_width = 2) == "a\nb\nc\nd"
@test Utils.warp_string("a b c d e f", max_width = 5) == "a b c\nd e f"
@test Utils.warp_string("a\tb\nc\vd\fe\rf", max_width = 11) == "a b c d e f"
end
@testset "split by season" begin
empty_dates = Vector{Dates.DateTime}()
@test Utils.split_by_season(empty_dates) == ([], [], [], [])
date = [Dates.DateTime(2015, 4, 13)]
@test Utils.split_by_season(date) ==
([Dates.DateTime(2015, 4, 13)], [], [], [])
dates = [
Dates.DateTime(2015, 1, 13),
Dates.DateTime(2018, 2, 13),
Dates.DateTime(1981, 7, 6),
Dates.DateTime(1993, 11, 19),
Dates.DateTime(2040, 4, 1),
Dates.DateTime(2000, 8, 18),
]
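# Expected grouping is (MAM, JJA, SON, DJF)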
expected_dates = (
[Dates.DateTime(2040, 4, 1)],
[Dates.DateTime(1981, 7, 6), Dates.DateTime(2000, 8, 18)],
[Dates.DateTime(1993, 11, 19)],
[Dates.DateTime(2015, 1, 13), Dates.DateTime(2018, 2, 13)],
)
@test Utils.split_by_season(dates) == expected_dates
end
@testset "equispaced" begin
equispaced = Utils._isequispaced([1.0, 2.0, 3.0])
@test equispaced == true
equispaced = Utils._isequispaced([0.0, 2.0, 3.0])
@test equispaced == false
end
@testset "Date and time conversion" begin
reference_date = Dates.DateTime(2013, 7, 1, 12)
date_one_day = Dates.DateTime(2013, 7, 2, 12)
date_one_hour = Dates.DateTime(2013, 7, 1, 13)
date_one_min = Dates.DateTime(2013, 7, 1, 12, 1)
date_one_sec = Dates.DateTime(2013, 7, 1, 12, 0, 1)
# Test time_to_date
@test Utils.time_to_date(reference_date, 86400.0) == date_one_day
@test Utils.time_to_date(reference_date, 3600.0) == date_one_hour
@test Utils.time_to_date(reference_date, 60.0) == date_one_min
@test Utils.time_to_date(reference_date, 1.0) == date_one_sec
# Test date_to_time
@test Utils.date_to_time(reference_date, date_one_day) == 86400.0
@test Utils.date_to_time(reference_date, date_one_hour) == 3600.0
@test Utils.date_to_time(reference_date, date_one_min) == 60.0
@test Utils.date_to_time(reference_date, date_one_sec) == 1.0
# Test period_to_seconds_float
@test Utils.period_to_seconds_float(Dates.Millisecond(1)) == 0.001
@test Utils.period_to_seconds_float(Dates.Second(1)) == 1.0
@test Utils.period_to_seconds_float(Dates.Minute(1)) == 60.0
@test Utils.period_to_seconds_float(Dates.Hour(1)) == 3600.0
@test Utils.period_to_seconds_float(Dates.Day(1)) == 86400.0
@test Utils.period_to_seconds_float(Dates.Week(1)) == 604800.0
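# Month and Year use the mean Gregorian year (365.2425 days = 3.1556952e7 s);
# a month is 1/12 of that (2.629746e6 s)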
@test Utils.period_to_seconds_float(Dates.Month(1)) == 2.629746e6
@test Utils.period_to_seconds_float(Dates.Year(1)) == 3.1556952e7
end
@testset "data_at_dim_vals" begin
data = [[1, 2, 3] [4, 5, 6] [7, 8, 9]]
dim_arr = [2.0, 3.0, 4.0]
dim_idx = 2
@test Utils._data_at_dim_vals(data, dim_arr, dim_idx, []) ==
reshape([], 3, 0)
@test Utils._data_at_dim_vals(data, dim_arr, dim_idx, [2.1]) ==
reshape([1; 2; 3], 3, 1)
@test Utils._data_at_dim_vals(data, dim_arr, dim_idx, [2.1, 2.9, 4.0]) ==
data
end

# ===== test/test_Var.jl =====
using Test
import ClimaAnalysis
import Interpolations as Intp
import NaNStatistics: nanmean
import NCDatasets: NCDataset
import OrderedCollections: OrderedDict
import Unitful: @u_str
import Dates
@testset "General" begin
# Test the short constructor
long = -180.0:180.0 |> collect
data = copy(long)
longvar = ClimaAnalysis.OutputVar(Dict("long" => long), data)
@test longvar.dims["long"] == long
# Unitful
attribs = Dict("long_name" => "hi", "units" => "m/s")
dim_attributes = OrderedDict(["long" => Dict("units" => "m")])
var_with_unitful = ClimaAnalysis.OutputVar(
attribs,
Dict("long" => long),
dim_attributes,
data,
)
@test ClimaAnalysis.units(var_with_unitful) == "m s^-1"
@test var_with_unitful.attributes["units"] == u"m" / u"s"
# Unparsable unit
attribs = Dict("long_name" => "hi", "units" => "bob")
var_without_unitful = ClimaAnalysis.OutputVar(
attribs,
Dict("long" => long),
dim_attributes,
data,
)
@test ClimaAnalysis.units(var_without_unitful) == "bob"
@test var_without_unitful.attributes["units"] == "bob"
# Reading directly from file
ncpath = joinpath(@__DIR__, "topo_drag.res.nc")
file_var = ClimaAnalysis.OutputVar(ncpath, "t11")
NCDataset(ncpath) do nc
@test nc["t11"][:, :, :] == file_var.data
end
# center_longitude!
#
# Check var without long
dims = Dict("z" => long)
var_error = ClimaAnalysis.OutputVar(
Dict{String, Any}(),
dims,
Dict{String, Any}(),
data,
)
@test_throws ErrorException ClimaAnalysis.center_longitude!(
var_error,
180.0,
)
time = 0:10.0 |> collect
dims = OrderedDict("lon" => long, "time" => time)
data = collect(reshape(1:(361 * 11), (361, 11)))
var_good = ClimaAnalysis.OutputVar(
Dict{String, Any}(),
dims,
Dict{String, Any}(),
data,
)
ClimaAnalysis.center_longitude!(var_good, 90.0)
# We are shifting by 91
@test var_good.dims["lon"][180] == 90
@test var_good.data[3, :] == data[3, :]
@test var_good.data[180, 1] == 271
@test_throws ErrorException ClimaAnalysis.OutputVar(
Dict("time" => time),
[1],
)
end
@testset "Interpolant boundary conditions" begin
# Check boundary conditions for lon (equispaced, spans 360 degrees), lat (equispaced,
# spans 180 degrees), and time: periodic in lon, flat in lat, throw in time
lon = 0.5:1.0:359.5 |> collect
lat = -89.5:1.0:89.5 |> collect
time = 1.0:100 |> collect
data = ones(length(lon), length(lat), length(time))
dims = OrderedDict(["lon" => lon, "lat" => lat, "time" => time])
attribs = Dict("long_name" => "hi")
dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
"time" => Dict("units" => "test_units3"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test var.interpolant.et == (Intp.Periodic(), Intp.Flat(), Intp.Throw())
# Not equispaced for lon and lat
lon = 0.5:1.0:359.5 |> collect |> x -> push!(x, 42.0) |> sort
lat = -89.5:1.0:89.5 |> collect |> x -> push!(x, 42.0) |> sort
time = 1.0:100 |> collect
data = ones(length(lon), length(lat), length(time))
dims = OrderedDict(["lon" => lon, "lat" => lat, "time" => time])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test var.interpolant.et == (Intp.Throw(), Intp.Throw(), Intp.Throw())
# Does not span the entire range for lon and lat
lon = 0.5:1.0:350.5 |> collect
lat = -89.5:1.0:80.5 |> collect
time = 1.0:100 |> collect
data = ones(length(lon), length(lat), length(time))
dims = OrderedDict(["lon" => lon, "lat" => lat, "time" => time])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test var.interpolant.et == (Intp.Throw(), Intp.Throw(), Intp.Throw())
# Dates for the time dimension
lon = 0.5:1.0:359.5 |> collect
lat = -89.5:1.0:89.5 |> collect
time = [
Dates.DateTime(2020, 3, 1, 1, 1),
Dates.DateTime(2020, 3, 1, 1, 2),
Dates.DateTime(2020, 3, 1, 1, 3),
]
data = ones(length(lon), length(lat), length(time))
dims = OrderedDict(["lon" => lon, "lat" => lat, "time" => time])
attribs = Dict("long_name" => "hi")
dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
"time" => Dict("units" => "test_units3"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test isnothing(var.interpolant)
end
@testset "empty" begin
dims = OrderedDict{String, Vector{Float64}}()
data = Float64[]
empty_var = ClimaAnalysis.OutputVar(dims, data)
@test ClimaAnalysis.isempty(empty_var)
dims = OrderedDict{String, Vector{Float64}}()
data = fill(1.0)
empty_var = ClimaAnalysis.OutputVar(dims, data)
@test !ClimaAnalysis.isempty(empty_var)
long = 0.0:180.0 |> collect
dims = OrderedDict(["long" => long])
data = ones(size(long))
dim_attributes = OrderedDict(["lon" => Dict("b" => 2)])
attribs = Dict("short_name" => "bob", "long_name" => "hi")
not_empty_var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
@test !ClimaAnalysis.isempty(not_empty_var)
end
@testset "Arithmetic operations" begin
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data1 = collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
dims = OrderedDict(["time" => time, "lon" => long, "lat" => lat])
dim_attributes = OrderedDict([
"time" => Dict("units" => "s"),
"lon" => Dict("b" => 2),
"lat" => Dict("a" => 1),
])
attribs = Dict("short_name" => "bob", "long_name" => "hi")
var1 = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data1)
dim_attributes2 = OrderedDict([
"time" => Dict("units" => "m"),
"lon" => Dict("lol" => 2),
"lat" => Dict("a" => 1),
])
var2 = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes2, data1)
data3 = 5.0 .+ collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
attribs3 = Dict("long_name" => "bob", "short_name" => "bula")
var3 = ClimaAnalysis.OutputVar(attribs3, dims, dim_attributes, data3)
# Check arecompatible
@test !ClimaAnalysis.arecompatible(var1, var2)
@test ClimaAnalysis.arecompatible(var1, var3)
var1plus10 = var1 + 10
@test var1plus10.data == data1 .+ 10
@test ClimaAnalysis.short_name(var1plus10) == "bob + 10"
@test ClimaAnalysis.long_name(var1plus10) == "hi + 10"
@test var1plus10((0.0, 0.0, 0.0)) == var1((0.0, 0.0, 0.0)) + 10
tenplusvar1 = 10 + var1
@test tenplusvar1.data == data1 .+ 10
@test ClimaAnalysis.short_name(tenplusvar1) == "10 + bob"
@test ClimaAnalysis.long_name(tenplusvar1) == "10 + hi"
@test tenplusvar1((0.0, 0.0, 0.0)) == 10 + var1((0.0, 0.0, 0.0))
var1plusvar3 = var1 + var3
@test var1plusvar3.data == data1 .+ data3
@test ClimaAnalysis.short_name(var1plusvar3) == "bob + bula"
@test ClimaAnalysis.long_name(var1plusvar3) == "hi + bob"
# Test element-wise multiplication and division between OutputVars
var_times = var1 * var3
@test var_times.data == var1.data .* var3.data
var_divide = var1 / var3
@test var_divide.data == var1.data ./ var3.data
end
@testset "Reductions (sphere dims)" begin
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data = collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
dims = OrderedDict(["time" => time, "lon" => long, "lat" => lat])
dim_attributes = OrderedDict([
"time" => Dict(),
"lon" => Dict("b" => 2),
"lat" => Dict("a" => 1),
])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
# Test copy
var_copied = copy(var)
fields = fieldnames(ClimaAnalysis.OutputVar)
for field in fields
@test getfield(var, field) == getfield(var_copied, field)
@test getfield(var, field) !== getfield(var_copied, field)
end
# Test reduction
lat_avg = ClimaAnalysis.average_lat(var)
@test lat_avg.dims == OrderedDict(["lon" => long, "time" => time])
@test lat_avg.dim_attributes ==
OrderedDict(["lon" => Dict("b" => 2), "time" => Dict()])
@test lat_avg.data == dropdims(nanmean(data, dims = 3), dims = 3)
wei_lat_avg = ClimaAnalysis.weighted_average_lat(var)
@test wei_lat_avg.dims == OrderedDict(["lon" => long, "time" => time])
@test wei_lat_avg.dim_attributes ==
OrderedDict(["lon" => Dict("b" => 2), "time" => Dict()])
weights = ones(size(data))
for i in eachindex(time)
for j in eachindex(long)
for k in eachindex(lat)
weights[i, j, k] = cosd(lat[k])
end
end
end
weights ./= nanmean(cosd.(lat))
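# Dividing by the mean of cos(lat) normalizes the weights, so the weighted
# average is mean(data .* cos(lat)) / mean(cos(lat))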
expected_avg = dropdims(nanmean(data .* weights, dims = 3), dims = 3)
@test wei_lat_avg.data β expected_avg
# Test reduction with NaN
latnan = [1, 2, 3]
datanan = [10.0, 20.0, NaN]
dimsnan = OrderedDict(["lat" => latnan])
dim_attributesnan = OrderedDict(["lat" => Dict("b" => 2)])
attribsnan = Dict("lat_name" => "hi")
varnan =
ClimaAnalysis.OutputVar(attribsnan, dimsnan, dim_attributesnan, datanan)
@test isnan(ClimaAnalysis.average_lat(varnan; ignore_nan = false).data[])
@test ClimaAnalysis.average_lat(varnan; weighted = true).data[] ≈
(datanan[1] * cosd(latnan[1]) + datanan[2] * cosd(latnan[2])) /
(cosd(latnan[1]) + cosd(latnan[2]))
wrong_dims = OrderedDict(["lat" => [0.0, 0.1]])
wrong_dim_attributes = OrderedDict(["lat" => Dict("a" => 1)])
wrong_var = ClimaAnalysis.OutputVar(
Dict{String, Any}(),
wrong_dims,
wrong_dim_attributes,
[0.0, 0.1],
)
@test_logs (
:warn,
"Detected latitudes are small. If units are radians, results will be wrong",
) ClimaAnalysis.weighted_average_lat(wrong_var)
lat_lon_avg = ClimaAnalysis.average_lon(lat_avg)
@test lat_lon_avg.dims == OrderedDict(["time" => time])
@test lat_lon_avg.dim_attributes == OrderedDict(["time" => Dict()])
@test lat_lon_avg.data ==
dropdims(nanmean(lat_avg.data, dims = 2), dims = 2)
lat_lon_time_avg = ClimaAnalysis.average_time(lat_lon_avg)
@test lat_lon_time_avg.dims == OrderedDict()
@test lat_lon_time_avg.dim_attributes == OrderedDict()
@test lat_lon_time_avg.data[] == nanmean(data)
@test lat_lon_time_avg.attributes["long_name"] ==
"hi averaged over lat (0.0 to 90.0) averaged over lon (0.0 to 180.0) averaged over time (0.0 to 10.0)"
end
@testset "Reductions (box dims)" begin
x = 0.0:180.0 |> collect
y = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data = reshape(1.0:(91 * 181 * 11), (11, 181, 91))
# Identical test pattern to sphere setup, with `dims` modified.
dims = OrderedDict(["time" => time, "x" => x, "y" => y])
dim_attributes = OrderedDict([
"time" => Dict(),
"x" => Dict("b" => 2),
"y" => Dict("a" => 1),
])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
y_avg = ClimaAnalysis.average_y(var)
@test y_avg.dims == OrderedDict(["x" => x, "time" => time])
@test y_avg.dim_attributes ==
OrderedDict(["x" => Dict("b" => 2), "time" => Dict()])
@test y_avg.data == dropdims(nanmean(data, dims = 3), dims = 3)
y_x_avg = ClimaAnalysis.average_x(y_avg)
xy_avg = ClimaAnalysis.average_xy(var)
@test y_x_avg.data == xy_avg.data
@test y_x_avg.dims == OrderedDict(["time" => time])
@test y_x_avg.dim_attributes == OrderedDict(["time" => Dict()])
@test y_x_avg.data == dropdims(nanmean(y_avg.data, dims = 2), dims = 2)
y_x_time_avg = ClimaAnalysis.average_time(y_x_avg)
xy_time_avg = ClimaAnalysis.average_time(xy_avg)
@test y_x_time_avg.dims == OrderedDict()
@test y_x_time_avg.dim_attributes == OrderedDict()
@test y_x_time_avg.data[] == nanmean(data)
@test y_x_time_avg.attributes["long_name"] ==
"hi averaged over y (0.0 to 90.0) averaged over x (0.0 to 180.0) averaged over time (0.0 to 10.0)"
@test xy_time_avg.attributes["long_name"] ==
"hi averaged horizontally over x (0.0 to 180.0) and y (0.0 to 90.0) averaged over time (0.0 to 10.0)"
end
@testset "Slicing" begin
z = 0.0:20.0 |> collect
time = 100.0:110.0 |> collect
data = reshape(1.0:(11 * 21), (11, 21))
dims = OrderedDict(["time" => time, "z" => z])
dim_attributes =
OrderedDict(["time" => Dict("units" => "s"), "z" => Dict("b" => 2)])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
z_sliced = ClimaAnalysis.slice(var, z = 1.0)
# 1.0 is the second index
z_expected_data = data[:, 2]
@test z_sliced.dims == OrderedDict(["time" => time])
@test z_sliced.dim_attributes ==
OrderedDict(["time" => Dict("units" => "s")])
@test z_sliced.data == z_expected_data
t_sliced = ClimaAnalysis.slice(var, time = 200.0)
# 200.0 is out of range, so the nearest time (110.0, the last index) is used
t_expected_data = data[end, :]
@test t_sliced.dims == OrderedDict(["z" => z])
@test t_sliced.dim_attributes == OrderedDict(["z" => Dict("b" => 2)])
@test t_sliced.data == t_expected_data
@test t_sliced.attributes["long_name"] == "hi time = 1m 50.0s"
@test t_sliced.attributes["slice_time"] == "110.0"
@test t_sliced.attributes["slice_time_units"] == "s"
end
@testset "Windowing" begin
z = 0.0:20.0 |> collect
time = 0.0:10.0 |> collect
data = reshape(1.0:(11 * 21), (11, 21))
dims = OrderedDict(["time" => time, "z" => z])
dim_attributes =
OrderedDict(["time" => Dict("units" => "s"), "z" => Dict("b" => 2)])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
# Dimension not existing
@test_throws ErrorException ClimaAnalysis.window(var, "lat")
# Left right not ordered
@test_throws ErrorException ClimaAnalysis.window(
var,
"time",
left = 10,
right = 1,
)
var_windowed = ClimaAnalysis.window(var, "time", left = 2.5, right = 5.1)
expected_data = data[3:6, :]
@test var_windowed.data == expected_data
@test var_windowed.dims["time"] == time[3:6]
end
@testset "Extracting dimension" begin
@test ClimaAnalysis.Var.find_dim_name(["a", "b"], ["c", "a"]) == "a"
@test_throws ErrorException ClimaAnalysis.Var.find_dim_name(
["a", "b"],
["c", "d"],
)
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
alt = 0.0:2.0 |> collect
data = reshape(1.0:(3 * 91 * 181 * 11), (11, 181, 91, 3))
dims =
OrderedDict(["time" => time, "lon" => long, "lat" => lat, "z" => alt])
attribs = Dict("short_name" => "bob", "long_name" => "hi")
dim_attributes = OrderedDict([
"time" => Dict(),
"lon" => Dict("b" => 2),
"lat" => Dict("a" => 1),
"z" => Dict(),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
@test ClimaAnalysis.time_name(var) == "time"
@test ClimaAnalysis.longitude_name(var) == "lon"
@test ClimaAnalysis.latitude_name(var) == "lat"
@test ClimaAnalysis.altitude_name(var) == "z"
@test ClimaAnalysis.times(var) == time
@test ClimaAnalysis.latitudes(var) == lat
@test ClimaAnalysis.longitudes(var) == long
@test ClimaAnalysis.altitudes(var) == alt
@test ClimaAnalysis.has_time(var)
@test ClimaAnalysis.has_longitude(var)
@test ClimaAnalysis.has_latitude(var)
@test ClimaAnalysis.has_altitude(var)
@test ClimaAnalysis.conventional_dim_name("long") == "longitude"
@test ClimaAnalysis.conventional_dim_name("latitude") == "latitude"
@test ClimaAnalysis.conventional_dim_name("t") == "time"
@test ClimaAnalysis.conventional_dim_name("date") == "date"
@test ClimaAnalysis.conventional_dim_name("z") == "altitude"
@test ClimaAnalysis.conventional_dim_name("hi") == "hi"
end
@testset "Interpolation" begin
# 1D interpolation with linear data, should yield correct results
long = -175.0:175.0 |> collect
data = copy(long)
longvar = ClimaAnalysis.OutputVar(Dict("long" => long), data)
@test longvar.([10.5, 20.5]) == [10.5, 20.5]
# Test error for data outside of range
@test_throws BoundsError longvar(200.0)
# 2D interpolation with linear data, should yield correct results
time = 100.0:110.0 |> collect
z = 0.0:20.0 |> collect
data = reshape(1.0:(11 * 21), (11, 21))
var2d = ClimaAnalysis.OutputVar(Dict("time" => time, "z" => z), data)
@test var2d.([[105.0, 10.0], [105.5, 10.5]]) == [116.0, 122]
end
@testset "Dim of units and range" begin
x = 0.0:180.0 |> collect
y = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data = collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
dims = OrderedDict(["time" => time, "x" => x, "y" => y])
dim_attributes = OrderedDict([
"time" => Dict("units" => "seconds"),
"x" => Dict("units" => "km"),
])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
@test ClimaAnalysis.dim_units(var, "y") == ""
@test ClimaAnalysis.dim_units(var, "x") == "km"
@test ClimaAnalysis.range_dim(var, "x") == (0.0, 180.0)
@test_throws ErrorException(
"Var does not have dimension z, found [\"time\", \"x\", \"y\"]",
) ClimaAnalysis.dim_units(var, "z")
@test_throws ErrorException(
"Var does not have dimension z, found [\"time\", \"x\", \"y\"]",
) ClimaAnalysis.range_dim(var, "z")
end
@testset "Long name updates" begin
# Setup to test x_avg, y_avg, xy_avg
x = 0.0:180.0 |> collect
y = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data = collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
dims = OrderedDict(["time" => time, "x" => x, "y" => y])
dim_attributes = OrderedDict([
"time" => Dict("units" => "seconds"),
"x" => Dict("units" => "km"),
"y" => Dict("units" => "km"),
])
attribs = Dict("long_name" => "hi")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data)
y_avg = ClimaAnalysis.average_y(var)
@test y_avg.attributes["long_name"] == "hi averaged over y (0.0 to 90.0km)"
x_avg = ClimaAnalysis.average_x(var)
@test x_avg.attributes["long_name"] == "hi averaged over x (0.0 to 180.0km)"
xy_avg = ClimaAnalysis.average_xy(var)
@test xy_avg.attributes["long_name"] ==
"hi averaged horizontally over x (0.0 to 180.0km) and y (0.0 to 90.0km)"
# Setup to test average_lat and average_lon
long = 0.0:180.0 |> collect
lat = 0.0:90.0 |> collect
time = 0.0:10.0 |> collect
data1 = collect(reshape(1.0:(91 * 181 * 11), (11, 181, 91)))
dims = OrderedDict(["time" => time, "lon" => long, "lat" => lat])
dim_attributes = OrderedDict([
"time" => Dict("units" => "seconds"),
"lon" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
attribs = Dict("long_name" => "hi")
var1 = ClimaAnalysis.OutputVar(attribs, dims, dim_attributes, data1)
lat_avg = ClimaAnalysis.average_lat(var1)
lon_avg = ClimaAnalysis.average_lon(var1)
lat_weighted_avg = ClimaAnalysis.weighted_average_lat(var1)
@test lon_avg.attributes["long_name"] ==
"hi averaged over lon (0.0 to 180.0test_units1)"
@test lat_avg.attributes["long_name"] ==
"hi averaged over lat (0.0 to 90.0test_units2)"
@test lat_weighted_avg.attributes["long_name"] ==
"hi weighted averaged over lat (0.0 to 90.0test_units2)"
end
@testset "Consistent units checking" begin
x_long = 0.0:180.0 |> collect
x_lat = 0.0:90.0 |> collect
x_data = reshape(1.0:(181 * 91), (181, 91))
x_dims = OrderedDict(["long" => x_long, "lat" => x_lat])
x_attribs = Dict("long_name" => "hi")
x_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
x_var = ClimaAnalysis.OutputVar(x_attribs, x_dims, x_dim_attribs, x_data)
y_lon = 0.0:90.0 |> collect
y_lat = 0.0:45.0 |> collect
y_data = reshape(1.0:(91 * 46), (91, 46))
y_dims = OrderedDict(["lon" => y_lon, "lat" => y_lat])
y_attribs = Dict("long_name" => "hi")
y_dim_attribs = OrderedDict([
"lon" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
y_var = ClimaAnalysis.OutputVar(y_attribs, y_dims, y_dim_attribs, y_data)
@test_nowarn ClimaAnalysis.Var._check_dims_consistent(x_var, y_var)
# Test if units are consistent between dimensions
x_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units2"),
"lat" => Dict("units" => "test_units1"),
])
x_var = ClimaAnalysis.OutputVar(x_attribs, x_dims, x_dim_attribs, x_data)
@test_throws "Units for dimensions [\"long\", \"lat\"] in x is not consistent with units for dimensions [\"lon\", \"lat\"] in y" ClimaAnalysis.Var._check_dims_consistent(
x_var,
y_var,
)
# Test if units are missing from any of the dimensions
x_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units2"),
"lat" => Dict("units" => ""),
])
x_var = ClimaAnalysis.OutputVar(x_attribs, x_dims, x_dim_attribs, x_data)
@test_throws "Units for dimensions [\"lat\"] are missing in x and units for dimensions [\"lat\"] are missing in y" ClimaAnalysis.Var._check_dims_consistent(
x_var,
x_var,
)
@test_throws "Units for dimensions [\"lat\"] are missing in x" ClimaAnalysis.Var._check_dims_consistent(
x_var,
y_var,
)
@test_throws "Units for dimensions [\"lat\"] are missing in y" ClimaAnalysis.Var._check_dims_consistent(
y_var,
x_var,
)
# Test if type of dimensions agree
x_data = reshape(1.0:(91 * 181), (91, 181))
x_dims = OrderedDict(["lat" => x_lat, "long" => x_long])
x_dim_attribs = OrderedDict([
"lat" => Dict("units" => "test_units1"),
"long" => Dict("units" => "test_units2"),
])
x_var = ClimaAnalysis.OutputVar(x_attribs, x_dims, x_dim_attribs, x_data)
@test_throws "Dimensions do not agree between x ([\"latitude\", \"longitude\"]) and y ([\"longitude\", \"latitude\"])" ClimaAnalysis.Var._check_dims_consistent(
x_var,
y_var,
)
# Test number of dimensions are the same
x_data = reshape(1.0:(181), (181))
x_dims = OrderedDict(["long" => x_long])
x_attribs = Dict("long_name" => "hi")
x_dim_attribs = OrderedDict(["long" => Dict("units" => "test_units1")])
x_var = ClimaAnalysis.OutputVar(x_attribs, x_dims, x_dim_attribs, x_data)
@test_throws "Number of dimensions do not match between x (1) and y (2)" ClimaAnalysis.Var._check_dims_consistent(
x_var,
y_var,
)
end
@testset "Reordering" begin
# Reordering the dimensions of a var to match itself
src_long = 0.0:180.0 |> collect
src_lat = 0.0:90.0 |> collect
src_data = ones(length(src_long), length(src_lat))
src_dims = OrderedDict(["long" => src_long, "lat" => src_lat])
src_attribs = Dict("long_name" => "hi")
src_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
src_var = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs,
src_data,
)
reordered_var = ClimaAnalysis.reordered_as(src_var, src_var)
@test reordered_var.attributes == src_var.attributes
@test reordered_var.dims == src_var.dims
@test reordered_var.dim_attributes == src_var.dim_attributes
@test reordered_var.data == src_var.data
# Reordering the dimensions of src_var to match a different order of dimensions in
# dest_var
dest_long = 20.0:180.0 |> collect
dest_lat = 30.0:90.0 |> collect
dest_data = zeros(length(dest_lat), length(dest_long))
dest_dims = OrderedDict(["lat" => dest_lat, "long" => dest_long])
dest_attribs = Dict("long_name" => "hi")
dest_dim_attribs = OrderedDict([
"lat" => Dict("units" => "test_units4"),
"long" => Dict("units" => "test_units3"),
])
dest_var = ClimaAnalysis.OutputVar(
dest_attribs,
dest_dims,
dest_dim_attribs,
dest_data,
)
reordered_var = ClimaAnalysis.reordered_as(src_var, dest_var)
@test reordered_var.attributes == src_var.attributes
@test reordered_var.dims ==
OrderedDict(["lat" => src_lat, "long" => src_long])
@test reordered_var.dim_attributes == OrderedDict([
"lat" => Dict("units" => "test_units2"),
"long" => Dict("units" => "test_units1"),
])
@test reordered_var.data == ones(length(src_lat), length(src_long))
# Reordering but dim_attributes is not available for every dimension
src_dim_attribs_one = OrderedDict(["lat" => Dict("units" => "test_units2")])
src_dim_attribs_empty = empty(src_dim_attribs_one)
src_dim_attribs_extra = OrderedDict([
"extra_info" => "hi",
"lat" => Dict("units" => "test_units2"),
])
src_var_one = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs_one,
src_data,
)
src_var_empty = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs_empty,
src_data,
)
src_var_extra = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs_extra,
src_data,
)
reordered_var = ClimaAnalysis.reordered_as(src_var_one, dest_var)
@test reordered_var.dim_attributes == src_dim_attribs_one
reordered_var = ClimaAnalysis.reordered_as(src_var_empty, dest_var)
@test reordered_var.dim_attributes == src_dim_attribs_empty
reordered_var = ClimaAnalysis.reordered_as(src_var_extra, dest_var)
@test reordered_var.dim_attributes == OrderedDict([
"lat" => Dict("units" => "test_units2"),
"extra_info" => "hi",
])
# Error checking for dimensions not being the same in both
src_long = 20.0:180.0 |> collect
src_lat = 30.0:90.0 |> collect
src_dims = OrderedDict(["long" => src_long, "lat" => src_lat])
src_data = ones(length(src_long), length(src_lat))
dest_lat = 30.0:90.0 |> collect
dest_dims = OrderedDict(["lat" => dest_lat])
dest_data = ones(length(src_lat))
src_var = ClimaAnalysis.OutputVar(src_dims, src_data)
dest_var = ClimaAnalysis.OutputVar(dest_dims, dest_data)
@test_throws ErrorException ClimaAnalysis.reordered_as(src_var, dest_var)
end
@testset "Resampling" begin
src_long = 0.0:180.0 |> collect
src_lat = 0.0:90.0 |> collect
src_data = reshape(1.0:(181 * 91), (181, 91))
src_dims = OrderedDict(["long" => src_long, "lat" => src_lat])
src_attribs = Dict("long_name" => "hi")
src_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
src_var = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs,
src_data,
)
dest_long = 0.0:90.0 |> collect
dest_lat = 0.0:45.0 |> collect
dest_data = reshape(1.0:(91 * 46), (91, 46))
dest_dims = OrderedDict(["long" => dest_long, "lat" => dest_lat])
dest_attribs = Dict("long_name" => "hi")
dest_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
dest_var = ClimaAnalysis.OutputVar(
dest_attribs,
dest_dims,
dest_dim_attribs,
dest_data,
)
@test src_var.data == ClimaAnalysis.resampled_as(src_var, src_var).data
resampled_var = ClimaAnalysis.resampled_as(src_var, dest_var)
@test resampled_var.data == reshape(1.0:(181 * 91), (181, 91))[1:91, 1:46]
@test_throws BoundsError ClimaAnalysis.resampled_as(dest_var, src_var)
# BoundsError check
src_long = 90.0:120.0 |> collect
src_lat = 45.0:90.0 |> collect
src_data = zeros(length(src_long), length(src_lat))
src_dims = OrderedDict(["long" => src_long, "lat" => src_lat])
src_var = ClimaAnalysis.OutputVar(
src_attribs,
src_dims,
src_dim_attribs,
src_data,
)
dest_long = 85.0:115.0 |> collect
dest_lat = 50.0:85.0 |> collect
dest_data = zeros(length(dest_long), length(dest_lat))
dest_dims = OrderedDict(["long" => dest_long, "lat" => dest_lat])
dest_var = ClimaAnalysis.OutputVar(
dest_attribs,
dest_dims,
dest_dim_attribs,
dest_data,
)
@test_throws BoundsError ClimaAnalysis.resampled_as(src_var, dest_var)
end
@testset "Units" begin
long = -180.0:180.0 |> collect
data = copy(long)
# Unitful
attribs = Dict("long_name" => "hi", "units" => "m/s")
dim_attributes = OrderedDict(["long" => Dict("units" => "m")])
var_with_unitful = ClimaAnalysis.OutputVar(
attribs,
Dict("long" => long),
dim_attributes,
data,
)
var_without_unitful = ClimaAnalysis.OutputVar(
Dict{String, Any}(),
Dict("long" => long),
dim_attributes,
data,
)
var_empty_unit = ClimaAnalysis.OutputVar(
Dict{String, Any}("units" => ""),
Dict("long" => long),
dim_attributes,
data,
)
@test ClimaAnalysis.has_units(var_with_unitful)
# Convert to cm/s
var_unitful_in_cms = ClimaAnalysis.convert_units(var_with_unitful, "cm/s")
@test var_unitful_in_cms.data == 100 .* var_with_unitful.data
# Unparsable because of new units
@test_throws ErrorException ClimaAnalysis.convert_units(
var_with_unitful,
"bob",
)
# New units, using conversion function
var_notunitful = ClimaAnalysis.convert_units(
var_with_unitful,
"bob",
conversion_function = (data) -> 2 * data,
)
@test var_notunitful.data == 2 .* var_with_unitful.data
# New units parseable, but with conversion function
@test_logs (:warn, "Ignoring conversion_function, units are parseable.") ClimaAnalysis.convert_units(
var_with_unitful,
"cm/s",
conversion_function = (data) -> 2 * data,
)
end
@testset "Integrating on lat and lon" begin
# Tests for integrate_lon
lon = collect(range(-179.5, 179.5, 360))
lon_data = ones(length(lon))
lon_dims = OrderedDict(["lon" => lon])
lon_attribs = Dict("long_name" => "hi")
lon_dim_attribs = OrderedDict(["lon" => Dict("units" => "deg")])
var = ClimaAnalysis.OutputVar(
lon_attribs,
lon_dims,
lon_dim_attribs,
lon_data,
)
var_integrated_lon = ClimaAnalysis.Var.integrate_lon(var)
@test isapprox(var_integrated_lon.data[1], 2.0 * π, atol = 0.01)
@test var_integrated_lon.dims == OrderedDict()
@test var_integrated_lon.dim_attributes == OrderedDict()
@test "hi integrated over lon (-179.5 to 179.5deg)" ==
var_integrated_lon.attributes["long_name"]
@test_throws "var does not has latitude as a dimension" ClimaAnalysis.Var.integrate_lat(
var,
)
# Tests for integrate_lat
lat = collect(range(-89.5, 89.5, 180))
lat_data = ones(length(lat))
lat_dims = OrderedDict(["lat" => lat])
lat_attribs = Dict("long_name" => "hi")
lat_dim_attribs = OrderedDict(["lat" => Dict("units" => "deg")])
var = ClimaAnalysis.OutputVar(
lat_attribs,
lat_dims,
lat_dim_attribs,
lat_data,
)
var_integrated_lat = ClimaAnalysis.Var.integrate_lat(var)
@test isapprox(var_integrated_lat.data[1], 2.0, atol = 0.01)
@test var_integrated_lat.dims == OrderedDict()
@test var_integrated_lat.dim_attributes == OrderedDict()
@test "hi integrated over lat (-89.5 to 89.5deg)" ==
var_integrated_lat.attributes["long_name"]
@test_throws "var does not has longitude as a dimension" ClimaAnalysis.Var.integrate_lon(
var,
)
# Unit checking
dim_attribs_no_units = OrderedDict([
"lon" => Dict("units" => ""),
"lat" => Dict("units" => ""),
])
var_lon_no_units = ClimaAnalysis.OutputVar(
lon_attribs,
lon_dims,
dim_attribs_no_units,
lon_data,
)
@test_throws ErrorException ClimaAnalysis.Var.integrate_lon(
var_lon_no_units,
)
var_lat_no_units = ClimaAnalysis.OutputVar(
lat_attribs,
lat_dims,
dim_attribs_no_units,
lat_data,
)
@test_throws ErrorException ClimaAnalysis.Var.integrate_lat(
var_lat_no_units,
)
end
@testset "Integrating on sphere" begin
# Integrate out all dimensions (lat and lon) from OutputVar
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs = Dict("long_name" => "hi")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
integrated_var = ClimaAnalysis.Var.integrate_lonlat(var)
@test isapprox(integrated_var.data[1], 4 * π, atol = 0.1)
@test integrated_var.dims == OrderedDict()
@test integrated_var.dim_attributes == OrderedDict()
# Integrating out lon and lat to get time series data
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
time = collect(range(0.0, 10.0, 10))
data = ones(length(lat), length(time), length(lon))
dims = OrderedDict(["lat" => lat, "time" => time, "lon" => lon])
attribs = Dict("long_name" => "hi")
dim_attribs = OrderedDict([
"lat" => Dict("units" => "deg"),
"time" => Dict("units" => "days"),
"lon" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
integrated_var = ClimaAnalysis.Var.integrate_lonlat(var)
@test all(
isapprox.(integrated_var.data, [4 * π for _ in 1:10], atol = 0.01),
)
@test integrated_var.dims == OrderedDict(["time" => time])
@test integrated_var.dim_attributes ==
OrderedDict(["time" => Dict("units" => "days")])
@test "hi integrated over lon (-179.5 to 179.5deg) and integrated over lat (-89.5 to 89.5deg)" ==
integrated_var.attributes["long_name"]
# Unit checking
dim_attribs_no_lon = OrderedDict([
"time" => Dict("units" => "days"),
"lat" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs_no_lon, data)
@test_throws "The unit for lon is missing or is not degree" ClimaAnalysis.Var.integrate_lonlat(
var,
)
dim_attribs_no_lat = OrderedDict([
"time" => Dict("units" => "days"),
"lon" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs_no_lat, data)
@test_throws "The unit for lat is missing or is not degree" ClimaAnalysis.Var.integrate_lonlat(
var,
)
end
@testset "split_by_season" begin
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
time = [0.0]
push!(time, 5_184_000.0) # correspond to 2024-3-1
push!(time, 5_184_001.0)
push!(time, 13_132_800.0) # correspond to 2024-6-1
push!(time, 13_132_802.0)
push!(time, 13_132_803.0)
data = ones(length(lat), length(time), length(lon))
dims = OrderedDict(["lat" => lat, "time" => time, "lon" => lon])
attribs = Dict("long_name" => "hi", "start_date" => "2024-1-1")
dim_attribs = OrderedDict([
"lat" => Dict("units" => "deg"),
"time" => Dict("units" => "s"),
"lon" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
MAM, JJA, SON, DJF = ClimaAnalysis.split_by_season(var)
# Check size of data
@test size(MAM.data) == (length(lat), 2, length(lon))
@test size(JJA.data) == (length(lat), 3, length(lon))
@test size(SON.data) == (0,)
@test size(DJF.data) == (length(lat), 1, length(lon))
# Check times are correct in OutputVars
@test MAM.dims["time"] == [5_184_000.0, 5_184_001.0]
@test JJA.dims["time"] == [13_132_800.0, 13_132_802.0, 13_132_803.0]
@test DJF.dims["time"] == [0.0]
# Check start date
@test MAM.attributes["start_date"] == "2024-1-1"
@test JJA.attributes["start_date"] == "2024-1-1"
@test DJF.attributes["start_date"] == "2024-1-1"
# Check empty OutputVar
@test isempty(SON)
# Check error handling
attribs_no_start_date = Dict("long_name" => "hi")
var =
ClimaAnalysis.OutputVar(attribs_no_start_date, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.split_by_season(var)
dim_attribs_no_sec = OrderedDict([
"lat" => Dict("units" => "deg"),
"time" => Dict("units" => "min"),
"lon" => Dict("units" => "deg"),
])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs_no_sec, data)
@test_throws ErrorException ClimaAnalysis.split_by_season(var)
lon = collect(range(-179.5, 179.5, 360))
data = ones(length(lon))
dims = OrderedDict(["lon" => lon])
attribs = Dict("long_name" => "hi", "start_date" => "2024-1-1")
dim_attribs = OrderedDict(["lon" => Dict("units" => "deg")])
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.split_by_season(var)
end
@testset "Compute bias" begin
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var_ones = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_ones)
data_twos = ones(length(lon), length(lat)) .* 2.0
var_twos = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_twos)
bias_var = ClimaAnalysis.bias(var_ones, var_twos)
global_bias = ClimaAnalysis.global_bias(var_ones, var_twos)
# Check global bias computation, short_name, long_name, and units
@test bias_var.attributes["global_bias"] == -1.0
@test global_bias == -1.0
@test bias_var.data == ones(length(lon), length(lat)) * -1.0
@test ClimaAnalysis.short_name(bias_var) == "sim-obs_short"
@test ClimaAnalysis.long_name(bias_var) == "SIM - OBS short"
@test ClimaAnalysis.units(bias_var) == "kg"
# Flip order in bias and check computations
bias_var = ClimaAnalysis.bias(var_twos, var_ones)
global_bias = ClimaAnalysis.global_bias(var_twos, var_ones)
@test bias_var.attributes["global_bias"] == 1.0
@test global_bias == 1.0
@test bias_var.data == ones(length(lon), length(lat)) * 1.0
end
@testset "Compute mse" begin
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var_ones = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_ones)
data_threes = ones(length(lon), length(lat)) .* 3.0
var_threes =
ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_threes)
squared_error_var = ClimaAnalysis.squared_error(var_ones, var_threes)
global_mse = ClimaAnalysis.global_mse(var_ones, var_threes)
global_rmse = ClimaAnalysis.global_rmse(var_ones, var_threes)
# Check global bias computation, short_name, long_name, and units
@test squared_error_var.attributes["global_mse"] == (1.0 - 3.0)^2
@test squared_error_var.attributes["global_rmse"] == 2.0
@test global_mse == (1.0 - 3.0)^2
@test global_rmse == 2.0
@test squared_error_var.data == (data_ones - data_threes) .^ 2
@test ClimaAnalysis.short_name(squared_error_var) == "(sim-obs)^2_short"
@test ClimaAnalysis.long_name(squared_error_var) == "(SIM - OBS)^2 short"
@test ClimaAnalysis.units(squared_error_var) == "kg^2"
# Flip order in squared_error and check computations
squared_error_var = ClimaAnalysis.squared_error(var_threes, var_ones)
global_mse = ClimaAnalysis.global_mse(var_threes, var_ones)
global_rmse = ClimaAnalysis.global_rmse(var_threes, var_ones)
@test squared_error_var.attributes["global_mse"] == (3.0 - 1.0)^2
@test squared_error_var.attributes["global_rmse"] == 2.0
@test global_mse == (3.0 - 1.0)^2
@test global_rmse == 2.0
@test squared_error_var.data == (data_threes - data_ones) .^ 2
# Check unit handling
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs_unitful =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg^2/m")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var_unitful =
ClimaAnalysis.OutputVar(attribs_unitful, dims, dim_attribs, data_ones)
attribs_not_unitful = Dict(
"long_name" => "idk",
"short_name" => "short",
"units" => "wacky/weird^2",
)
var_not_unitful = ClimaAnalysis.OutputVar(
attribs_not_unitful,
dims,
dim_attribs,
data_ones,
)
var_unitful = ClimaAnalysis.squared_error(var_unitful, var_unitful)
var_not_unitful =
ClimaAnalysis.squared_error(var_not_unitful, var_not_unitful)
@test ClimaAnalysis.units(var_unitful) == "(kg^2 m^-1)^2"
@test ClimaAnalysis.units(var_not_unitful) == "(wacky/weird^2)^2"
end
@testset "Units and dims check for error functions" begin
# Missing units for data
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs_missing_data_units =
Dict("long_name" => "idk", "short_name" => "short")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "rad"),
"lat" => Dict("units" => "rad"),
])
var_missing_data_units = ClimaAnalysis.OutputVar(
attribs_missing_data_units,
dims,
dim_attribs,
data_ones,
)
@test_throws ErrorException ClimaAnalysis.bias(
var_missing_data_units,
var_missing_data_units,
)
@test_throws ErrorException ClimaAnalysis.squared_error(
var_missing_data_units,
var_missing_data_units,
)
# Mismatch units for data
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs_kg =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
attribs_g =
Dict("long_name" => "idk", "short_name" => "short", "units" => "g")
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var_kg = ClimaAnalysis.OutputVar(attribs_kg, dims, dim_attribs, data_ones)
var_g = ClimaAnalysis.OutputVar(attribs_g, dims, dim_attribs, data_ones)
@test_throws ErrorException ClimaAnalysis.bias(var_kg, var_g)
@test_throws ErrorException ClimaAnalysis.squared_error(var_kg, var_g)
# Mismatch units for dims
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
dim_attribs_rad = OrderedDict([
"lon" => Dict("units" => "rad"),
"lat" => Dict("units" => "rad"),
])
dim_attribs_deg = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
var_rad = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs_rad, data_ones)
var_deg = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs_deg, data_ones)
@test_throws ErrorException ClimaAnalysis.bias(var_rad, var_deg)
@test_throws ErrorException ClimaAnalysis.squared_error(var_rad, var_deg)
# Missing units for dims
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data_ones = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
dim_attribs = OrderedDict(["lon" => Dict("units" => "deg")])
var_missing_dim_units =
ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data_ones)
@test_throws ErrorException ClimaAnalysis.bias(
var_missing_dim_units,
var_missing_dim_units,
)
@test_throws ErrorException ClimaAnalysis.squared_error(
var_missing_dim_units,
var_missing_dim_units,
)
# Missing dims
lon = collect(range(-179.5, 179.5, 360))
data_missing_dim = ones(length(lon))
dims_missing_dim = OrderedDict(["lon" => lon])
dim_attribs_missing_dim = OrderedDict(["lon" => Dict("units" => "deg")])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var_missing = ClimaAnalysis.OutputVar(
attribs,
dims_missing_dim,
dim_attribs_missing_dim,
data_missing_dim,
)
@test_throws ErrorException ClimaAnalysis.bias(var_missing, var_missing)
@test_throws ErrorException ClimaAnalysis.squared_error(
var_missing,
var_missing,
)
# Dimensions should be lon and lat
lon = collect(range(-179.5, 179.5, 360))
tal = collect(range(-89.5, 89.5, 180))
data_tal = ones(length(lon), length(tal))
dims_tal = OrderedDict(["lon" => lon, "tal" => tal])
dim_attribs_tal = OrderedDict([
"lon" => Dict("units" => "deg"),
"tal" => Dict("units" => "deg"),
])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var_tal =
ClimaAnalysis.OutputVar(attribs, dims_tal, dim_attribs_tal, data_tal)
@test_throws ErrorException ClimaAnalysis.bias(var_tal, var_tal)
@test_throws ErrorException ClimaAnalysis.squared_error(var_tal, var_tal)
end
@testset "Setting units" begin
# Unit exists (unitful)
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_units = ClimaAnalysis.set_units(var, "idk")
@test ClimaAnalysis.units(var_units) == "idk"
# Unit exists (not unitful)
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "wacky")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_units = ClimaAnalysis.set_units(var, "idk")
@test ClimaAnalysis.units(var_units) == "idk"
# Unit does not exist
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
attribs = Dict("long_name" => "idk", "short_name" => "short")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_units = ClimaAnalysis.set_units(var, "idk")
@test ClimaAnalysis.units(var_units) == "idk"
end
@testset "Dates to seconds for vars" begin
# Test for no start date
time_arr = [
Dates.DateTime(2020, 3, 1, 1, 1),
Dates.DateTime(2020, 3, 1, 1, 2),
Dates.DateTime(2020, 3, 1, 1, 3),
]
data = ones(length(time_arr))
dims = OrderedDict("time" => time_arr)
dim_attribs = OrderedDict("time" => Dict("blah" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_s = ClimaAnalysis.Var._dates_to_seconds(var)
@test ClimaAnalysis.times(var_s) == [0.0, 60.0, 120.0]
@test var_s.attributes["start_date"] == "2020-03-01T01:01:00"
# Test for a new start date
var_s = ClimaAnalysis.Var._dates_to_seconds(
var;
new_start_date = "2020-03-01T01:03:00",
)
@test ClimaAnalysis.times(var_s) == [-120.0, -60.0, 0.0]
@test var_s.attributes["start_date"] == "2020-03-01T01:03:00"
# Test for a new start date as a DateTime object
var_s = ClimaAnalysis.Var._dates_to_seconds(
var;
new_start_date = Dates.DateTime("2020-03-01T01:03:00"),
)
@test ClimaAnalysis.times(var_s) == [-120.0, -60.0, 0.0]
@test var_s.attributes["start_date"] == "2020-03-01T01:03:00"
# Test for shifting dates
var_s = ClimaAnalysis.Var._dates_to_seconds(
var,
shift_by = t -> t - Dates.Day(15),
)
@test ClimaAnalysis.times(var_s) == [0.0, 60.0, 120.0]
@test var_s.attributes["start_date"] == "2020-02-15T01:01:00"
# Test for shifting dates and new date together
var_s = ClimaAnalysis.Var._dates_to_seconds(
var;
new_start_date = "2020-03-01T01:00:00",
shift_by = t -> t + Dates.Minute(4),
)
@test ClimaAnalysis.times(var_s) == [300.0, 360.0, 420.0]
@test var_s.attributes["start_date"] == "2020-03-01T01:00:00"
# Test constructor for OutputVar that uses _dates_to_seconds
ncpath = joinpath(@__DIR__, "sample_nc/test_pr.nc")
file_var = ClimaAnalysis.OutputVar(
ncpath;
new_start_date = nothing,
shift_by = identity,
)
@test ClimaAnalysis.times(file_var) == [0.0, 1398902400.0]
@test file_var.attributes["start_date"] == "1979-01-01T00:00:00"
# Test for error handling
# Use date dimension instead of time dimension
date_arr = [
Dates.DateTime(2020, 3, 1, 1, 1),
Dates.DateTime(2020, 3, 1, 1, 2),
Dates.DateTime(2020, 3, 1, 1, 3),
]
data = ones(length(date_arr))
dims = OrderedDict("date" => date_arr)
dim_attribs = OrderedDict("date" => Dict("blah" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.Var._dates_to_seconds(var)
# Cannot convert if the element type of time array is float
time_arr = [0.0, 60.0, 120.0]
data = ones(length(time_arr))
dims = OrderedDict("time" => time_arr)
dim_attribs = OrderedDict("time" => Dict("blah" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.Var._dates_to_seconds(var)
end
@testset "End of previous month" begin
# Shift to beginning of month and shift back one month
time_arr = [
Dates.DateTime("2010-02-01T00:00:00"),
Dates.DateTime("2010-03-01T00:02:00"),
Dates.DateTime("2010-04-01T00:02:00"),
]
data = ones(length(time_arr))
dims = OrderedDict("time" => time_arr)
dim_attribs = OrderedDict("time" => Dict("blah" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_s = ClimaAnalysis.Var._dates_to_seconds(var)
var_times = ClimaAnalysis.shift_to_start_of_previous_month(var_s)
@test ClimaAnalysis.times(var_times) == [0.0, 2678400.0, 5097600.0]
@test var_times.attributes["start_date"] == "2010-01-01T00:00:00"
# Error checking
# Dates in time array
time_arr = [
Dates.DateTime(2020, 3, 1, 1, 1),
Dates.DateTime(2020, 3, 1, 1, 2),
Dates.DateTime(2020, 3, 1, 1, 3),
]
data = ones(length(time_arr))
dims = OrderedDict("time" => time_arr)
dim_attribs = OrderedDict("time" => Dict("blah" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.shift_to_start_of_previous_month(
var,
)
# Time is not a dimension
lon = collect(range(-179.5, 179.5, 360))
lat = collect(range(-89.5, 89.5, 180))
data = ones(length(lon), length(lat))
dims = OrderedDict(["lon" => lon, "lat" => lat])
dim_attribs = OrderedDict([
"lon" => Dict("units" => "deg"),
"lat" => Dict("units" => "deg"),
])
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
@test_throws ErrorException ClimaAnalysis.shift_to_start_of_previous_month(
var,
)
# Units is wrong
time_arr = [
Dates.DateTime("2010-02-01T00:00:00"),
Dates.DateTime("2010-03-01T00:02:00"),
Dates.DateTime("2010-04-01T00:02:00"),
]
data = ones(length(time_arr))
dims = OrderedDict("time" => time_arr)
dim_attribs = OrderedDict("time" => Dict("units" => "blah"))
attribs =
Dict("long_name" => "idk", "short_name" => "short", "units" => "kg")
var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data)
var_min = ClimaAnalysis.Var._dates_to_seconds(var)
var_min.dim_attributes["time"]["units"] = "min"
@test_throws ErrorException ClimaAnalysis.shift_to_start_of_previous_month(
var_min,
)
end

ClimaAnalysis.jl Release Notes
===============================
v0.5.10
-------
## Features
### Set units
You can now set units for an `OutputVar`. This is useful if you need to change the name of
the units or if units are missing.
```julia
new_var = ClimaAnalysis.set_units(var, "kg m s^-1")
```
### Extrapolating `OutputVar` on longitude and latitude
Extrapolation is now possible for the longitude and latitude dimensions. If the dimension
arrays are equispaced and span the entire range, then a periodic boundary condition is added
for the longitude dimension and a flat boundary condition is added for the latitude
dimension.
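For instance, a minimal sketch (the sampled points here are hypothetical, and this assumes
`var` has equispaced longitudes spanning -180 to 180 degrees and equispaced latitudes
spanning -90 to 90 degrees, ordered as (longitude, latitude)):
```julia
var([181.0, 0.0]) # longitude wraps periodically, equivalent to var([-179.0, 0.0])
var([0.0, 92.0])  # latitude uses a flat boundary, equivalent to var([0.0, 90.0])
```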
### Preprocess dates and times
There is now support for preprocessing dates and times. The constructor for reading NetCDF
files now automatically converts dates to seconds in the time dimension. This is done
because `ClimaAnalysis` does not support interpolating on dates, which means functions that
rely on the interpolants, such as `resampled_as`, would not work otherwise.
Also, the constructor supports two additional parameters, `new_start_date` and `shift_by`.
After converting from dates to seconds, the seconds are shifted to match `new_start_date`.
If preprocessing of dates is needed before shifting to `new_start_date`, then the parameter
`shift_by` can be used, as it accepts a function that takes in `Dates.DateTime` elements and
returns `Dates.DateTime` elements. This function is applied to each element of the time array.
```julia
# Shift the dates to first day of month, convert to seconds, and adjust seconds to
# match "1/1/2010"
shift_var = OutputVar(
"test.nc",
"pr",
new_start_date = "1/1/2010", # or Dates.DateTime(2010, 1, 1)
shift_by = Dates.firstdayofmonth,
)
```
The function `Var.shift_to_start_of_previous_month` is added to shift the times in the time
dimension to the start of the previous month. This function is helpful in ensuring consistency
in dates between simulation and observational data. One example of this is when adjusting
monthly averaged data. For instance, data on 2010-02-01 in the `OutputVar` corresponds to
the monthly average for January. This function shifts the times so that 2010-01-01 will
correspond to the monthly average for January.
```julia
sim_var = shift_to_start_of_previous_month(sim_var)
```
## Bug fixes
- Interpolation is not possible with dates. When dates are detected in any dimension, an
interpolant will not be made.
- Fix identifying variables with underscore in the short name (such as
`net_toa_flux`). ([#109](https://github.com/CliMA/ClimaAnalysis.jl/pull/109
"PR109"))
## Minor changes
- `Var.arecompatible` only checks the units of the dimensions instead of checking that the
dimension attributes fully match, as the sketch below illustrates.
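For instance, a sketch of the new behavior (`var_a` and `var_b` are hypothetical
`OutputVar`s whose dimensions share units but differ in other dimension attributes):
```julia
ClimaAnalysis.arecompatible(var_a, var_b) # true, since only the dimension units must match
```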
v0.5.9
------
## Features
### Reorder dimensions
Before, resampling required that the order of the dimensions be the same between the two
`OutputVar`s. This feature adds the functionality of reordering the dimensions in an
`OutputVar` to match the ordering of another `OutputVar`. The function `resampled_as` is
updated to use `reordered_as`. See the example below of this functionality.
```julia
julia> src_var.dims |> keys |> collect
2-element Vector{String}:
"long"
"lat"
julia> dest_var.dims |> keys |> collect
2-element Vector{String}:
"lat"
"long"
julia> reordered_var = ClimaAnalysis.reordered_as(src_var, dest_var);
julia> reordered_var.dims |> keys |> collect
2-element Vector{String}:
"lat"
"long"
```
## Bug fixes
- Fix models repeating in legend of box plots by not considering the models in `model_names`
when finding the best and worst models.
- Fix the legend covering the box plot by adding the parameter `legend_text_width`, which
controls the number of characters on each line of the legend of the box plot.
- Use the default marker size instead of a marker size of 20 when plotting other models
besides `CliMA` on the box plot.
- Fix support for `""` in units.
v0.5.8
------
## Features
This release introduces the following features
- [Directly reading NetCDF files](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#add-support-for-directly-reading-netcdf-files)
- [Resampling a OutputVar using the dimensions from another OutputVar](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#resampling-a-outputvar-using-the-dimensions-from-another-outputvar)
- [Add support for converting units](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#add-support-for-converting-units)
- [Applying a land/sea mask to GeoMakie plots](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#applying-a-landsea-mask-to-geomakie-plots)
- [Integrating OutputVar with respect to longitude or latitude](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#integrating-outputvar-with-respect-to-longitude-or-latitude)
- [Splitting OutputVar by season](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#splitting-outputvar-by-season)
- [Compute bias and squared error between OutputVar](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#compute-bias-and-squared-error-between-outputvar)
- [Represent RMSEs from various models](https://github.com/CliMA/ClimaAnalysis.jl/blob/main/NEWS.md#represent-rmses-from-various-models)
### Add support for directly reading NetCDF files
Starting version 0.5.8, `ClimaAnalysis` now supports NetCDF files that were not
generated with
[`ClimaDiagnostics`](https://github.com/CliMA/ClimaDiagnostics.jl) [0].
To load a `NetCDF` file into a `ClimaAnalysis.OutputVar`, just pass the path of
such file to the constructor
```julia
import ClimaAnalysis: OutputVar
myfile = OutputVar("my_netcdf_file.nc")
```
`ClimaAnalysis` will try to find a variable in this file. If multiple are available,
`ClimaAnalysis` picks the latest in alphabetical order. If you want to specify one,
pass it to the constructor:
```julia
import ClimaAnalysis: OutputVar
myfile = OutputVar("my_netcdf_file.nc", "myvar")
```
[0] Restrictions apply.
### Resampling a `OutputVar` using the dimensions from another `OutputVar`
You can use the `resampled_as(src_var, dest_var)` function, where `src_var` is an
`OutputVar` with the data you want to resample using the dimensions in another
`OutputVar`, `dest_var`. If resampling is possible, then a new `OutputVar` is
returned where the data in `src_var` is resampled using a linear interpolation
to fit the dimensions in `dest_var`. Resampling is not possible when the
dimensions in either `OutputVar`s are missing units, the dimensions between the
`OutputVar`s do not agree, or the data in `src_var` is not defined everywhere on
the dimensions in `dest_var`.
```julia
julia> src_var.data
3×4 Matrix{Float64}:
1.0 4.0 7.0 10.0
2.0 5.0 8.0 11.0
3.0 6.0 9.0 12.0
julia> src_var.dims
OrderedDict{String, Vector{Float64}} with 2 entries:
"lon" => [0.0, 1.0, 2.0]
"latitude" => [0.0, 1.0, 2.0, 3.0]
julia> dest_var.dims # dims that src_var.data should be resampled on
OrderedDict{String, Vector{Float64}} with 2 entries:
"long" => [0.0, 1.0]
"lat" => [0.0, 1.0, 2.0]
julia> resampled_var = ClimaAnalysis.resampled_as(src_var, dest_var);
julia> resampled_var.data
2×3 Matrix{Float64}:
1.0 4.0 7.0
2.0 5.0 8.0
julia> resampled_var.dims # updated dims that are the same as the dims in dest_var
OrderedDict{String, Vector{Float64}} with 2 entries:
"lon" => [0.0, 1.0]
"latitude" => [0.0, 1.0, 2.0]
```
### Add support for converting units
`ClimaAnalysis` now uses
[Unitful](https://painterqubits.github.io/Unitful.jl/stable) to handle variable
units, when possible.
When an `OutputVar` has `units` among its `attributes`, `ClimaAnalysis` will try
to use `Unitful` to parse it. If successful, `OutputVar` can be directly
converted to other compatible units. For example, if `var` has units of `m/s`,
```julia-repl
julia> ClimaAnalysis.convert_units(var, "cm/s")
```
will convert to `cm/s`.
Some units are not recognized by `Unitful`. Please open an issue about that:
we can add more units.
In those cases, or when units are incompatible, you can also pass a
`conversion_function` that specifies how to transform units.
```julia-repl
julia> ClimaAnalysis.convert_units(var, "kg/s", conversion_function = (x) -> 1000x)
```
### Applying a land/sea mask to `GeoMakie` plots
When plotting with `GeoMakie` (ie, using the `contour2D_on_globe!` and
`heatmap2D_on_globe!` function), it is now possible to mask out a portion of the
output. The most common use cases are to hide the ocean or the continents.
To hide the ocean, you can now pass `mask = ClimaAnalysis.Visualize.oceanmask()`
to the globe plotting functions. You can customize how the mask is plotted by
passing the `:mask` extra keywords. For example:
```julia
import ClimaAnalysis.Visualize: contour2D_on_globe!, oceanmask
import ClimaAnalysis.Utils: kwargs as ca_kwargs
import GeoMakie
import CairoMakie
fig = CairoMakie.Figure()
contour2D_on_globe!(fig,
var,
mask = oceanmask(),
more_kwargs = Dict(:mask => ca_kwargs(color = :blue)),
)
CairoMakie.save("myfigure.pdf", fig)
```
### Integrating `OutputVar` with respect to longitude or latitude
You can use the `integrate_lon(var)`, `integrate_lat(var)`, or `integrate_lonlat(var)`
functions for integrating along longitude, latitude, or both respectively. The bounds of
integration are determined by the range of the dimensions longitude and latitude in `var`.
The unit of both longitude and latitude should be degree.
If the points are equispaced, it is assumed that each point corresponds to the midpoint of a
cell, which results in rectangular integration using the midpoint rule. Otherwise, the
integration being done is rectangular integration using the left endpoints for integrating
longitude and latitude. See the example of integrating over a sphere where the data is all
ones to find the surface area of a sphere.
```julia
julia> lon = collect(range(-179.5, 179.5, 360));
julia> lat = collect(range(-89.5, 89.5, 180));
julia> data = ones(length(lon), length(lat));
julia> dims = OrderedDict(["lon" => lon, "lat" => lat]);
julia> dim_attribs = OrderedDict([
"lon" => Dict("units" => "degrees_east"),
"lat" => Dict("units" => "degrees_north"),
]);
julia> attribs = Dict("long_name" => "f");
julia> var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data);
julia> integrated_var = integrate_lonlat(var);
julia> integrated_var.dims # no dimensions since longitude and latitude are integrated out
OrderedDict{String, Vector{Float64}}()
julia> integrated_var.data # approximately 4π (the surface area of a sphere)
0-dimensional Array{Float64, 0}:
12.566530113084296
julia> long_name(integrated_var) # updated long name to reflect the data being integrated
"f integrated over lon (-179.5 to 179.5degrees_east) and integrated over lat (-89.5 to 89.5degrees_north)"
```
### Splitting OutputVar by season
`OutputVar`s can be split by seasons using `split_by_season(var)` provided that a start date
can be found in `var.attributes["start_date"]` and time is a dimension in the `OutputVar`.
The unit of time is expected to be seconds. The function `split_by_season(var)` returns a
vector of four `OutputVar`s with each `OutputVar` corresponding to a season. The months of
the seasons are March to May, June to August, September to November, and December to
February. The order of the vector is MAM, JJA, SON, and DJF. If there are no dates found for
a season, then the `OutputVar` for that season will be an empty `OutputVar`.
```julia
julia> attribs = Dict("start_date" => "2024-1-1");
julia> time = [0.0, 5_184_000.0, 13_132_800.0]; # correspond to dates 2024-1-1, 2024-3-1, 2024-6-1
julia> dims = OrderedDict(["time" => time]);
julia> dim_attribs = OrderedDict(["time" => Dict("units" => "s")]); # unit is second
julia> data = [1.0, 2.0, 3.0];
julia> var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data);
julia> MAM, JJA, SON, DJF = ClimaAnalysis.split_by_season(var);
julia> ClimaAnalysis.isempty(SON) # empty OutputVar because no dates between September to November
true
julia> [MAM.dims["time"], JJA.dims["time"], DJF.dims["time"]]
3-element Vector{Vector{Float64}}:
[5.184e6]
[1.31328e7]
[0.0]
julia> [MAM.data, JJA.data, DJF.data]
3-element Vector{Vector{Float64}}:
[2.0]
[3.0]
[1.0]
```
### Compute bias and squared error between OutputVar
Bias and squared error can be computed from simulation data and observational data in
`OutputVar`s using `bias(sim, obs)` and `squared_error(sim, obs)`. The function `bias(sim,
obs)` returns an `OutputVar` whose data is the bias (`sim.data - obs.data`) and computes the
global bias of `data` in `sim` and `obs` over longitude and latitude. The result is stored
in `var.attributes["global_bias"]`. The function `squared_error(sim, obs)` returns an
`OutputVar` whose data is the squared error (`(sim.data - obs.data)^2`) and computes the
global mean squared error (MSE) and the global root mean squared error (RMSE) of `data` in
`sim` and `obs` over longitude and latitude. The result is stored in
`var.attributes["global_mse"]` and `var.attributes["global_rmse"]`. Resampling is
automatically done by resampling `obs` on `sim`. If you are only interested in computing
global bias, MSE, or RMSE, you can use `global_bias(sim, obs)`, `global_mse(sim, obs)`, or
`global_rmse(sim, obs)`.
As of now, these functions are implemented for `OutputVar`s with only the dimensions
longitude and latitude. Furthermore, units must be supplied for data and dimensions in `sim`
and `obs` and the units for longitude and latitude should be degrees.
Consider the following example, where we compute the bias and RMSE between our simulation
and some observations stored in "ta\_1d\_average.nc".
```julia
julia> obs_var = OutputVar("ta_1d_average.nc"); # load in observational data
julia> sim_var = get(simdir("simulation_output"), "ta"); # load in simulation data
julia> ClimaAnalysis.short_name(sim_var)
"ta"
julia> bias_var = ClimaAnalysis.bias(sim_var, obs_var); # bias_var is a OutputVar that can be plotted
julia> global_bias(sim_var, obs_var)
2.0
julia> units(bias_var)
"K"
julia> se_var = ClimaAnalysis.squared_error(sim_var, obs_var); # can also be plotted
julia> global_mse(sim_var, obs_var)
4.0
julia> global_rmse(sim_var, obs_var)
2.0
julia> units(se_var)
"K^2"
```
### Plotting bias
Building upon the other features introduced in this release, you can now directly plot bias
and root mean squared error between two variables with the `plot_bias_on_globe!` function.
Typically, this is done to compare simulated data against observations.
In the example below, we plot the bias between our simulation and some observations stored
in `ta_1d_average.nc`.
```julia
import ClimaAnalysis
import ClimaAnalysis.Visualize: plot_bias_on_globe!
import GeoMakie
import CairoMakie
obs_var = ClimaAnalysis.OutputVar("ta_1d_average.nc")
sim_var = ClimaAnalysis.get(ClimaAnalysis.simdir("simulation_output"), "ta")
fig = CairoMakie.Figure()
plot_bias_on_globe!(fig, sim_var, obs_var)
CairoMakie.save("myfigure.pdf", fig)
```
### Represent RMSEs from various models
To facilitate analysis of root mean squared errors (RMSEs) over different models and
categories (e.g., seasons) for a single variable of interest, `RMSEVariable` is introduced in
this release. See the examples below of constructing a `RMSEVariable` using a short name, a
vector of model names, a vector of categories, and a dictionary mapping model names to units
or a string of the name of the unit.
```julia
import ClimaAnalysis
rmse_var = ClimaAnalysis.RMSEVariable("ta", ["ACCESS-CM2", "ACCESS-ESM1-5"])
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"),
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"),
)
# Convenience functions if models all share the same unit
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
"K",
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
"K",
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
ones(2, 5),
"K",
)
```
A `RMSEVariable` can be inspected using `model_names`, `category_names`, and `rmse_units`
which provide the model names, the category names, and the units respectively.
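For instance, inspecting the `rmse_var` constructed above might look like this (the return
values are illustrative):
```julia
ClimaAnalysis.model_names(rmse_var)    # ["ACCESS-CM2", "ACCESS-ESM1-5"]
ClimaAnalysis.category_names(rmse_var) # ["DJF", "MAM", "JJA", "SON", "ANN"]
ClimaAnalysis.rmse_units(rmse_var)     # Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K")
```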
#### Reading RMSEs from CSV file
A CSV file containing model names in the first column and root mean squared errors in the
subsequent columns with a header describing each category (e.g., seasons) can be read into
a `RMSEVariable`. See the example below on how to use this functionality.
```julia
rmse_var = ClimaAnalysis.read_rmses("./data/test_csv.csv", "ta")
rmse_var = ClimaAnalysis.read_rmses(
"./data/test_csv.csv",
"ta",
units = Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"), # passing units as a dictionary
)
rmse_var = ClimaAnalysis.read_rmses(
"./data/test_csv.csv",
"ta",
units = "K", # passing units as a string
)
```
#### Indexing
`RMSEVariable` supports indexing by integer or string. See the example for indexing into
a `RMSEVariable`.
```julia
rmse_var["ACCESS-CM2"]
rmse_var[:, "MAM"]
rmse_var["ACCESS-CM2", ["ANN", "DJF", "MAM"]]
rmse_var[2,5] = 11.2
rmse_var[:, :]
```
#### Adding categories, models, and units
Adding categories (e.g., seasons, months, years, etc.), models, and units to a `RMSEVariable`
can be done using `add_category`, `add_model`, and `add_unit!`.
See the example below for how to use this functionality.
```julia
rmse_var2 = ClimaAnalysis.add_category(rmse_var, "Jan") # can take in more than one category
rmse_var = ClimaAnalysis.add_model(rmse_var, "CliMA") # can take in more than one model name
ClimaAnalysis.add_unit!(rmse_var, "CliMA", "K")
ClimaAnalysis.add_unit!(rmse_var, Dict("CliMA" => "K")) # for adding multiple units
```
#### Summary statistics
Comparison between models can be done using `find_best_single_model`,
`find_worst_single_model`, and `median`. The functions `find_best_single_model` and
`find_worst_single_model` default to the category "ANN" (corresponding to the annual mean),
but any category can be considered using the parameter `category_name`. Furthermore, the
model's root mean squared errors (RMSEs) and name are returned. The function `median` only
returns the median model's RMSEs. Any `NaN`s that appear in the data are ignored when
computing the summary statistics. See the example below on how to use this functionality.
```julia
ClimaAnalysis.find_best_single_model(rmse_var, category_name = "DJF")
ClimaAnalysis.find_worst_single_model(rmse_var, category_name = "DJF")
ClimaAnalysis.median(rmse_var)
```
#### Plotting RMSEVariable
`RMSEVariable` can be visualized as a box plot or heat map using `plot_boxplot!` and
`plot_leaderboard!`. The function `plot_boxplot!` makes a box plot for each category in the
`RMSEVariable` and plots any other models as specified by `model_names`. The function
`plot_leaderboard!` makes a heatmap of the RMSEs between the variables of interest and the
categories. The values of the heatmap are normalized by dividing over the median model's
RMSEs for each variable.
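A minimal sketch of how these can be used together (assuming `rmse_var` is the
`RMSEVariable` constructed above and `CairoMakie` is available):
```julia
import CairoMakie
fig = CairoMakie.Figure()
ClimaAnalysis.Visualize.plot_boxplot!(fig, rmse_var, ploc = (1, 1))
ClimaAnalysis.Visualize.plot_leaderboard!(fig, rmse_var, ploc = (2, 1))
CairoMakie.save("leaderboard.png", fig)
```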
## Bug fixes
- Increased the default value for `warp_string` to 72.
- Binary operations between a `Real` and an `OutputVar` now update the interpolation of the
  resulting `OutputVar`.
## New compat requirements
`ClimaAnalysis` 0.5.8 drops support for versions of `GeoMakie` prior to `0.7.3`.
This change is required to acquire land-sea mask data from `GeoMakie`. Version
`0.7.3` specifically is also required because it fixes a precompilation bug in
`GeoMakie`. As a result, the minimum version of `Makie` is now `0.21.5`.
- `GeoMakie` >= 0.7.3
- `Makie` >= 0.21.5
- `CairoMakie` >= 0.12.0
v0.5.7
------
- Add support for evaluating `OutputVar`s onto arbitrary target points (with
multilinear interpolation).
- `average` operations now ignore `NaN`s by default.
- Add `has_*` methods to query whether a `Var` has a given dimension (e.g., `z`).
- Support `Makie` backends besides `CairoMakie`.
- Add methods to get the range and units of a given dimension in `Var`.
v0.5.6
------
- Fix finding variables with name like `clwup_1m_40s_inst.nc` (composed period).
- Add support for weighted averages in `average_lat`.
v0.5.5
------
- Fix reading `NetCDF` files with dimensions in incorrect order.
v0.5.4
------
- Added support for extracting dimensions via functions such as `times`.
- Reorganized internal modules so that each file is a module.
v0.5.3
------
- Add `Visualize.contour2D_on_globe!` for discrete contours.
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 6312 | <h1 align="center">
<img src="logo.svg" width="100px"> <br>
ClimaAnalysis.jl
</h1>
<p align="center">
<strong>Analyzing and visualizing ClimaAtmos simulations</strong>
</p>
[](https://github.com/CliMA/ClimaAnalysis.jl/actions/workflows/CI.yml)
[](https://clima.github.io/ClimaAnalysis.jl/dev/)
[](https://codecov.io/gh/CliMA/ClimaAnalysis.jl)
`ClimaAnalysis.jl` is a Julia library to post-process and visualize `ClimaAtmos`
simulations (and, more generally, NetCDF files).
Check out the [documentation](https://CliMA.github.io/ClimaAnalysis.jl) for more information and tutorials.
## Features
- Read, organize, and process NetCDF files
- Visualize heatmaps and 1D profiles with `Makie`
- Visualize heatmaps on a globe with `GeoMakie`
- Apply averages and other reductions to the output variables
- Slice variables along a given value (e.g., take the slice with altitude of 500 meters)
- Window variables within given ranges (e.g., select times between 10 and 100 days)
- Perform mathematical operations between output variables
- Extract dimensions from conventional names (e.g., `times`)
- Interpolate output variables onto arbitrary points
- Reinterpolate output variables onto pressure levels
## ClimaAnalysis.jl Developer Guidelines
These guidelines aim to ensure consistent code quality, maintainability, and a
smooth collaborative workflow for `ClimaAnalysis.jl`. Please, read these
guidelines even if you are familiar with other CliMA packages as there may be
some differences.
### Tests and environments
We prioritize well-tested code to guarantee `ClimaAnalysis.jl` functions
reliably. Here are some principles we follow:
#### Tests are collected in the `test` folder and are exclusively there
This means that all the tests can be run with `Pkg.test()`.
#### There are no checked `Manifest.toml` files
While checking in `Manifest.toml` files ensures reproducibility, it also
introduces some nuisance, including:
- a lot of git/repository noise just for "up deps";
- multiple environments that have to be managed;
- busywork to keep the manifests updated.
In this repository, we have two environments:
- project,
- documentation.
The project environment defines the test dependencies in its `extras` (to reduce
the number of environments and to avoid the "cannot merge projects" problem).
> :note: Please, open an issue if you find workflow problems/friction with this
> system.
#### Running tests
`ClimaAnalysis.jl` defines the test dependencies directly in the main
`Project.toml`. This means that the package can be tested simply by running `]
test` in a Julia REPL, as shown below:
Start a Julia session in the `ClimaAnalysis` directory:
``` sh
julia --project
```
Enter `Pkg` mode by typing `]`. This will change the prompt. Run `test`.
When doing so, `Julia` will start a new temporary environment where the tests
are run in isolation. Tests are run with bounds checking and deprecation
warnings enabled, which can result in code invalidation and new precompilation.
Note, the project environment does not contain the test dependencies. Therefore,
you will find that some dependencies are missing if you try to "manually" run the
tests in a REPL. To solve this problem, use
[TestEnv](https://github.com/JuliaTesting/TestEnv.jl). Install `TestEnv` in your
base environment (`julia -e 'using Pkg; Pkg.add("TestEnv")'`). Then, when you
want to use the test dependencies, activate it from your REPL with `using
TestEnv; TestEnv.activate()`. This will bump you to an environment where the
test dependencies are available.
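For example, a typical interactive session could look like this (a sketch, assuming the
standard `test/runtests.jl` entry point):
``` julia
# in a REPL started with `julia --project` in the ClimaAnalysis directory
using TestEnv
TestEnv.activate()          # switch to an environment with the test dependencies
include("test/runtests.jl") # run (or re-run) the tests interactively
```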
> :note: Please, open an issue if you find workflow problems/friction with this
> system.
#### Code Formatting with `JuliaFormatter.jl`
One of the tests consists in checking that the code is uniformly formatted. We
use [JuliaFormatter.jl](https://github.com/domluna/JuliaFormatter.jl) to achieve
consistent formatting. Here's how to use it:
You can either install in your base environment with
``` sh
julia -e 'using Pkg; Pkg.add("JuliaFormatter")'
```
or use it from within the `TestEnv` (or base) environments (see previous section).
Then, you can format the package running:
``` julia
using JuliaFormatter; format(".")
```
or just with `format(".")` if the package is already imported.
The rules for formatting are defined in the `.JuliaFormatter.toml`.
If you are used to formatting from the command line instead of the REPL, you can
install `JuliaFormatter` in your base environment and call
``` sh
julia -e 'using JuliaFormatter; format(".")'
```
You could also define a shell alias
``` sh
alias julia_format_here="julia -e 'using JuliaFormatter; format(\".\")'"
```
> :note: Please, open an issue if you find workflow problems/friction with this
> system.
### Documentation
Documentation is generated with
[Documenter.jl](https://documenter.juliadocs.org/stable/). We strive to have
complete and up-to-date information.
To generate documentation, run
``` sh
julia --project=docs docs/make.jl
```
Please, update the documentation if you add new features or change the behavior
of existing ones.
We encourage using `jldoctest` to add and test examples in docstrings.
### Pull Request (PR) and commits
Here's how to structure your contributions effectively:
- Descriptive Title: Briefly summarize the changes your PR introduces. Commit
  titles should preferably be under 50 characters, start with a capital letter,
and use imperative verbs (e.g., "Remove superfluous function call").
- Detailed Description: Explain the purpose of your changes. Focus on the
intent.
- Breaking Changes: If your PR introduces breaking changes, highlight them
clearly in the description.
Your pull request can contain one or multiple commits. In either case, it is
important that each commit is atomic (meaning that each commit represents a
single logical change).
Please, squash commits that represent a single logical change (e.g., do not have
two commits when the second just fixes the first).
Pull requests are not merged, but _rebased_, ensuring a linear history (this is
handled automatically by GitHub).
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 2908 | # API
```@meta
CurrentModule = ClimaAnalysis
```
## Sim
```@docs
Sim.SimDir
Base.get
Sim.available_vars
Sim.available_reductions
Sim.available_periods
```
## Var
```@docs
Var.OutputVar
Var.read_var
Var.is_z_1D
Base.isempty(var::OutputVar)
Var.short_name
Var.long_name
Var.units
Var.has_units
Var.slice
Var.average_lat
Var.weighted_average_lat
Var.average_lon
Var.average_x
Var.average_y
Var.average_xy
Var.average_time
Var.window
Var.arecompatible
Var.center_longitude!
Var.times
Var.dates
Var.longitudes
Var.latitudes
Var.altitudes
Var.time_name
Var.date_name
Var.longitude_name
Var.latitude_name
Var.altitude_name
Var.has_time
Var.has_date
Var.has_longitude
Var.has_latitude
Var.has_altitude
Var.conventional_dim_name
Var.dim_units
Var.range_dim
Var.reordered_as
Var.resampled_as
Var.convert_units
Var.set_units
Var.integrate_lonlat
Var.integrate_lat
Var.integrate_lon
Var.split_by_season(var::OutputVar)
Var.bias
Var.global_bias
Var.squared_error
Var.global_mse
Var.global_rmse
Var.shift_to_start_of_previous_month
```
## Leaderboard
```@docs
Leaderboard.RMSEVariable
Leaderboard.RMSEVariable(short_name, model_names::Vector{String})
Leaderboard.RMSEVariable(short_name, model_names::Vector{String}, units::Dict)
Leaderboard.RMSEVariable(short_name, model_names::Vector{String}, category_names::Vector{String}, units::Dict)
Leaderboard.RMSEVariable(short_name::String, model_names::Vector{String}, category_names::Vector{String}, RMSEs, units::Dict)
Leaderboard.RMSEVariable(short_name, model_names::Vector{String}, units::String)
Leaderboard.RMSEVariable(short_name, model_names::Vector{String}, category_names::Vector{String}, units::String)
Leaderboard.RMSEVariable(short_name::String, model_names::Vector{String}, category_names::Vector{String}, RMSEs, units::String)
Leaderboard.model_names
Leaderboard.category_names
Leaderboard.rmse_units
Leaderboard.read_rmses
Base.getindex(rmse_var::RMSEVariable, model_name, category)
Base.getindex(rmse_var::RMSEVariable, model_name::String)
Base.setindex!(rmse_var::RMSEVariable, rmse, model_name, category)
Base.setindex!(rmse_var::RMSEVariable, rmse, model_name::String)
Leaderboard.add_category
Leaderboard.add_model
Leaderboard.add_unit!
Leaderboard.find_best_single_model
Leaderboard.find_worst_single_model
Leaderboard.median
```
## Utilities
For development and not
```@docs
Utils.match_nc_filename
Utils.squeeze
Utils.nearest_index
Utils.kwargs
Utils.seconds_to_prettystr
Utils.warp_string
```
## Atmos
```@docs
Atmos
Atmos.to_pressure_coordinates
```
## Makie
```@docs
Visualize.heatmap2D!
Visualize.sliced_heatmap!
Visualize.heatmap!
Visualize.line_plot1D!
Visualize.sliced_line_plot!
Visualize.sliced_plot!
Visualize.plot!
Visualize.plot_boxplot!
Visualize.plot_leaderboard!
```
## GeoMakie
```@docs
Visualize.oceanmask
Visualize.landmask
Visualize.contour2D_on_globe!
Visualize.heatmap2D_on_globe!
Visualize.plot_bias_on_globe!
```
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 5702 | # How-to guide and cookbook
## How do I make a line plot with the variable on the y axis instead of the x one?
By default, the plotting functions in `MakieExt` place the variable on the
x axis. If you want it on the y axis instead (e.g., you are plotting the
vertical profile of a column), you can pass the `dim_on_y = true` argument to
the axis.
For instance,
```julia
plot!(var, more_kwargs = Dict(:axis => [:dim_on_y => true]))
```
`ClimaAnalysis.Utils` provides a convenience function `kwargs` to specify
arguments a little bit more easily without having to think about `Symbol`s too
much.
```julia
plot!(var, more_kwargs = Dict(:axis => kwargs(dim_on_y = true)))
```
## How do I take an average of a variable in a given window of time?
You can use the `window` function to select a portion of a given `var`. For
example, to select only the times from 10 to 100 seconds for `var`:
```julia
reduced_var = window(var, "time", left = 10, right = 100)
```
Now, you can apply the usual average functions.
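For example, a sketch that averages the windowed variable over time using `average_time`:
```julia
reduced_var = window(var, "time", left = 10, right = 100)
time_averaged = ClimaAnalysis.average_time(reduced_var)
```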
## How do I make the y axis logscale?
The plotting routines can pass additional arguments to `Makie` through the
`more_kwargs` keyword arguments. `more_kwargs` has to be a dictionary that maps
symbols to a list of pairs. The accepted symbols are `:axis`, `:plot`, and
`:cb`, and the pairs have to be pairs of symbols to values (the keyword
arguments you want to pass down). Given that the type structure is a little
complicated, `ClimaAnalysis` comes with a helper function for you to use. So, if
you want to set the logscale for the `y` axis, you would do something like
```julia
import ClimaAnalysis.Utils: kwargs as ca_kwargs
plot!(fig, var, more_kwargs = Dict(:axis => ca_kwargs(yscale = log)))
```
where inside `ca_kwargs` you pass the arguments you would pass to `Makie.Axis`.
## How do I center my longitude to 180 instead of 0?
You can use the `center_longitude!` function.
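For example (a sketch, assuming `var` has a longitude dimension and that the new center is
passed as the second argument):
```julia
ClimaAnalysis.center_longitude!(var, 180.0)
```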
## How do I find the specific name of a dimension in a `OutputVar` given its conventional name?
Suppose you want to extract the `longitudes` of your data but you don't know if
the dimension was called `lon` or `long`. In this case, you can use the
`Var.longitude_name` function to find the name. This function scans the names
and compares them to a standard list in `ClimaAnalysis.Var.LONGITUDE_NAMES`.
You can also customize that variable if your name is not in that list.
Equivalent functions exist for other dimensions too.
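For instance, a sketch of looking up the name and of extending the list of recognized names:
```julia
lon_name = ClimaAnalysis.Var.longitude_name(var) # e.g., "lon" or "long"
# teach ClimaAnalysis about a non-standard longitude name
push!(ClimaAnalysis.Var.LONGITUDE_NAMES, "my_longitude")
```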
## How do I move to pressure coordinates?
The `Atmos` module in `ClimaAnalysis` comes with a function,
`to_pressure_coordinates` that does precisely that. The function takes an input
`OutputVar` and a pressure `OutputVar`. If the two are compatible, a new
`OutputVar` is returned where the values are linearly interpolated on fixed
pressure levels.
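A minimal sketch (`ta_var` and `pressure_var` are placeholder names for two compatible
`OutputVar`s):
```julia
import ClimaAnalysis.Atmos
ta_in_pressure_coords = Atmos.to_pressure_coordinates(ta_var, pressure_var)
```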
## How do I reorder the dimensions in a `OutputVar` to match the dimensions in another `OutputVar`?
You can use the `reordered_as(src_var, dest_var)` function where `src_var` is a `OutputVar`
with the dimensions you want to reorder to match the dimensions in the OutputVar `dest_var`.
```@setup reordered_as
import ClimaAnalysis
import OrderedCollections: OrderedDict
src_long = 0.0:180.0 |> collect
src_lat = 0.0:90.0 |> collect
src_data = ones(length(src_long), length(src_lat))
src_dims = OrderedDict(["long" => src_long, "lat" => src_lat])
src_attribs = Dict("long_name" => "hi")
src_dim_attribs = OrderedDict([
"long" => Dict("units" => "test_units1"),
"lat" => Dict("units" => "test_units2"),
])
src_var =
ClimaAnalysis.OutputVar(src_attribs, src_dims, src_dim_attribs, src_data)
dest_long = 20.0:180.0 |> collect
dest_lat = 30.0:90.0 |> collect
dest_data = zeros(length(dest_lat), length(dest_long))
dest_dims = OrderedDict(["lat" => dest_lat, "long" => dest_long])
dest_attribs = Dict("long_name" => "hi")
dest_dim_attribs = OrderedDict([
"lat" => Dict("units" => "test_units4"),
"long" => Dict("units" => "test_units3"),
])
dest_var = ClimaAnalysis.OutputVar(
dest_attribs,
dest_dims,
dest_dim_attribs,
dest_data,
)
```
```@repl reordered_as
src_var.dims |> keys |> collect
dest_var.dims |> keys |> collect
reordered_var = ClimaAnalysis.reordered_as(src_var, dest_var);
reordered_var.dims |> keys |> collect
```
## How do I resample the data in a `OutputVar` using the dimensions from another `OutputVar`?
You can use the `resampled_as(src_var, dest_var)` function where `src_var` is a
OutputVar with the data you want to resample using the dimensions in another
OutputVar `dest_var`. If resampling is possible, then a new `OutputVar` is
returned where the data in `src_var` is resampled using a linear interpolation
to fit the dimensions in `dest_var`. Resampling is not possible when the
dimensions in either `OutputVar`s are missing units, the dimensions between the
`OutputVar`s do not agree, or the data in `src_var` is not defined everywhere on
the dimensions in `dest_var`.
```@julia resampled_as
julia> src_var.data
3×4 Matrix{Float64}:
1.0 4.0 7.0 10.0
2.0 5.0 8.0 11.0
3.0 6.0 9.0 12.0
julia> src_var.dims
OrderedDict{String, Vector{Float64}} with 2 entries:
"lon" => [0.0, 1.0, 2.0]
"latitude" => [0.0, 1.0, 2.0, 3.0]
julia> dest_var.dims # dims that src_var.data should be resampled on
OrderedDict{String, Vector{Float64}} with 2 entries:
"long" => [0.0, 1.0]
"lat" => [0.0, 1.0, 2.0]
julia> resampled_var = ClimaAnalysis.resampled_as(src_var, dest_var);
julia> resampled_var.data
2×3 Matrix{Float64}:
1.0 4.0 7.0
2.0 5.0 8.0
julia> resampled_var.dims # updated dims that are the same as the dims in dest_var
OrderedDict{String, Vector{Float64}} with 2 entries:
"lon" => [0.0, 1.0]
"latitude" => [0.0, 1.0, 2.0]
```
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 7243 | # ClimaAnalysis
`ClimaAnalysis`, your one-stop-shop for all your CliMA analysis needs.
## Quick start
### `SimDir`
Suppose you ran a `ClimaAtmos` simulation, and the output is saved in the folder
`simulation_output`. The first step in using `ClimaAnalysis` is to instantiate a
`SimDir`:
``` julia
import ClimaAnalysis
simdir = ClimaAnalysis.SimDir("simulation_output")
```
`ClimaAnalysis.SimDir` scans the `simulation_output`, finds all the output
files, and organizes them.
As of version `0.1.0`, `ClimaAnalysis` uses file names to identify files and
variables. In doing so, `ClimaAnalysis` assumes that the default names for outputs
are used in `ClimaAtmos` (i.e.,
`<short_name>_<reduction_time>_<reduction_type>.nc`, as in `ta_1h_max.nc`, or
`<short_name>_1d_inst.nc`).
Once you have a `SimDir`, you can inspect the output. For example, to find what
variables are available:
``` julia-repl
julia> println(summary(simdir))
Output directory: simulation_output
Variables:
- va
average (2.0h)
- ua
average (6.0h)
- orog
inst (1.0d)
- ta
average (3.0h)
max (4.0h, 3.0h)
min (3.0h)
- ts
max (1.0h)
```
Now, you can access any given variable
``` julia
ta_max = get(simdir; short_name = "ta", reduction = "max", period = "3.0h")
```
`ta_max` is a `OutputVar`, a type that contains the variable as well as some
metadata. When there is only one combination `short_name/reduction/period`, the
function `get` can be used with `get(simdir, short_name)` (e.g., `get(simdir,
"orog")` in the previous example).
Let us learn about `OutputVar`s
### `OutputVar`
`OutputVar`s contain the raw data (in `.data`), the attributes read from the
file, and the information regarding the dimension over which the variable is
defined.
``` julia-repl
julia> ts_max.dims
OrderedCollections.OrderedDict{String, Vector{Float32}} with 4 entries:
"time" => [10800.0, 21600.0, 32400.0, 43200.0]
"lon" => [-180.0, -177.989, -175.978, -173.966, -171.955, -169.944, -167.933, -165.922β¦
"lat" => [-80.0, -77.9747, -75.9494, -73.924, -71.8987, -69.8734, -67.8481, -65.8228, β¦
"z" => [0.0, 5000.0, 10000.0, 15000.0, 20000.0, 25000.0, 30000.0, 35000.0, 40000.0, β¦
```
Here we have the dimensions and their values. The dimensions are ordered as in
the file, so that the first index of `.data` is `time`, and so on.
We can find the attributes of the dimensions in `.attributes`:
``` julia-repl
julia> ts_max.dim_attributes["lon"]
"lon" => Dict("units"=>"degrees_east")
```
Some of the attributes are exposed with function calls. For example
``` julia-repl
julia> long_name(ts_max)
Surface Temperature, max within 1.0 Hour(s)
```
These functions use the attributes in the NetCDF files. When not available, empty strings are returned.
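For instance (the output values here are illustrative):
``` julia-repl
julia> ClimaAnalysis.short_name(ts_max)
"ts"

julia> ClimaAnalysis.units(ts_max)
"K"
```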
Given an `OutputVar`, we can perform manipulations. For instance, we can take
the average over latitudes:
``` julia
ts_max_lat_averaged = ClimaAnalysis.average_lat(ts_max)
```
Now,
``` julia
ts_max_lat_averaged.dims =
OrderedCollections.OrderedDict{String, Vector{Float32}} with 3 entries:
"time" => [10800.0, 21600.0, 32400.0, 43200.0]
"lon" => [-180.0, -177.989, -175.978, -173.966, -171.955, -169.944, -167.933, -165.922β¦
"z" => [0.0, 5000.0, 10000.0, 15000.0, 20000.0, 25000.0, 30000.0, 35000.0, 40000.0, β¦
```
We can also take a time/altitude slice, for example, the plane with altitude closest to 8000 meters.
``` julia
ts_max_lat_averaged_sliced = ClimaAnalysis.slice(ts_max_lat_averaged, 8_000)
```
Alternatively, you can also call `ClimaAnalysis.slice(ts_max_lat_averaged, z = 8_000)`.
Now,
``` julia
ts_max_lat_averaged_sliced.dims =
OrderedCollections.OrderedDict{String, Vector{Float32}} with 2 entries:
"time" => [10800.0, 21600.0, 32400.0, 43200.0]
"lon" => [-180.0, -177.989, -175.978, -173.966, -171.955, -169.944, -167.933, -165.922β¦
```
You can get the dimensions from standard names, for example, to find the
`times`, simply run
``` julia
times(ts_max_lat_averaged_sliced) =
4-element Vector{Float32}:
10800.0
21600.0
32400.0
43200.0
```
`OutputVar`s can be evaluated on arbitrary points. For instance
``` julia-repl
julia> ts_max(12000., 23., 45., 1200.)
```
will return the value of the maximum temperature at time 12000, longitude 23,
latitude 45, and altitude 1200. This can be used to interpolate `OutputVar`s
onto new grids.
#### Mathematical operations
`OutputVar`s support the usual mathematical operations. For instance, if
`ts_max` is an `OutputVar`, `2 * ts_max` will be an `OutputVar` with doubled values.
For binary operations (e.g., `+, -, *, /`), `ClimaAnalysis` will check if the
operation is well defined (i.e., the two variables are defined on the physical
space). Binary operations do remove some attribute information.
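For example, a short sketch (here `ta_min` is a hypothetical second `OutputVar` defined on
the same space):
``` julia
doubled_ts_max = 2 * ts_max  # scalar operations are always well defined
spread = ts_max - ta_min     # hypothetical; works only if the two variables are compatible
```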
#### `Visualize`
We can directly visualize `OutputVar`s.
If [`Makie`](https://docs.makie.org/stable/) is available, `ClimaAnalysis` can
be used for plotting. Importing `Makie` and `ClimaAnalysis` in the same session
automatically loads the necessary `ClimaAnalysis` plotting modules.
If we want to make a heatmap for `ta_max` at a time of 100 s and an altitude `z` of 30000 meters:
``` julia
import CairoMakie
import ClimaAnalysis.Visualize as viz
fig = CairoMakie.Figure(size = (400, 600))
viz.plot!(
fig,
ta_max,
time = 100.0,
z = 30_000.0
)
CairoMakie.save("ta_max.png", fig)
```
If we want to have a line plot, we can simply add another argument (e.g., `lat =
30`), to slice through that value.
If you want to customize some of the properties, you can pass the `more_kwargs`
keyword argument to the `plot!` function. `more_kwargs` is a dictionary that maps
the symbols `:axis`, `:plot`, and `:cb` to additional arguments for the `Axis`,
the plot, and the `Colorbar`, respectively. For instance, to set the
colormap of the colorbar to viridis:
``` julia
viz.plot!(
fig,
ta_max,
time = 100.0,
z = 30_000.0,
more_kwargs = Dict(:cb => [:colormap => :viridis])
)
```
Note the `Symbol` in `:colormap`! `:cb` has to be a mapping of `Symbol`s and
values. `ClimaAnalysis` has a convenience function `kwargs` to more easily pass
down the keyword arguments avoiding this step. With that, the above example becomes
``` julia
import ClimaAnalysis.Utils: kwargs as ca_kwargs
viz.plot!(
fig,
ta_max,
time = 100.0,
z = 30_000.0,
more_kwargs = Dict(:cb => ca_kwargs(colormap = :viridis))
)
```
With `Utils.kwargs`, you can just pass the arguments as you would pass them to
the constructor.
If you need more control over the placement of plots, you can pass
`Makie.GridLayout` objects to the plotting functions. For example,
``` julia
using CairoMakie
fig = Figure()
layout = fig[1, 2] = GridLayout()
viz.plot!(
layout,
ta_max,
time = 100.0,
z = 30_000.0,
more_kwargs = Dict(:cb => ca_kwargs(colormap = :inferno))
)
```
When you pass a `GridLayout`, the optional argument `p_loc` refers to the
placement within the layout. When you pass a `Figure`, it refers to the
placement within the figure.
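For instance, a sketch placing the plot in the second row of the layout (assuming `p_loc`
is passed as a keyword argument):
``` julia
viz.plot!(
    layout,
    ta_max,
    p_loc = (2, 1),
    time = 100.0,
    z = 30_000.0,
)
```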
If you have `GeoMakie` and are working on a variable defined on a `long-lat`
grid, you can directly plot on a projected global surface. For that, load
`GeoMakie` and use the `heatmap2D_on_globe!` function.
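A minimal sketch (assuming `var2d` is an `OutputVar` defined only on longitude and latitude):
``` julia
import GeoMakie
import CairoMakie
import ClimaAnalysis.Visualize as viz
fig = CairoMakie.Figure()
viz.heatmap2D_on_globe!(fig, var2d)
CairoMakie.save("globe.png", fig)
```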
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 5880 | # `RMSEVariable`s
`RMSEVariable`s contain all the information needed to process and compare root mean squared
errors (RMSEs) between different models and categories (e.g., seasons) for a single variable
of interest.
`ClimaAnalysis` provides several constructors for making a `RMSEVariable`. For all
constructors, a short name and a vector of model names must be provided. If units are not
provided, then each model will have no unit which denotes the missing unit. See the examples
below where the constructor can take in a short name, a vector of model names, a vector of
categories, and a dictionary mapping model names to units or a string of the name of the
unit.
```@example rmse_var
import ClimaAnalysis
rmse_var = ClimaAnalysis.RMSEVariable("ta", ["ACCESS-CM2", "ACCESS-ESM1-5"])
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"),
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"),
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
ones(2, 5),
Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"),
)
# Convenience functions if models all share the same unit
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
"K",
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
"K",
)
rmse_var = ClimaAnalysis.RMSEVariable(
"ta",
["ACCESS-CM2", "ACCESS-ESM1-5"],
["DJF", "MAM", "JJA", "SON", "ANN"],
ones(2, 5),
"K",
)
nothing # hide
```
The `RMSEVariable` can be inspected using `model_names`, `category_names`, and `rmse_units`
which provide the model names, the category names, and the units respectively.
```@repl rmse_var
ClimaAnalysis.model_names(rmse_var)
ClimaAnalysis.category_names(rmse_var)
ClimaAnalysis.rmse_units(rmse_var)
```
## Reading RMSEs from CSV file
Typically, the root mean squared errors (RMSEs) of different models across different
categories are stored in a different file and need to be loaded in. `ClimaAnalysis` can load
this information from a CSV file and store it in a `RMSEVariable`. The format of the CSV
file should have a header consisting of the entry "model_name" (or any other text as it is
ignored by the function) and the rest of the entries should be the category names. Each row
after the header should start with the model name and the root mean squared errors for each
category for that model. The entries of the CSV file should be separated by commas.
See the example below using `read_rmses` where data is loaded from `test_csv.csv` and a
short name of `ta` is provided. One can also pass in a dictionary mapping model names to
units for `units` or a string if the units are the same for all the models.
```@example rmse_var
rmse_var = ClimaAnalysis.read_rmses("./data/test_csv.csv", "ta")
rmse_var = ClimaAnalysis.read_rmses(
"./data/test_csv.csv",
"ta",
units = Dict("ACCESS-CM2" => "K", "ACCESS-ESM1-5" => "K"), # passing units as a dictionary
)
rmse_var = ClimaAnalysis.read_rmses(
"./data/test_csv.csv",
"ta",
units = "K", # passing units as a string
)
nothing # hide
```
## Indexing
After loading the data, one may want to inspect, change, or manipulate the data. This is
possible through the indexing functionality that `RMSEVariable` provides. Indexing into a
`RMSEVariable` is similar to, but not the same as, indexing into an array. Indexing by
integer or string is supported, but linear indexing (e.g., `rmse_var[1]`) is not supported.
```@repl rmse_var
rmse_var[:, :]
rmse_var["ACCESS-CM2"]
rmse_var[:, "MAM"]
rmse_var["ACCESS-CM2", ["ANN", "DJF", "MAM"]]
rmse_var[2,5] = 11.2;
rmse_var[:, :]
```
## Adding categories, models, and units
It may be the case that the CSV file does not contain all the models you want to analyze, or
you want to consider another category but do not want to go in and manually edit the CSV
file to add it. `ClimaAnalysis` provides `add_category`, `add_model`, and `add_unit!` for
adding categories, models, and units respectively. Multiple model or categories can be
provided (e.g., `add_model(rmse_var, "model1", "model2")`) in the functions. For adding
multiple units, one can pass in a dictionary mapping model names to units. See the example
below using this functionality.
```@julia rmse_var
rmse_var2 = ClimaAnalysis.add_category(rmse_var, "Jan") # can take in more than one category
rmse_var = ClimaAnalysis.add_model(rmse_var, "CliMA") # can take in more than one model name
ClimaAnalysis.add_unit!(rmse_var, "CliMA", "K")
ClimaAnalysis.add_unit!(rmse_var, Dict("CliMA" => "K")) # for adding multiple units
```
## Summary statistics
`ClimaAnalysis` provides several functions to compute summary statistics. As of now,
`ClimaAnalysis` provides methods for finding the best single model, the worst single model,
and the median model.
The functions `find_best_single_model` and `find_worst_single_model` default to the category
"ANN" (corresponding to the annual mean), but any category can be considered using the
parameter `category_name`. Furthermore, the model's root mean squared errors (RMSEs) and the
model's name are returned. The function `median` only returns the median model's RMSEs.
Any `NaN` that appears in the data is ignored when computing the summary statistics.
See the example below using this functionality.
```@repl rmse_var
ClimaAnalysis.find_best_single_model(rmse_var, category_name = "DJF")
ClimaAnalysis.find_worst_single_model(rmse_var, category_name = "DJF")
ClimaAnalysis.median(rmse_var)
``` | ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 10583 | # `OutputVar`s
`OutputVar`s are the heart of `ClimaAnalysis`. This page is under construction;
in the meantime, consult [`OutputVar`](@ref).
`OutputVar`s can be directly generated from most NetCDF files. Just pass the
path to the constructor:
```julia
import ClimaAnalysis: OutputVar
myfile = OutputVar("my_netcdf_file.nc")
```
`ClimaAnalysis` will try to find a variable in this file. If multiple are available,
`ClimaAnalysis` picks the latest in alphabetical order. If you want to specify one,
pass it to the constructor:
```julia
import ClimaAnalysis: OutputVar
myfile = OutputVar("my_netcdf_file.nc", "myvar")
```
## Physical units
`OutputVar`s can contain information about their physical units. For
`OutputVar`s read from NetCDF files, this is obtained from the `units` attribute
(and stored in the `attributes["units"]`).
When possible, `ClimaAnalysis` uses
[Unitful](https://painterqubits.github.io/Unitful.jl/stable) to handle units.
This enables automatic unit conversion for `OutputVar`s.
Consider the following example:
```julia
import ClimaAnalysis
values = 0:100.0 |> collect
data = copy(values)
attribs = Dict("long_name" => "speed", "units" => "m/s")
dim_attribs = Dict{String, Any}()
var = ClimaAnalysis.OutputVar(attribs, Dict("distance" => values), dim_attribs, data)
var_cms = ClimaAnalysis.convert_units(var, "cm/s")
```
In this example, we set up `var`, an `OutputVar` with units of meters per second.
Then, we called [`ClimaAnalysis.convert_units`](@ref) to convert the units to
centimeters per second.
Sometimes, this automatic unit conversion is not possible (e.g., when you want
to transform between incompatible units). In this case, you can pass a function
that specifies how to apply this transformation. For example, in the previous
case, we can assume that we are talking about water and transform units into a
mass flux:
```julia
new_var = ClimaAnalysis.convert_units(var, "kg m/s", conversion_function = (x) -> 1000x)
```
!!! note
    If you find some unparseable units, please open an issue. We can fix them!
If units do not exist or you want to change the name of the units, then one can uses the
`set_units` function.
```julia
new_var = ClimaAnalysis.set_units(var, "kg m s^-1")
```
!!! warning "Override existing units"
If units already exist, this will override the units for data in `var`.
## Interpolations and extrapolations
Interpolating a `OutputVar` onto coordinates can be done by doing the following:
```julia
var((0.0, 0.0)) # var is a two-dimensional OutputVar
```
A multilinear interpolation is used to determine the value at the coordinate (0, 0).
!!! warning "Interpolate on dates"
If any of the dimensions contains `Dates.DateTime` elements, interpolation is not
possible. `Interpolations.jl` does not support interpolating on dates.
Extrapolating is supported only on the longitude and latitude dimensions. For the longitude
and latitude dimensions, a periodic boundary condition and a flat boundary condition are
added, respectively, when the dimension array is equispaced and spans the entire range. For
all other cases, extrapolating beyond the domain of the dimension will throw an error.
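For example (a sketch, assuming `var` is two-dimensional with equispaced longitudes and
latitudes spanning the entire range):
```julia
var((185.0, 0.0)) # longitude wraps around periodically
var((0.0, 92.0))  # latitude is extended with a flat boundary (edge value)
```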
## Preprocess dates and seconds
When loading a NetCDF file, dates in the time dimension are automatically converted to
seconds and a start date is added to the attributes of the `OutputVar`. This is done because
`ClimaAnalysis` does not support interpolating on dates, which means that functions relying
on interpolation, such as `resampled_as`, would not work otherwise.
Two additional parameters are provided to help preprocess dates which are `new_start_date`
and `shift_by`. If `new_start_date` is provided, then dates in the time dimension will
automatically be converted with reference to the `new_start_date` rather than the first date
found in the NetCDF file. The parameter `new_start_date` can be any string parseable by the
[Dates](https://docs.julialang.org/en/v1/stdlib/Dates/) module or a `Dates.DateTime` object.
If additional preprocessing is needed, then one can provide a function that takes in and
returns a `Dates.DateTime` object. This function is applied to each date before the dates
are converted to seconds with reference to the start date.
```@julia dates_to_seconds
# Shift the dates to first day of month, convert to seconds, and adjust seconds to
# match the date 1/1/2010
obs_var = ClimaAnalysis.OutputVar(
"pr.nc",
"precip",
new_start_date = "2010-01-01T00:00:00", # or Dates.DateTime(2010, 1, 1)
shift_by = Dates.firstdayofmonth,
)
```
Additionally, the function `shift_to_start_of_previous_month(var::OutputVar)` is provided to
help with preprocessing. This function shifts the times in the time dimension to the start
of the previous month. After applying this function, the start date in the attributes
corresponds to the first element in the time array.
```@julia beginning
sim_var = shift_to_start_of_previous_month(sim_var)
```
This function is helpful in ensuring consistency in dates between simulation and
observational data. One example of this is when adjusting monthly averaged data. For
instance, suppose that data on 2010-02-01 in `sim_var` corresponds to the monthly
average for January. This function shifts the times so that 2010-01-01 will correspond to
the monthly average for January.
## Integration
`OutputVar`s can be integrated with respect to longitude, latitude, or both using
`integrate_lon(var)`, `integrate_lat(var)`, or `integrate_lonlat(var)` respectively. The
bounds of integration are determined by the range of the dimensions longitude and latitude
in `var`. The unit of both longitude and latitude should be degree.
If the points are equispaced, it is assumed that each point corresponds to the midpoint of a
cell, which results in rectangular integration using the midpoint rule. Otherwise, the
integration being done is rectangular integration using the left endpoints for integrating
longitude and latitude.
See the example of integrating over a sphere where the data is all ones to find the surface
area of a sphere.
```@julia integrate_lonlat
julia> lon = collect(range(-179.5, 179.5, 360));
julia> lat = collect(range(-89.5, 89.5, 180));
julia> data = ones(length(lon), length(lat));
julia> dims = OrderedDict(["lon" => lon, "lat" => lat]);
julia> dim_attribs = OrderedDict([
"lon" => Dict("units" => "degrees_east"),
"lat" => Dict("units" => "degrees_north"),
]);
julia> attribs = Dict("long_name" => "f");
julia> var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data);
julia> integrated_var = integrate_lonlat(var);
julia> integrated_var.dims # no dimensions since longitude and latitude are integrated out
OrderedDict{String, Vector{Float64}}()
julia> integrated_var.data # approximately 4π (the surface area of a sphere)
0-dimensional Array{Float64, 0}:
12.566530113084296
julia> long_name(integrated_var) # updated long name to reflect the data being integrated
"f integrated over lon (-179.5 to 179.5degrees_east) and integrated over lat (-89.5 to 89.5degrees_north)"
```
## Split by season
`OutputVar`s can be split by seasons using `split_by_season(var)` provided that a start date
can be found in `var.attributes["start_date"]` and time is a dimension in the `OutputVar`.
The unit of time is expected to be second. The function `split_by_season(var)` returns a
vector of four `OutputVar`s with each `OutputVar` corresponding to a season. The months of
the seasons are March to May, June to August, September to November, and December to
February. The order of the vector is MAM, JJA, SON, and DJF. If there are no dates found for
a season, then the `OutputVar` for that season will be an empty `OutputVar`.
```@julia split_by_season
julia> attribs = Dict("start_date" => "2024-1-1");
julia> time = [0.0, 5_184_000.0, 13_132_800.0]; # correspond to dates 2024-1-1, 2024-3-1, 2024-6-1
julia> dims = OrderedDict(["time" => time]);
julia> dim_attribs = OrderedDict(["time" => Dict("units" => "s")]); # unit is second
julia> data = [1.0, 2.0, 3.0];
julia> var = ClimaAnalysis.OutputVar(attribs, dims, dim_attribs, data);
julia> MAM, JJA, SON, DJF = ClimaAnalysis.split_by_season(var);
julia> ClimaAnalysis.isempty(SON) # empty OutputVar because no dates between September to November
true
julia> [MAM.dims["time"], JJA.dims["time"], DJF.dims["time"]]
3-element Vector{Vector{Float64}}:
[5.184e6]
[1.31328e7]
[0.0]
julia> [MAM.data, JJA.data, DJF.data]
3-element Vector{Vector{Float64}}:
[2.0]
[3.0]
[1.0]
```
## Bias and squared error
Bias and squared error can be computed from simulation data and observational data in
`OutputVar`s using `bias(sim, obs)` and `squared_error(sim, obs)`. The function `bias(sim,
obs)` returns a `OutputVar` whose data is the bias (`sim.data - obs.data`) and computes the
global bias of `data` in `sim` and `obs` over longitude and latitude. The result is stored
in `var.attributes["global_bias"]`. The function `squared_error(sim, obs)` returns a
`OutputVar` whose data is the squared error (`(sim.data - obs.data)^2`) and computes the
global mean squared error (MSE) and the global root mean squared error (RMSE) of `data` in
`sim` and `obs` over longitude and latitude. The result is stored in
`var.attributes["global_mse"]` and `var.attributes["global_rmse"]`. Resampling is
automatically done by resampling `obs` on `sim`. If you are only interested in computing
global bias, MSE, or RMSE, you can use `global_bias(sim, obs)`, `global_mse(sim, obs)`, or
`global_rmse(sim, obs)`.
As of now, these functions are implemented for `OutputVar`s with only the dimensions
longitude and latitude. Furthermore, units must be supplied for data and dimensions in `sim`
and `obs` and the units for longitude and latitude should be degrees.
Consider the following example, where we compute the bias and RMSE between our simulation
and some observations stored in "ta\_1d\_average.nc".
```@julia bias_and_mse
julia> obs_var = OutputVar("ta_1d_average.nc"); # load in observational data
julia> sim_var = get(simdir("simulation_output"), "ta"); # load in simulation data
julia> ClimaAnalysis.short_name(sim_var)
"ta"
julia> bias_var = ClimaAnalysis.bias(sim_var, obs_var); # bias_var is a OutputVar that can be plotted
julia> global_bias(sim_var, obs_var)
2.0
julia> units(bias_var)
"K"
julia> se_var = ClimaAnalysis.squared_error(sim_var, obs_var); # can also be plotted
julia> global_mse(sim_var, obs_var)
4.0
julia> global_rmse(sim_var, obs_var)
2.0
julia> units(se_var)
"K^2"
```
| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 2718 | # Visualizing `OutputVar`s
This page is under construction; in the meantime, consult [`Visualize`](@ref).
### Masking part of the output in `GeoMakie`
When performing ocean or land simulations, it is often convenient to hide the
other component (e.g., hide the ocean and focus on the continents). For
`GeoMakie` plots, there is a direct way to accomplish this. In this section, we
discuss this feature.
The main `GeoMakie` plots are [`Visualize.contour2D_on_globe!`](@ref) and
[`Visualize.heatmap2D_on_globe!`](@ref). Both these functions take a `mask` argument. By
default, `mask` is `nothing`, meaning that the entire output is displayed on the
globe. Alternatively, `mask` can be a collection of polygons that can be plotted
with [`Makie.poly`](https://docs.makie.org/v0.21/reference/plots/poly).
`ClimaAnalysis` comes with the most important ones [`Visualize.oceanmask`](@ref) and
[`Visualize.landmask`](@ref), to hide ocean and continents respectively.
For example, suppose `var` is the variable we want to plot with an ocean mask
```julia
import ClimaAnalysis.Visualize: contour2D_on_globe!, oceanmask
import ClimaAnalysis.Utils: kwargs as ca_kwargs
import GeoMakie
import CairoMakie
fig = CairoMakie.Figure()
contour2D_on_globe!(fig,
var,
mask = oceanmask(),
more_kwargs = Dict(:mask => ca_kwargs(color = :blue)),
)
CairoMakie.save("myfigure.pdf", fig)
```
In this example, we plotted `var` on the globe and overplotted a blue ocean.
`ca_kwargs` (`Utils.kwargs`) is a convenience function to pass keyword arguments
more easily.
!!! note
    Masking does not affect the colorbar. If you have values defined beneath the map,
    they can still affect the colorbar.
The output might look something like:

### Plotting bias
After [computing the bias](@ref bias) between observational and simulation data, you may
want to plot the bias and display information such as the root mean squared error (RMSE) and
the global bias in the plot. To do this, you use the function [`plot_bias_on_globe!(fig, sim,
obs)`](@ref Visualize.plot_bias_on_globe!). In the example below, we plot the bias between our
simulation and some observations stored in `ta_1d_average.nc`.
```julia
import ClimaAnalysis
import ClimaAnalysis.Visualize: plot_bias_on_globe!
import GeoMakie
import CairoMakie
obs_var = ClimaAnalysis.OutputVar("ta_1d_average.nc")
sim_var = ClimaAnalysis.get(ClimaAnalysis.simdir("simulation_output"), "ta")
fig = CairoMakie.Figure()
plot_bias_on_globe!(fig, sim_var, obs_var)
CairoMakie.save("myfigure.pdf", fig)
```
The output produces something like:
 | ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"Apache-2.0"
] | 0.5.10 | 1f6c4859eafc66f1b6df4932bd7040747581d816 | docs | 3247 | # Visualizing `RMSEVariable`s
Instead of computing summary statistics, it may be more helpful to plot a box plot or a
heatmap. `ClimaAnalysis` provides the functions `plot_boxplot!` and `plot_leaderboard!`
to help visualize the root mean squared errors (RMSEs) in a `RMSEVariable`.
The function [`Visualize.plot_boxplot!`](@ref) makes a box plot for each category in the
`RMSEVariable`. The best and worst models, as well as any other models in `model_names`, are
plotted. When finding the best and worst single models, any models in `model_names` will be
excluded. The category to find the best and worst model defaults to "ANN", but can be
changed using the parameter `best_and_worst_category_name`.
The function [`Visualize.plot_leaderboard!`](@ref) makes a heatmap of the
RMSEs between the variables of interest and the categories. The best model for each variable
of interest and the models in `model_names` are shown in the heatmap. Similar to
`plot_boxplot!`, the category to find the best model defaults to "ANN", but can be changed
using the parameter `best_category_name`. The values of the heatmap are normalized by
dividing over the median model's RMSEs for each variable.
```@setup plotting
import ClimaAnalysis
import CairoMakie
csv_file_path = "./data/test_csv.csv"
rmse_var_ta = ClimaAnalysis.read_rmses(csv_file_path, "ta")
rmse_var_ta = ClimaAnalysis.add_model(rmse_var_ta, "CliMA", "test1", "test2")
rmse_var_ta[:, :] = [
[10.0 11.0 12.0 13.0 14.0]
[36.0 37.0 38.0 39.0 30.0]
[11.0 12.0 13.0 14.0 15.0]
[13.0 13.0 13.0 13.0 15.0]
[24.0 24.0 24.0 24.0 24.0]
]
ClimaAnalysis.add_unit!(
rmse_var_ta,
Dict(
"ACCESS-ESM1-5" => "K",
"ACCESS-CM2" => "K",
"CliMA" => "K",
"test1" => "K",
"test2" => "K",
),
)
rmse_var_pr = ClimaAnalysis.read_rmses(csv_file_path, "pr")
rmse_var_pr = ClimaAnalysis.add_model(rmse_var_pr, "CliMA")
rmse_var_pr[:, :] = [
[6.0 7.0 8.0 9.0 10.0]
[11.0 12.0 13.0 14.0 15.0]
[1.0 2.0 3.0 4.0 11.0]
]
ClimaAnalysis.add_unit!(
rmse_var_pr,
Dict(
"ACCESS-ESM1-5" => "kg m^-2 s^-1",
"ACCESS-CM2" => "kg m^-2 s^-1",
"CliMA" => "kg m^-2 s^-1",
),
)
rmse_var_ha = ClimaAnalysis.read_rmses(csv_file_path, "ha")
rmse_var_ha = ClimaAnalysis.add_model(rmse_var_ha, "CliMA")
rmse_var_ha[:, :] = [
[0.5 1.0 1.5 2.0 2.5]
[6.0 7.0 8.0 9.0 10.0]
[11.0 12.0 13.0 14.0 7.0]
]
ClimaAnalysis.add_unit!(
rmse_var_ha,
Dict(
"ACCESS-ESM1-5" => "m^2 s^-2",
"ACCESS-CM2" => "m^2 s^-2",
"CliMA" => "m^2 s^-2",
),
)
```
```@example plotting
import ClimaAnalysis
import CairoMakie
# Plot box plots
rmse_vars = (rmse_var_ta, rmse_var_pr, rmse_var_ha)
fig = CairoMakie.Figure(; size = (800, 300 * 3 + 400), fontsize = 20)
for i in 1:3
ClimaAnalysis.Visualize.plot_boxplot!(
fig,
rmse_vars[i],
ploc = (i, 1),
best_and_worst_category_name = "ANN",
)
end
# Plot leaderboard
ClimaAnalysis.Visualize.plot_leaderboard!(
fig,
rmse_vars...,
best_category_name = "ANN",
ploc = (4, 1),
)
CairoMakie.save("./assets/boxplot_and_leaderboard.png", fig)
nothing # hide
```

| ClimaAnalysis | https://github.com/CliMA/ClimaAnalysis.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 463 | using Documenter, Carlo, Carlo.JobTools
makedocs(
sitename = "Carlo.jl",
format = Documenter.HTML(prettyurls = false),
checkdocs = :all,
pages = [
"index.md",
"abstract_mc.md",
"Advanced Topics" => [
"evaluables.md",
"jobtools.md",
"resulttools.md",
"rng.md",
"parallel_run_mode.md",
],
],
)
deploydocs(repo = "github.com/lukas-weber/Carlo.jl.git")
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 659 | module Carlo
export AbstractMC,
MCContext, measure!, is_thermalized, write_checkpoint, read_checkpoint!, read_checkpoint
export Evaluator, evaluate!, start
using Statistics
include("jobtools/JobTools.jl")
include("resulttools/ResultTools.jl")
include("log.jl")
include("util.jl")
include("random_wrap.jl")
include("accumulator.jl")
include("measurements.jl")
include("mc_context.jl")
include("abstract_mc.jl")
include("version.jl")
include("results.jl")
include("merge.jl")
include("evaluable.jl")
include("run.jl")
include("scheduler_task.jl")
include("scheduler_single.jl")
include("scheduler_mpi.jl")
include("cli.jl")
include("precompile.jl")
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2965 | using MPI
"""
This type is an interface for implementing your own Monte Carlo algorithm that will be run by Carlo.
"""
abstract type AbstractMC end
"""
Carlo.init!(mc::YourMC, ctx::MCContext, params::AbstractDict)
Executed when a simulation is started from scratch.
"""
function init! end
init!(mc::AbstractMC, ctx::MCContext, params::AbstractDict, comm::MPI.Comm) =
init!(mc, ctx, params)
"""
Carlo.sweep!(mc::YourMC, ctx::MCContext)
Perform one Monte Carlo sweep or update to the configuration.
!!! note
Doing measurements is supported during this step as some algorithms require doing so for efficiency. Remember to check for [`is_thermalized`](@ref) in that case.
"""
function sweep! end
sweep!(mc::AbstractMC, ctx::MCContext, comm::MPI.Comm) = sweep!(mc, ctx)
"""
Carlo.measure!(mc::YourMC, ctx::MCContext)
Perform one Monte Carlo measurement.
"""
function measure! end
function measure!(mc::AbstractMC, ctx::MCContext, comm::MPI.Comm)
if MPI.Comm_size(comm) == 1
measure!(mc, ctx)
else
error(
"running in parallel run mode but measure(::MC, ::MCContext, ::MPI.Comm) not implemented",
)
end
return nothing
end
"""
Carlo.write_checkpoint(mc::YourMC, out::HDF5.Group)
Save the complete state of the simulation to `out`.
"""
function write_checkpoint end
function write_checkpoint(
mc::AbstractMC,
dump_file::Union{HDF5.Group,Nothing},
comm::MPI.Comm,
)
if MPI.Comm_size(comm) == 1
write_checkpoint(mc, dump_file)
else
error(
"running in parallel run mode but write_checkpoint(::MC, ::Union{HDF5.Group,Nothing}, ::MPI.Comm) not implemented",
)
end
return nothing
end
function write_checkpoint(obj, dump_file::HDF5.Group)
@warn "checkpointing $(typeof(obj)) not supported. Implement Carlo.write_checkpoint."
return nothing
end
"""
Carlo.read_checkpoint!(mc::YourMC, in::HDF5.Group)
Read the state of the simulation from `in`.
"""
function read_checkpoint! end
function read_checkpoint!(
mc::AbstractMC,
dump_file::Union{HDF5.Group,Nothing},
comm::MPI.Comm,
)
if MPI.Comm_size(comm) == 1
read_checkpoint!(mc, dump_file)
else
error(
"running in parallel run mode but read_checkpoint!(::MC, ::Union{HDF5.Group,Nothing}, ::MPI.Comm) not implemented",
)
end
return nothing
end
function read_checkpoint(::Type{T}, dump_file::HDF5.Group) where {T}
@warn "checkpointing $(T) not supported. Implement Carlo.read_checkpoint. Attempting to construct $(T)() as a stand-in..."
return T()
end
"""
Carlo.register_evaluables(mc::Type{YourMC}, eval::Evaluator, params::AbstractDict)
This function is used to calculate postprocessed quantities from quantities that were measured during the simulation. Common examples are variances or ratios of observables.
See [evaluables](@ref) for more details.
"""
function register_evaluables end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 3485 | using HDF5
using ElasticArrays
const binning_output_chunk_size = 1000
mutable struct Accumulator{T<:Number,N,M}
const bin_length::Int64
bins::ElasticArray{T,N,M,Vector{T}}
current_filling::Int64
end
function Accumulator{T}(bin_length::Integer, shape::Tuple{Vararg{Integer}}) where {T}
if bin_length < 1
throw(ArgumentError("bin_length ($bin_length) needs to be >= 1"))
end
bins = ElasticArray{T}(undef, shape..., 1)
bins .= 0
return Accumulator(Int64(bin_length), bins, Int64(0))
end
Base.isempty(acc::Accumulator) = num_bins(acc) == 0 && acc.current_filling == 0
has_complete_bins(acc::Accumulator) = num_bins(acc) > 0
Statistics.mean(acc::Accumulator) =
dropdims(mean(bins(acc); dims = ndims(acc.bins)); dims = ndims(acc.bins))
std_of_mean(acc::Accumulator) = dropdims(
std(bins(acc); dims = ndims(acc.bins)) / sqrt(num_bins(acc));
dims = ndims(acc.bins),
)
bins(acc::Accumulator) = Array(@view acc.bins[axes(acc.bins)[1:end-1]..., 1:end-1])
shape(acc::Accumulator) = size(acc.bins)[1:end-1]
num_bins(acc::Accumulator) = size(acc.bins)[end] - 1
function add_sample!(acc::Accumulator, value)
if size(value) != shape(acc)
error(
"size of added value ($(length(value))) does not size of accumulator ($(shape(acc)))",
)
end
current_bin = @view acc.bins[axes(acc.bins)[1:end-1]..., end:end]
# this one avoids some allocations
for i in eachindex(value)
current_bin[i] += value[i]
end
acc.current_filling += 1
if acc.current_filling == acc.bin_length
current_bin ./= acc.bin_length
append!(acc.bins, zeros(shape(acc)...))
acc.current_filling = 0
end
return nothing
end
function write_measurements!(acc::Accumulator{T}, out::HDF5.Group) where {T}
if has_complete_bins(acc)
if haskey(out, "samples")
saved_samples = out["samples"]
old_bin_count = size(saved_samples, ndims(saved_samples))
else
out["bin_length"] = acc.bin_length
saved_samples = create_dataset(
out,
"samples",
eltype(acc.bins),
((shape(acc)..., num_bins(acc)), (shape(acc)..., -1));
chunk = (shape(acc)..., binning_output_chunk_size),
)
attributes(out["samples"])["v0.2_format"] = true
old_bin_count = 0
end
HDF5.set_extent_dims(saved_samples, (shape(acc)..., old_bin_count + num_bins(acc)))
saved_samples[axes(saved_samples)[1:end-1]..., old_bin_count+1:end] = bins(acc)
acc.bins = acc.bins[axes(acc.bins)[1:end-1]..., end:end]
end
return nothing
end
function write_checkpoint(acc::Accumulator, out::HDF5.Group)
out["bin_length"] = acc.bin_length
out["current_bin_filling"] = acc.current_filling
out["samples"] = Array(acc.bins)
if size(out["samples"]) == (1, 1)
attributes(out["samples"])["v0.2_format"] = true
end
return nothing
end
function read_checkpoint(::Type{<:Accumulator}, in::HDF5.Group)
samples = read(in, "samples")
# TODO: this maintains checkpoint compatibility with Carlo v0.1.5. remove in v0.3
if size(samples) == (1, 1) && !haskey(attributes(in["samples"]), "v0.2_format")
samples = dropdims(samples; dims = 1)
end
return Accumulator(
read(in, "bin_length"),
ElasticArray(samples),
read(in, "current_bin_filling"),
)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 3296 | using ArgParse
using PrecompileTools
using PrettyTables
"""
start(job::JobInfo, ARGS)
Call this from your job script to start the Carlo command line interface.
If for any reason you do not want to use job scripts, you can directly schedule a job using
start(Carlo.MPIScheduler, job)
"""
function start(job::JobInfo, args::AbstractVector{<:AbstractString})
s = ArgParseSettings()
@add_arg_table! s begin
"run", "r"
help = "Starts a simulation"
action = :command
"status", "s"
help = "Check the progress of a simulation"
action = :command
"merge", "m"
help = "Merge results of an incomplete simulation"
action = :command
"delete", "d"
help = "Clean up a simulation directory"
action = :command
end
@add_arg_table! s["run"] begin
"--single", "-s"
help = "run in single core mode"
action = :store_true
"--restart", "-r"
help = "delete existing files and start from scratch"
action = :store_true
end
parsed_args = parse_args(args, s)
if isnothing(parsed_args)
return nothing
end
cmd = parsed_args["%COMMAND%"]
cmd_funcs = Dict(
"run" => cli_run,
"merge" => cli_merge,
"status" => cli_status,
"delete" => cli_delete,
)
return cmd_funcs[cmd](job, parsed_args[cmd])
end
function cli_run(job::JobInfo, args::AbstractDict)
if args["restart"]
if args["single"] || (MPI.Init(); MPI.Comm_rank(MPI.COMM_WORLD)) == 0
cli_delete(job, Dict())
end
end
scheduler = args["single"] ? SingleScheduler : MPIScheduler
if scheduler == MPIScheduler && (MPI.Init(); MPI.Comm_size(MPI.COMM_WORLD)) == 1
@info "running with a single process: defaulting to --single scheduler"
scheduler = SingleScheduler
end
return with_logger(default_logger()) do
start(scheduler, job)
end
end
function cli_status(job::JobInfo, ::AbstractDict)
try
tasks = JobTools.read_progress(job)
data = permutedims(
hcat(
(
[
basename(x.dir),
x.sweeps,
x.target_sweeps,
x.num_runs,
"$(round(Int,100*x.thermalization_fraction))%",
] for x in tasks
)...,
),
)
header = ["task", "sweeps", "target", "runs", "thermalized"]
pretty_table(data, vlines = :none, header = header, crop = :none)
return all(map(x -> x.sweeps >= x.target_sweeps, tasks))
catch err
if isa(err, Base.IOError)
@error "Could not read job progress. Not run yet?"
exit(1)
else
rethrow(err)
end
end
end
function cli_delete(job::JobInfo, ::AbstractDict)
rm("$(job.dir)/../$(job.name).results.json"; force = true)
rm(job.dir; recursive = true, force = true)
return nothing
end
function cli_merge(job::JobInfo, ::AbstractDict)
for task in job.tasks
merge_results(job.mc, JobTools.task_dir(job, task); parameters = task.params)
end
JobTools.concatenate_results(job)
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 3799 |
struct Evaluable{T<:Number,R<:Real}
internal_bin_length::Int64
rebin_length::Int64
rebin_count::Int64
mean::Vector{T}
error::Vector{R}
end
function jackknife(func::Function, sample_set::Tuple{Vararg{AbstractArray,N}}) where {N}
sample_count = minimum(x -> last(size(x)), sample_set)
# truncate sample counts to smallest
sample_set = map(s -> s[axes(s)[1:end-1]..., 1:sample_count], sample_set)
# the .+0 is a trick to decay 0-dim arrays to scalars
sums = map(s -> dropdims(sum(s; dims = ndims(s)); dims = ndims(s)) .+ 0, sample_set)
# evaluation based on complete dataset (truncated to the lowest sample_count)
complete_eval = func((sums ./ sample_count)...)
# evaluation on the jacked datasets
jacked_eval_mean = zero(complete_eval)
for k = 1:sample_count
jacked_means = (
(sum .- view(samples, axes(samples)[1:end-1]..., k)) ./ (sample_count - 1) for
(sum, samples) in zip(sums, sample_set)
)
jacked_eval_mean += func(jacked_means...)
end
jacked_eval_mean /= sample_count
@assert length(complete_eval) == length(jacked_eval_mean)
# mean and error
bias_corrected_mean =
sample_count * complete_eval .- (sample_count - 1) * jacked_eval_mean
error = real.(zero(complete_eval))
for k = 1:sample_count
jacked_means = (
(sum .- view(samples, axes(samples)[1:end-1]..., k)) ./ (sample_count - 1) for
(sum, samples) in zip(sums, sample_set)
)
# use abs2 to give real number error for complex number variables
error += abs2.(func(jacked_means...) - jacked_eval_mean)
end
error = sqrt.((sample_count - 1) .* error ./ sample_count)
return vec(collect(bias_corrected_mean)), vec(collect(error))
end
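# Example (cf. the unit tests): jackknife((x, y) -> x / y, ([2, 3, 4], [5, 4, 3]))
# propagates the per-bin means through x/y, returning a bias-corrected mean of
# roughly 0.713 with a jackknife error of roughly 0.257.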
function evaluate(
evaluation::Func,
used_observables::NTuple{N,ResultObservable},
)::Union{Evaluable,Nothing} where {Func,N}
internal_bin_length = minimum(obs -> obs.internal_bin_length, used_observables)
rebin_length = minimum(obs -> obs.rebin_length, used_observables)
bin_count = minimum(rebin_count, used_observables)
if bin_count == 0
return nothing
end
return Evaluable(
internal_bin_length,
rebin_length,
bin_count,
jackknife(evaluation, map(obs -> obs.rebin_means, used_observables))...,
)
end
function ResultObservable(eval::Evaluable)
return ResultObservable(
eval.internal_bin_length,
eval.rebin_length,
eval.mean,
eval.error,
fill(NaN, size(eval.mean)...),
eltype(eval.mean)[],
)
end
struct Evaluator
observables::Dict{Symbol,ResultObservable}
evaluables::Dict{Symbol,Evaluable}
end
Evaluator(observables::Dict{Symbol,ResultObservable}) =
Evaluator(observables, Dict{Symbol,Evaluable}())
"""
evaluate!(func::Function, eval::Evaluator, name::Symbol, (ingredients::Symbol...))
Define an evaluable called `name`, i.e. a quantity depending on the observable averages `ingredients...`. The function `func` will get the ingredients as parameters and should return the value of the evaluable. Carlo will then perform jackknifing to calculate a bias-corrected result with correct error bars that appears together with the observables in the result file.
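# Example
A sketch defining a Binder-ratio-like evaluable, assuming observables `:mag2` and `:mag4` were measured:
```
evaluate!(eval, :binder, (:mag2, :mag4)) do mag2, mag4
    return mag4 / mag2^2
end
```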
"""
function evaluate!(
evaluation::Func,
eval::Evaluator,
name::Symbol,
ingredients::NTuple{N,Symbol},
) where {Func,N}
notfound = setdiff(ingredients, keys(eval.observables))
if !isempty(notfound)
@warn "Evaluable '$name': ingredients $notfound not found. Skipping..."
return nothing
end
eval.evaluables[name] =
evaluate(evaluation, tuple((eval.observables[i] for i in ingredients)...))
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 380 | using Logging
using Dates
const date_format = "yyyy-mm-dd HH:MM:SS"
function log_formatter(level::LogLevel, _module, group, id, file, line)
(color, prefix, suffix) = Logging.default_metafmt(level, _module, group, id, file, line)
return color, "$(prefix) $(Dates.format(now(), date_format))", suffix
end
default_logger() = ConsoleLogger(meta_formatter = log_formatter)
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2271 | using HDF5
using Random
"""
Holds the Carlo-internal state of the simulation and provides an interface to
- **Random numbers**: the public field `MCContext.rng` is a random number generator (see [rng](@ref))
- **Measurements**: see [`measure!(::MCContext, ::Symbol, ::Any)`](@ref)
- **Simulation state**: see [`is_thermalized`](@ref)
"""
mutable struct MCContext{RNG<:Random.AbstractRNG}
sweeps::Int64
thermalization_sweeps::Int64
rng::RNG
measure::Measurements
end
"""
measure!(ctx::MCContext, name::Symbol, value)
Measure a sample for the observable named `name`. The sample `value` may be either a scalar or vector of a float type.
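# Example
The observable names below are illustrative:
```
measure!(ctx, :energy, -1.2)            # scalar sample
measure!(ctx, :correlation, [0.5, 0.2]) # vector sample
```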
"""
measure!(ctx::MCContext, name::Symbol, value) = add_sample!(ctx.measure, name, value)
"""
is_thermalized(ctx::MCContext)::Bool
Returns true if the simulation is thermalized.
"""
is_thermalized(ctx::MCContext) = ctx.sweeps > ctx.thermalization_sweeps
function MCContext{RNG}(parameters::AbstractDict; seed_variation::Integer = 0) where {RNG}
measure = Measurements(parameters[:binsize])
register_observable!(measure, :_ll_checkpoint_read_time, 1, ())
register_observable!(measure, :_ll_checkpoint_write_time, 1, ())
if haskey(parameters, :seed)
rng = RNG(parameters[:seed] * (1 + seed_variation))
else
rng = RNG()
end
return MCContext(Int64(0), Int64(parameters[:thermalization]), rng, measure)
end
function write_measurements!(ctx::MCContext, meas_file::HDF5.Group)
write_measurements!(ctx.measure, create_absent_group(meas_file, "observables"))
return nothing
end
function write_checkpoint(ctx::MCContext, out::HDF5.Group)
write_checkpoint(ctx.rng, create_group(out, "random_number_generator"))
write_checkpoint(ctx.measure, create_group(out, "measurements"))
out["sweeps"] = ctx.sweeps
out["thermalization_sweeps"] = ctx.thermalization_sweeps
return nothing
end
function read_checkpoint(::Type{MCContext{RNG}}, in::HDF5.Group) where {RNG}
sweeps = read(in, "sweeps")
therm_sweeps = read(in, "thermalization_sweeps")
return MCContext(
sweeps,
therm_sweeps,
read_checkpoint(RNG, in["random_number_generator"]),
read_checkpoint(Measurements, in["measurements"]),
)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2161 | mutable struct Measurements
default_bin_size::Int64
observables::Dict{Symbol,Accumulator}
end
Measurements(default_bin_size::Integer) = Measurements(default_bin_size, Dict())
function add_sample!(meas::Measurements, obsname::Symbol, value)
if !haskey(meas.observables, obsname)
register_observable!(
meas,
obsname,
meas.default_bin_size,
size(value),
float(eltype(value)),
)
end
add_sample!(
meas.observables[obsname]::Accumulator{
float(eltype(value)),
ndims(value) + 1,
ndims(value),
},
value,
)
return nothing
end
Base.isempty(meas::Measurements) = all(isempty.(values(meas.observables)))
has_complete_bins(meas::Measurements) = any(has_complete_bins.(values(meas.observables)))
function register_observable!(
meas::Measurements,
obsname::Symbol,
bin_length::Integer,
shape::Tuple{Vararg{Integer}},
T::Type{<:Number} = Float64,
)
if haskey(meas.observables, obsname)
error("Accumulator '$obsname' already exists.")
end
meas.observables[obsname] = Accumulator{T}(bin_length, shape)
return nothing
end
function write_measurements!(meas::Measurements, out::HDF5.Group)
for (name, obs) in meas.observables
if has_complete_bins(obs)
write_measurements!(obs, create_absent_group(out, String(name)))
end
end
return nothing
end
function write_checkpoint(meas::Measurements, out::HDF5.Group)
out["default_bin_size"] = meas.default_bin_size
for (name, obs) in meas.observables
@assert !has_complete_bins(obs)
write_checkpoint(obs, create_group(out, "observables/$(name)"))
end
return nothing
end
function read_checkpoint(::Type{Measurements}, in::HDF5.Group)
default_bin_size = read(in, "default_bin_size")
observables = Dict{Symbol,Accumulator}()
for obsname in keys(in["observables"])
observables[Symbol(obsname)] =
read_checkpoint(Accumulator, in["observables/$(obsname)"])
end
return Measurements(default_bin_size, observables)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 5606 | using Logging
"""Determine the number of bins in the rebin procedure. Rebinning will not be performed if the number of samples is smaller than `min_bin_count`."""
function calc_rebin_count(sample_count::Integer, min_bin_count::Integer = 10)::Integer
return sample_count <= min_bin_count ? sample_count :
(min_bin_count + round(cbrt(sample_count - min_bin_count)))
end
function calc_rebin_length(total_sample_count, rebin_length)
if total_sample_count == 0
return 1
elseif rebin_length !== nothing
return rebin_length
else
return total_sample_count ÷ calc_rebin_count(total_sample_count)
end
end
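# Example: for 1010 samples and the default min_bin_count = 10,
# calc_rebin_count(1010) == 10 + round(cbrt(1000)) == 20, so
# calc_rebin_length(1010, nothing) == 1010 ÷ 20 == 50.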
"""
This helper function consecutively opens all ".meas.h5" files of a task. For each
observable in the file, it calls
states[obs_key] = func(obs_key, obs, get(states, obs_key, nothing))
Finally, the dictionary `states` is returned. This construction allows `func` to care only about a single observable, simplifying the merging code.
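# Example
A sketch that tallies the total sample count of each observable:
```
sample_counts = iterate_measfile_observables(filenames) do key, obs, state
    return something(state, 0) + size(obs["samples"])[end]
end
```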
"""
function iterate_measfile_observables(func::Func, filenames) where {Func}
states = Dict{Symbol,Any}()
for filename in filenames
h5open(filename, "r") do meas_file
for obs_name in keys(meas_file["observables"])
obs_key = Symbol(obs_name)
obs = nothing
try
obs = meas_file["observables"][obs_name]
catch err
if err isa KeyError
@warn "$(obs_name): $(err). Skipping..."
continue
end
rethrow(err)
end
states[obs_key] = func(obs_key, obs, get(states, obs_key, nothing))
end
end
end
return states
end
function merge_results(
::Type{MC},
taskdir::AbstractString;
parameters::Dict{Symbol,Any},
rebin_length::Union{Integer,Nothing} = get(parameters, :rebin_length, nothing),
sample_skip::Integer = get(parameters, :rebin_sample_skip, 0),
) where {MC<:AbstractMC}
merged_results = merge_results(
JobTools.list_run_files(taskdir, "meas\\.h5");
rebin_length,
sample_skip,
)
evaluator = Evaluator(merged_results)
register_evaluables(MC, evaluator, parameters)
results = merge(
merged_results,
Dict(name => ResultObservable(obs) for (name, obs) in evaluator.evaluables),
)
write_results(results, taskdir * "/results.json", taskdir, parameters, Version(MC))
return nothing
end
function merge_results(
filenames::AbstractArray{<:AbstractString};
rebin_length::Union{Integer,Nothing},
sample_skip::Integer = 0,
)
obs_types = iterate_measfile_observables(filenames) do _, obs_group, state
internal_bin_length = read(obs_group, "bin_length")
sample_size = size(obs_group["samples"])
# TODO: compat for v0.1.5 format. Remove in v0.3
if length(sample_size) == 2 &&
sample_size[1] == 1 &&
!haskey(attributes(obs_group["samples"]), "v0.2_format")
sample_size = (sample_size[2],)
end
shape = sample_size[1:end-1]
nsamples = max(0, sample_size[end] - sample_skip)
type = eltype(obs_group["samples"])
if isnothing(state)
return (; T = type, internal_bin_length, shape, total_sample_count = nsamples)
end
if shape != state.shape
error("Observable shape ($shape) does not agree between runs ($(state.shape))")
end
return (;
T = promote_type(state.T, type),
internal_bin_length = state.internal_bin_length,
shape = state.shape,
total_sample_count = state.total_sample_count + nsamples,
)
end
binned_obs = iterate_measfile_observables(filenames) do obs_name, obs_group, state
obs_type = obs_types[obs_name]
if state === nothing
binsize = calc_rebin_length(obs_type.total_sample_count, rebin_length)
state = (;
acc = Accumulator{obs_type.T}(binsize, obs_type.shape),
acc² = Accumulator{real(obs_type.T)}(binsize, obs_type.shape),
)
end
samples = read(obs_group, "samples")
# TODO: compat for v0.1.5 format. Remove in v0.3
if !haskey(attributes(obs_group["samples"]), "v0.2_format")
samples = reshape(samples, obs_type.shape..., :)
end
for value in Iterators.drop(eachslice(samples; dims = ndims(samples)), sample_skip)
add_sample!(state.acc, value)
add_sample!(state.acc², abs2.(value))
end
return state
end
return Dict{Symbol,ResultObservable}(
obs_name => begin
μ = mean(obs.acc)
σ = std_of_mean(obs.acc)
no_rebinning_σ =
sqrt.(
max.(0, mean(obs.acc²) .- abs2.(μ)) ./
(obs.acc.bin_length * num_bins(obs.acc) - 1)
)
autocorrelation_time = 0.5 .* (σ ./ no_rebinning_σ) .^ 2
# broadcasting promotes 0-dim arrays to scalar, which we do not want
ensure_array(x::Number) = fill(x)
ensure_array(x::AbstractArray) = x
ResultObservable(
obs_types[obs_name].internal_bin_length,
obs.acc.bin_length,
μ,
σ,
ensure_array(autocorrelation_time),
bins(obs.acc),
)
end for (obs_name, obs) in binned_obs if num_bins(obs.acc) > 0
)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 259 | using PrecompileTools
using ..JobTools
@setup_workload begin
@compile_workload begin
tm = TaskMaker()
tm.thermalization = 10
tm.sweeps = 10
tm.binsize = 1
tm.test = [1]
task(tm, Lx = 2, T = 1)
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1077 | using Random
using HDF5
function guess_xoshiro_version()
num_fields = length(fieldnames(Xoshiro))
if num_fields == 4
return 1
elseif num_fields == 5
return 2
end
error(
"Carlo wrapper does not support this version of Xoshiro yet. Please file a bug report",
)
end
function write_checkpoint(rng::Xoshiro, out::HDF5.Group)
out["type"] = "xoroshiro256++"
out["state"] = collect(getproperty.(rng, fieldnames(Xoshiro)))
out["rng_version"] = guess_xoshiro_version()
return nothing
end
function read_checkpoint(::Type{Xoshiro}, in::HDF5.Group)
rng_type = read(in["type"])
if rng_type != "xoroshiro256++"
error("checkpoint was done with a different RNG: $(rng_type)")
end
rng_version = read(in["rng_version"])
if rng_version != guess_xoshiro_version()
error(
"checkpoint was done with a different version of Xoshiro. Try running with the version of Julia you used originally.",
)
end
state = read(in["state"])
return Random.Xoshiro(state...)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1191 | import JSON
"""Result of a Carlo Monte Carlo calculation containing the mean, statistical error and autocorrelation time."""
mutable struct ResultObservable{T<:Number,R<:Real,N,M}
internal_bin_length::Int64
rebin_length::Int64
mean::Array{T,N}
error::Array{R,N}
autocorrelation_time::Array{R,N}
rebin_means::Array{T,M}
end
rebin_count(obs::ResultObservable) = Int64(size(obs.rebin_means)[end])
JSON.lower(obs::ResultObservable) = Dict(
"mean" => obs.mean,
"error" => obs.error,
"autocorr_time" => maximum(obs.autocorrelation_time),
"rebin_len" => obs.rebin_length,
"rebin_count" => rebin_count(obs),
"internal_bin_len" => obs.internal_bin_length,
)
function write_results(
observables::AbstractDict,
filename::AbstractString,
taskdir::AbstractString,
parameters::Dict,
version::Version,
)
open(filename, "w") do file
JSON.print(
file,
Dict(
"task" => taskdir,
"parameters" => parameters,
"results" => observables,
"version" => to_dict(version),
),
1,
)
end
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 4721 | using Random
using Printf
struct Run{MC<:AbstractMC,RNG<:Random.AbstractRNG}
context::MCContext{RNG}
implementation::MC
end
function Run{MC,RNG}(params::Dict, comm::MPI.Comm) where {MC<:AbstractMC,RNG<:AbstractRNG}
seed_variation = MPI.Comm_rank(comm)
context = MCContext{RNG}(params; seed_variation)
implementation = MC(merge(params, Dict(:_comm => comm)))
init!(implementation, context, params, comm)
return Run{MC,RNG}(context, implementation)
end
"""Perform one MC step. Returns the number of thermalized sweeps performed"""
function step!(run::Run, comm::MPI.Comm)
sweep_time = @elapsed sweep!(run.implementation, run.context, comm)
run.context.sweeps += 1
if is_thermalized(run.context)
measure_time = @elapsed measure!(run.implementation, run.context, comm)
if MPI.Comm_rank(comm) == 0
measure!(run.context, :_ll_sweep_time, sweep_time)
measure!(run.context, :_ll_measure_time, measure_time)
end
return 1
end
if MPI.Comm_rank(comm) != 0
if !isempty(run.context.measure)
error(
"In parallel run mode, only the first rank of the run communicator is allowed to do measurements!",
)
end
end
return 0
end
function write_measurements(run::Run, file_prefix::AbstractString)
try
cp(file_prefix * ".meas.h5", file_prefix * ".meas.h5.tmp", force = true)
catch e
if !isa(e, Base.IOError)
rethrow()
end
end
h5open(file_prefix * ".meas.h5.tmp", "cw") do file
write_measurements!(run.context, file["/"])
write_hdf5(
Version(typeof(run.implementation)),
create_absent_group(file, "version"),
)
end
@assert !has_complete_bins(run.context.measure)
end
function write_checkpoint!(run::Run, file_prefix::AbstractString, comm::MPI.Comm)
checkpoint_write_time = @elapsed begin
is_run_leader = MPI.Comm_rank(comm) == 0
if is_run_leader
write_measurements(run, file_prefix)
elseif !isempty(run.context.measure)
error("In parallel run mode, only the first rank of a run can do measurements!")
end
contexts = MPI.gather(run.context, comm)
if !is_run_leader
write_checkpoint(run.implementation, nothing, comm)
else
h5open(file_prefix * ".dump.h5.tmp", "w") do file
for (i, context) in enumerate(contexts)
write_checkpoint(context, create_group(file, @sprintf("context/%04d", i)))
end
write_checkpoint(run.implementation, create_group(file, "simulation"), comm)
write_hdf5(
Version(typeof(run.implementation)),
create_group(file, "version"),
)
end
end
end
if is_run_leader
add_sample!(run.context.measure, :_ll_checkpoint_write_time, checkpoint_write_time)
end
return nothing
end
function write_checkpoint_finalize(file_prefix::AbstractString)
mv(file_prefix * ".dump.h5.tmp", file_prefix * ".dump.h5", force = true)
mv(file_prefix * ".meas.h5.tmp", file_prefix * ".meas.h5", force = true)
return nothing
end
function read_checkpoint(
::Type{Run{MC,RNG}},
file_prefix::AbstractString,
parameters::Dict,
comm::MPI.Comm,
)::Union{Run{MC,RNG},Nothing} where {MC,RNG}
no_checkpoint = Ref(false)
if is_run_leader(comm)
no_checkpoint[] = !isfile(file_prefix * ".dump.h5")
end
MPI.Bcast!(no_checkpoint, 0, comm)
if no_checkpoint[]
return nothing
end
context = nothing
mc = MC(merge(parameters, Dict(:_comm => comm)))
if is_run_leader(comm)
checkpoint_read_time = @elapsed begin
h5open(file_prefix * ".dump.h5", "r") do file
ranks = 0:MPI.Comm_size(comm)-1
keys = [@sprintf("context/%04d", rank + 1) for rank in ranks]
contexts = [
haskey(file, key) ? read_checkpoint(MCContext{RNG}, file[key]) :
MCContext{RNG}(parameters; seed_variation = rank) for
(rank, key) in zip(ranks, keys)
]
context = MPI.scatter(contexts, comm)
read_checkpoint!(mc, file["simulation"], comm)
end
end
@assert context !== nothing
add_sample!(context.measure, :_ll_checkpoint_read_time, checkpoint_read_time)
else
context = MPI.scatter(nothing, comm)
read_checkpoint!(mc, nothing, comm)
end
@assert context !== nothing
return Run{MC,RNG}(context, mc)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 12579 | using Dates
using MPI
using HDF5
using .JobTools: JobInfo
@enum MPISchedulerAction begin
A_INVALID = 0x00
A_EXIT = 0x01
A_CONTINUE = 0x02
A_NEW_TASK = 0x03
A_PROCESS_DATA_NEW_TASK = 0x04
end
function warn_if_controller_slow(delay::Real)
if delay > 0.5
@warn "controller took a long time to respond: $delay"
end
end
struct TaskInterruptedException <: Exception end
struct MPISchedulerIdleResponse
action::MPISchedulerAction
task_id::Int
run_id::Int
sweeps_until_comm::Int64
end
MPISchedulerIdleResponse(action::MPISchedulerAction) =
MPISchedulerIdleResponse(action, 0, 0, 0)
struct MPISchedulerBusyResponse
action::MPISchedulerAction
sweeps_until_comm::Int64
end
MPISchedulerBusyResponse(action::MPISchedulerAction) = MPISchedulerBusyResponse(action, 0)
struct MPISchedulerBusyRequest
task_id::Int
sweeps_since_last_query::Int64
end
const T_STATUS_REQUEST = 4355
const T_IDLE_RESPONSE = 4356
const T_BUSY_REQUEST = 4357
const T_BUSY_RESPONSE = 4358
@enum MPISchedulerStatus begin
S_IDLE = 9
S_BUSY = 10
S_TIMEUP = 11
end
struct MPIScheduler <: AbstractScheduler end
mutable struct MPISchedulerController <: AbstractScheduler
num_active_ranks::Int
task_id::Union{Int,Nothing}
tasks::Vector{SchedulerTask}
end
MPISchedulerController(job::JobInfo, active_ranks::Integer) = MPISchedulerController(
active_ranks,
length(job.tasks),
[
SchedulerTask(
p.target_sweeps,
p.sweeps,
t.params[:thermalization],
p.dir,
0,
get(t.params, :max_runs_per_task, typemax(Int64)),
) for (p, t) in zip(JobTools.read_progress(job), job.tasks)
],
)
mutable struct MPISchedulerWorker
task_id::Int
run_id::Int
task::SchedulerTask
run::Union{Run,Nothing}
end
function start(::Type{MPIScheduler}, job::JobInfo)
JobTools.create_job_directory(job)
MPI.Init()
comm = MPI.COMM_WORLD
rank = MPI.Comm_rank(comm)
num_ranks = MPI.Comm_size(comm)
rc = false
if num_ranks == 1
@error "started MPIScheduler with a single rank, but at least two are required for doing any work. Use SingleScheduler instead."
end
ranks_per_run = job.ranks_per_run == :all ? num_ranks - 1 : job.ranks_per_run
if (num_ranks - 1) % ranks_per_run != 0
error(
"Number of MPI worker ranks ($num_ranks - 1 = $(num_ranks-1)) is not a multiple of ranks per run ($(ranks_per_run))!",
)
end
run_comm = MPI.Comm_split(comm, rank == 0 ? 0 : 1 + (rank - 1) ÷ ranks_per_run, 0)
run_leader_comm = MPI.Comm_split(comm, is_run_leader(run_comm) ? 1 : nothing, 0)
if rank == 0
@info "starting job '$(job.name)'"
if ranks_per_run != 1
@info "running in parallel run mode with $(ranks_per_run) ranks per run"
end
rc = start(MPISchedulerController, job, run_leader_comm)
@info "controller: concatenating results"
JobTools.concatenate_results(job)
else
start(MPISchedulerWorker, job, run_leader_comm, run_comm)
end
MPI.Barrier(comm)
# MPI.Finalize()
return rc
end
function start(::Type{MPISchedulerController}, job::JobInfo, run_leader_comm::MPI.Comm)
controller = MPISchedulerController(job, MPI.Comm_size(run_leader_comm) - 1)
MPI.Barrier(run_leader_comm)
while controller.num_active_ranks > 0
react!(controller, run_leader_comm)
end
all_done = controller.task_id === nothing
@info "controller: stopping due to $(all_done ? "completion" : "time limit")"
return !all_done
end
function get_new_task_id_with_significant_work(
tasks::AbstractVector{<:SchedulerTask},
task_id::Union{Nothing,Integer},
)
for _ in eachindex(tasks)
task_id = get_new_task_id(tasks, task_id)
if task_id === nothing
return nothing
end
task = tasks[task_id]
if task.target_sweeps - task.sweeps >
max(task.thermalization * task.scheduled_runs, task.scheduled_runs)
return task_id
end
end
return nothing
end
function controller_react_idle(
controller::MPISchedulerController,
run_leader_comm::MPI.Comm,
rank::Integer,
)
controller.task_id =
get_new_task_id_with_significant_work(controller.tasks, controller.task_id)
if controller.task_id === nothing
MPI.Send(
MPISchedulerIdleResponse(A_EXIT),
run_leader_comm;
dest = rank,
tag = T_IDLE_RESPONSE,
)
controller.num_active_ranks -= 1
else
task = controller.tasks[controller.task_id]
task.scheduled_runs += 1
@assert controller.num_active_ranks > 0
sweeps_until_comm = clamp(
(task.target_sweeps - task.sweeps) ÷ task.scheduled_runs,
0,
task.target_sweeps ÷ controller.num_active_ranks,
)
MPI.Send(
MPISchedulerIdleResponse(
A_NEW_TASK,
controller.task_id,
task.scheduled_runs,
sweeps_until_comm,
),
run_leader_comm;
dest = rank,
tag = T_IDLE_RESPONSE,
)
end
return nothing
end
function controller_react_busy(
controller::MPISchedulerController,
run_leader_comm::MPI.Comm,
rank::Integer,
)
msg = MPI.Recv(
MPISchedulerBusyRequest,
run_leader_comm;
source = rank,
tag = T_BUSY_REQUEST,
)
task = controller.tasks[msg.task_id]
task.sweeps += msg.sweeps_since_last_query
if is_done(task)
task.scheduled_runs -= 1
if task.scheduled_runs > 0
@info "$(basename(task.dir)) has enough sweeps. Waiting for $(task.scheduled_runs) busy ranks."
MPI.Send(
MPISchedulerBusyResponse(A_NEW_TASK),
run_leader_comm;
dest = rank,
tag = T_BUSY_RESPONSE,
)
else
@info "$(basename(task.dir)) is done. Merging."
MPI.Send(
MPISchedulerBusyResponse(A_PROCESS_DATA_NEW_TASK),
run_leader_comm;
dest = rank,
tag = T_BUSY_RESPONSE,
)
end
else
sweeps_until_comm = clamp(
(task.target_sweeps - task.sweeps) ÷ task.scheduled_runs,
1,
max(1, task.target_sweeps ÷ controller.num_active_ranks),
)
MPI.Send(
MPISchedulerBusyResponse(A_CONTINUE, sweeps_until_comm),
run_leader_comm;
dest = rank,
tag = T_BUSY_RESPONSE,
)
end
return nothing
end
function controller_react_timeup(controller::MPISchedulerController)
controller.num_active_ranks -= 1
return nothing
end
function react!(controller::MPISchedulerController, run_leader_comm::MPI.Comm)
rank_status, status = MPI.Recv(
MPISchedulerStatus,
run_leader_comm,
MPI.Status;
source = MPI.ANY_SOURCE,
tag = T_STATUS_REQUEST,
)
rank = status.source
if rank_status == S_IDLE
controller_react_idle(controller, run_leader_comm, rank)
elseif rank_status == S_BUSY
controller_react_busy(controller, run_leader_comm, rank)
elseif rank_status == S_TIMEUP
controller_react_timeup(controller)
else
error("Invalid rank status $(rank_status)")
end
return nothing
end
function start(
::Type{MPISchedulerWorker},
job::JobInfo,
run_leader_comm::MPI.Comm,
run_comm::MPI.Comm,
)
worker::Union{MPISchedulerWorker,Nothing} = nothing
scheduler_task::Union{SchedulerTask,Nothing} = nothing
time_start = Dates.now()
time_last_checkpoint = Dates.now()
if is_run_leader(run_comm)
MPI.Barrier(run_leader_comm)
end
while true
if worker === nothing
response = worker_signal_idle(run_leader_comm, run_comm)
if response.action == A_EXIT
break
end
task = job.tasks[response.task_id]
scheduler_task = SchedulerTask(
response.sweeps_until_comm,
0,
task.params[:thermalization],
JobTools.task_dir(job, task),
)
rundir = run_dir(scheduler_task, response.run_id)
run = read_checkpoint(Run{job.mc,job.rng}, rundir, task.params, run_comm)
if run !== nothing
is_run_leader(run_comm) && @info "read $rundir"
else
run = Run{job.mc,job.rng}(task.params, run_comm)
is_run_leader(run_comm) && @info "initialized $rundir"
end
worker =
MPISchedulerWorker(response.task_id, response.run_id, scheduler_task, run)
time_last_checkpoint = Dates.now()
end
timeup = Ref(false)
while !is_done(worker.task)
worker.task.sweeps += step!(worker.run, run_comm)
yield()
timeup[] =
JobTools.is_checkpoint_time(job, time_last_checkpoint) ||
JobTools.is_end_time(job, time_start)
MPI.Bcast!(timeup, 0, run_comm)
if timeup[]
break
end
end
if JobTools.is_end_time(job, time_start)
worker_signal_timeup(run_leader_comm, run_comm)
@info "exits: time up"
break
end
response = worker_signal_busy(
run_leader_comm,
run_comm,
worker.task_id,
worker.task.sweeps,
)
worker.task.target_sweeps -= worker.task.sweeps
worker.task.sweeps = 0
if response.action == A_PROCESS_DATA_NEW_TASK
write_checkpoint(worker, run_comm)
if is_run_leader(run_comm)
merge_results(
job.mc,
worker.task.dir;
parameters = job.tasks[worker.task_id].params,
)
end
worker = nothing
elseif response.action == A_NEW_TASK
write_checkpoint(worker, run_comm)
worker = nothing
else
if timeup[]
write_checkpoint(worker, run_comm)
time_last_checkpoint = Dates.now()
end
@assert response.action == A_CONTINUE
worker.task.target_sweeps = response.sweeps_until_comm
@assert !is_done(worker.task)
end
end
end
is_run_leader(run_comm::MPI.Comm) = MPI.Comm_rank(run_comm) == 0
function worker_signal_timeup(run_leader_comm::MPI.Comm, run_comm::MPI.Comm)
if is_run_leader(run_comm)
MPI.Send(S_TIMEUP, run_leader_comm; dest = 0, tag = T_STATUS_REQUEST)
end
end
function worker_signal_idle(run_leader_comm::MPI.Comm, run_comm::MPI.Comm)
response = Ref{MPISchedulerIdleResponse}()
if is_run_leader(run_comm)
delay = @elapsed begin
MPI.Send(S_IDLE, run_leader_comm; dest = 0, tag = T_STATUS_REQUEST)
response[] = MPI.Recv(
MPISchedulerIdleResponse,
run_leader_comm;
source = 0,
tag = T_IDLE_RESPONSE,
)
end
warn_if_controller_slow(delay)
end
MPI.Bcast!(response, 0, run_comm)
return response[]
end
function worker_signal_busy(
run_leader_comm::MPI.Comm,
run_comm::MPI.Comm,
task_id::Integer,
sweeps_since_last_query::Integer,
)
response = Ref{MPISchedulerBusyResponse}()
if is_run_leader(run_comm)
MPI.Send(S_BUSY, run_leader_comm; dest = 0, tag = T_STATUS_REQUEST)
MPI.Send(
MPISchedulerBusyRequest(task_id, sweeps_since_last_query),
run_leader_comm;
dest = 0,
tag = T_BUSY_REQUEST,
)
response[] = MPI.Recv(
MPISchedulerBusyResponse,
run_leader_comm;
source = 0,
tag = T_BUSY_RESPONSE,
)
end
MPI.Bcast!(response, 0, run_comm)
return response[]
end
function write_checkpoint(scheduler::MPISchedulerWorker, run_comm::MPI.Comm)
rundir = run_dir(scheduler.task, scheduler.run_id)
write_checkpoint!(scheduler.run, rundir, run_comm)
if is_run_leader(run_comm)
write_checkpoint_finalize(rundir)
@info "rank $(MPI.Comm_rank(MPI.COMM_WORLD)): checkpointing $rundir"
end
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 3434 | using Dates
using Logging
using .JobTools: JobInfo
abstract type AbstractScheduler end
mutable struct SingleScheduler <: AbstractScheduler
job::JobInfo
run::Union{Run,Nothing}
time_start::Dates.DateTime
time_last_checkpoint::Dates.DateTime
task_id::Union{Int,Nothing}
tasks::Vector{SchedulerTask}
end
function SingleScheduler(job::JobInfo)
return SingleScheduler(job, nothing, Dates.now(), Dates.now(), 1, SchedulerTask[])
end
function start(::Type{SingleScheduler}, job::JobInfo)
MPI.Init()
if MPI.Comm_size(MPI.COMM_WORLD) > 1 && MPI.Comm_rank(MPI.COMM_WORLD) == 0
@info "started SingleScheduler running with multiple MPI ranks: running in parallel run mode!"
end
JobTools.create_job_directory(job)
scheduler = SingleScheduler(job)
scheduler.time_start = Dates.now()
scheduler.time_last_checkpoint = scheduler.time_start
scheduler.tasks = map(
# thermalization information is not needed by the single scheduler, so we pass 0
x -> SchedulerTask(x.target_sweeps, x.sweeps, 0, x.dir),
JobTools.read_progress(scheduler.job),
)
scheduler.task_id = get_new_task_id(scheduler.tasks, length(scheduler.tasks))
while scheduler.task_id !== nothing &&
!JobTools.is_end_time(scheduler.job, scheduler.time_start)
task = scheduler.job.tasks[scheduler.task_id]
scheduler_task = scheduler.tasks[scheduler.task_id]
rundir = run_dir(scheduler_task, 1)
scheduler.run =
read_checkpoint(Run{job.mc,job.rng}, rundir, task.params, MPI.COMM_WORLD)
if scheduler.run !== nothing
@info "read $rundir"
else
scheduler.run = Run{job.mc,job.rng}(task.params, MPI.COMM_WORLD)
@info "initialized $rundir"
end
while !is_done(scheduler_task) &&
!JobTools.is_end_time(scheduler.job, scheduler.time_start)
scheduler_task.sweeps += step!(scheduler.run, MPI.COMM_WORLD)
if JobTools.is_checkpoint_time(scheduler.job, scheduler.time_last_checkpoint)
write_checkpoint(scheduler)
end
end
write_checkpoint(scheduler)
taskdir = scheduler_task.dir
@info "merging $(taskdir)"
merge_results(job.mc, scheduler_task.dir; parameters = task.params)
scheduler.task_id = get_new_task_id(scheduler.tasks, scheduler.task_id)
end
JobTools.concatenate_results(scheduler.job)
all_done = scheduler.task_id === nothing
@info "stopping due to $(all_done ? "completion" : "time limit")"
MPI.Barrier(MPI.COMM_WORLD)
return !all_done
end
function get_new_task_id(
tasks::AbstractVector{SchedulerTask},
old_id::Integer,
)::Union{Integer,Nothing}
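# rotate the task list so the search starts just after the previous task (round-robin)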
next_unshifted = findfirst(
x -> !is_done(x) && x.scheduled_runs < x.max_scheduled_runs,
circshift(tasks, -old_id),
)
if next_unshifted === nothing
return nothing
end
return (next_unshifted + old_id - 1) % length(tasks) + 1
end
get_new_task_id(::AbstractVector{SchedulerTask}, ::Nothing) = nothing
function write_checkpoint(scheduler::SingleScheduler)
scheduler.time_last_checkpoint = Dates.now()
rundir = run_dir(scheduler.tasks[scheduler.task_id], 1)
write_checkpoint!(scheduler.run, rundir, MPI.COMM_WORLD)
write_checkpoint_finalize(rundir)
@info "checkpointing $rundir"
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 587 | mutable struct SchedulerTask
target_sweeps::Int64
sweeps::Int64
thermalization::Int64 # used for scheduling estimates only
dir::String
scheduled_runs::Int64
max_scheduled_runs::Int64
end
SchedulerTask(
target_sweeps::Integer,
sweeps::Integer,
thermalization::Integer,
dir::AbstractString,
) = SchedulerTask(target_sweeps, sweeps, thermalization, dir, 0, typemax(Int64))
is_done(task::SchedulerTask) = task.sweeps >= task.target_sweeps
function run_dir(task::SchedulerTask, run_id::Integer)
return @sprintf "%s/run%04d" task.dir run_id
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 316 | using HDF5
using Dates
"""Helper to create a group inside a HDF5 node but only if it does not already exist."""
function create_absent_group(
g::Union{HDF5.File,HDF5.Group},
name::AbstractString,
)::HDF5.Group
return if haskey(g, name)
g[name]
else
create_group(g, name)
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1007 | function pkgversion_or_missing(mod)
if isdefined(Base, :pkgversion)
return string(pkgversion(mod))
end
return missing
end
"""
Version
The version information for both `Carlo` and the parent module of the `AbstractMC` implementation that is currently used.
"""
struct Version
carlo_version::Union{Missing,String}
mc_package::String
mc_version::Union{Missing,String}
function Version(mc::Type{<:AbstractMC})
return new(
pkgversion_or_missing(@__MODULE__),
string(parentmodule(mc)),
pkgversion_or_missing(parentmodule(mc)),
)
end
end
function write_hdf5(version::Version, group::HDF5.Group)
for (field, value) in to_dict(version)
if !haskey(group, field)
group[field] = string(value)
end
end
end
function to_dict(version::Version)
return Dict(
string.(fieldnames(typeof(version))) .=>
getfield.(Ref(version), fieldnames(typeof(version))),
)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 199 | module JobTools
export JobInfo, TaskInfo, TaskMaker, task, make_tasks, result_filename, current_task_name
using Statistics
include("taskinfo.jl")
include("jobinfo.jl")
include("taskmaker.jl")
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 4954 | using JSON
using Dates
using Random
"""Parse a duration of the format `[[hours:]minutes]:seconds`."""
function parse_duration(duration::AbstractString)::Dates.Period
m = match(r"^(((?<hours>\d+):)?(?<minutes>\d+):)?(?<seconds>\d+)$", duration)
if isnothing(m)
error("$duration does not match [[HH:]MM:]SS")
end
conv(period, x) =
isnothing(x) ? Dates.Second(0) : convert(Dates.Second, period(parse(Int32, x)))
return conv(Dates.Hour, m[:hours]) +
conv(Dates.Minute, m[:minutes]) +
conv(Dates.Second, m[:seconds])
end
parse_duration(duration::Dates.Period) = duration
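# Examples (mirroring the unit tests):
#   parse_duration("100")     == Dates.Second(100)
#   parse_duration("10:00")   == Dates.Second(600)
#   parse_duration("5:32:10") == Dates.Second(5 * 60 * 60 + 32 * 60 + 10)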
"""
JobInfo(
job_directory_prefix::AbstractString,
mc::Type;
checkpoint_time::Union{AbstractString, Dates.Second},
run_time::Union{AbstractString, Dates.Second},
tasks::Vector{TaskInfo},
rng::Type = Random.Xoshiro,
ranks_per_run::Union{Integer, Symbol} = 1,
)
Holds all information required for a Monte Carlo calculation. The data of the calculation (parameters, results, and checkpoints) will be saved under `job_directory_prefix`.
`mc` is the type of the algorithm to use, implementing the [abstract_mc](@ref) interface.
`checkpoint_time` and `run_time` specify the interval between checkpoints and the total desired run_time of the simulation. Both may be specified as a string of format `[[hours:]minutes:]seconds`
Each job contains a set of `tasks`, corresponding
to different sets of simulation parameters that should be run in parallel. The [`TaskMaker`](@ref) type can be used to conveniently generate them.
`rng` sets the type of random number generator that should be used.
Setting the optional parameter `ranks_per_run > 1` enables [Parallel run mode](@ref parallel_run_mode). The special value `ranks_per_run = :all` uses all available ranks for a single run.
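# Example
A sketch, where `MyMC` is a placeholder `AbstractMC` implementation and `tm` a configured [`TaskMaker`](@ref):
```
job = JobInfo(
    "jobs/my_job",
    MyMC;
    tasks = make_tasks(tm),
    checkpoint_time = "15:00",
    run_time = "24:00:00",
)
```
"""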
struct JobInfo
name::String
dir::String
mc::Type
rng::Type
tasks::Vector{TaskInfo}
checkpoint_time::Dates.Second
run_time::Dates.Second
ranks_per_run::Union{Int,Symbol}
end
function JobInfo(
job_file_name::AbstractString,
mc::Type;
rng::Type = Random.Xoshiro,
checkpoint_time::Union{AbstractString,Dates.Second},
run_time::Union{AbstractString,Dates.Second},
tasks::Vector{TaskInfo},
ranks_per_run::Union{Integer,Symbol} = 1,
)
job_file_name = expanduser(job_file_name)
if (ranks_per_run isa Symbol && ranks_per_run != :all) ||
(ranks_per_run isa Integer && ranks_per_run < 1)
throw(
ArgumentError(
"ranks_per_run should be positive integer or :all, not $ranks_per_run.",
),
)
end
return JobInfo(
basename(job_file_name),
job_file_name * ".data",
mc,
rng,
tasks,
parse_duration(checkpoint_time),
parse_duration(run_time),
ranks_per_run,
)
end
function task_dir(job::JobInfo, task::TaskInfo)
return "$(job.dir)/$(task.name)"
end
"""
result_filename(job::JobInfo)
Returns the filename of the `.results.json` file containing the merged results of the calculation of `job`.
"""
result_filename(job::JobInfo) = "$(job.dir)/../$(job.name).results.json"
function concatenate_results(job::JobInfo)
open(result_filename(job), "w") do out
results = skipmissing(map(job.tasks) do task
try
open(task_dir(job, task) * "/results.json", "r") do in
return JSON.parse(in)
end
catch e
if !isa(e, Base.SystemError)
rethrow()
end
return missing
end
end)
JSON.print(out, collect(results), 1)
end
return nothing
end
function create_job_directory(job::JobInfo)
mkpath(job.dir)
for task in job.tasks
mkpath(task_dir(job, task))
end
return nothing
end
function read_progress(job::JobInfo)
return map(job.tasks) do task
target_sweeps = task.params[:sweeps]
sweeps = read_dump_progress(task_dir(job, task))
num_runs = length(sweeps)
thermalized_sweeps = sum(
max(0, total_sweeps - therm_sweeps) for (total_sweeps, therm_sweeps) in sweeps;
init = 0,
)
thermalization_fraction = 0
if num_runs > 0
thermalization_fraction =
mean(ts == 0 ? 1.0 : min(s, ts) / ts for (s, ts) in sweeps)
end
return TaskProgress(
target_sweeps,
thermalized_sweeps,
num_runs,
thermalization_fraction,
task_dir(job, task),
)
end |> collect
end
is_checkpoint_time(job::JobInfo, time_last_checkpoint::Dates.DateTime) =
Dates.now() >= time_last_checkpoint + job.checkpoint_time
is_end_time(job::JobInfo, time_start::Dates.DateTime) =
Dates.now() >= time_start + job.run_time
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 3082 | using HDF5
using Printf
"""
TaskInfo(name::AbstractString, params::Dict{Symbol,Any})
Holds the information of one parameter set in a Monte Carlo calculation. While it is possible to construct it by hand, for multiple tasks it is recommended to use [`TaskMaker`](@ref) for convenience.
# Special parameters
While `params` can hold any kind of parameter, some are special and used to configure the behavior of Carlo.
- `sweeps`: *required*. The minimum number of Monte Carlo measurement sweeps to perform for the task.
- `thermalization`: *required*. The number of thermalization sweeps to perform.
- `binsize`: *required*. The internal default binsize for observables. Carlo will merge this many samples into one bin before saving them.
On top of this, a rebinning analysis is performed, so this setting mostly affects disk space and IO efficiency. To get correct autocorrelation times it should be 1; in all other cases it should be much higher.
- `rng`: *optional*. Type of the random number generator to use. See [rng](@ref).
- `seed`: *optional*. Optionally run calculations with a fixed seed. Useful for debugging.
- `rebin_length`: *optional*. Override the automatic rebinning length chosen by Carlo (⚠ do not set without knowing what you are doing).
- `rebin_sample_skip`: *optional*. Skip the first ``N`` internal bins of each run when performing the rebinning analysis. Useful if `thermalization` was not set high enough at the start of the simulation.
- `max_runs_per_task`: *optional*. If set, puts a limit on the maximum number of runs that will be scheduled for this task.
Out of these parameters, only `sweeps` may be changed for an existing calculation. This is handy for running the simulation longer or shorter than originally planned.
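# Example
A minimal hand-constructed task (parameter values are illustrative):
```
task = TaskInfo(
    "task0001",
    Dict{Symbol,Any}(:sweeps => 10000, :thermalization => 1000, :binsize => 100),
)
```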
"""
struct TaskInfo
name::String
params::Dict{Symbol,Any}
function TaskInfo(name::AbstractString, params::AbstractDict)
required_keys = [:sweeps, :thermalization, :binsize]
if !(required_keys ⊆ keys(params))
error(
"task $name missing required parameters $(setdiff(required_keys, keys(params)))",
)
end
return new(name, params)
end
end
function task_name(task_id::Integer)
return @sprintf "task%04d" task_id
end
function list_run_files(taskdir::AbstractString, ending::AbstractString)
return map(
x -> taskdir * "/" * x,
filter(x -> occursin(Regex("^run\\d{4,}\\.$ending\$"), x), readdir(taskdir)),
)
end
function read_dump_progress(taskdir::AbstractString)
sweeps = Tuple{Int64,Int64}[]
for dumpname in list_run_files(taskdir, "dump\\.h5")
h5open(dumpname, "r") do f
push!(
sweeps,
(
read(f["context/0001/sweeps"], Int64),
read(f["context/0001/thermalization_sweeps"], Int64),
),
)
end
end
return sweeps
end
struct TaskProgress
target_sweeps::Int64
sweeps::Int64
num_runs::Int64
thermalization_fraction::Float64
dir::String
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2374 | """
TaskMaker()
Tool for generating a list of tasks, i.e. parameter sets, to be simulated in a Monte Carlo simulation.
The fields of `TaskMaker` can be freely assigned and each time [`task`](@ref) is called, their current state will be copied into a new task.
Finally, the list of tasks can be generated using [`make_tasks`](@ref).
In most cases the resulting tasks will be used in the constructor of [`JobInfo`](@ref), the basic description for jobs in Carlo.
# Example
The following example creates a list of 5 tasks for different parameters `T`. This could be a scan of the finite-temperature phase diagram of some model. The first task will be run with more sweeps than the rest.
```@example
tm = TaskMaker()
tm.sweeps = 10000
tm.thermalization = 2000
tm.binsize = 500
task(tm; T=0.04)
tm.sweeps = 5000
for T in range(0.1, 10, length=5)
task(tm; T=T)
end
tasks = make_tasks(tm)
```
"""
mutable struct TaskMaker
tasks::Vector{TaskInfo}
current_task_params::Dict{Symbol,Any}
TaskMaker() = new([], Dict{Symbol,Any}())
end
function Base.setproperty!(tm::TaskMaker, symbol::Symbol, value)
Base.getfield(tm, :current_task_params)[symbol] = value
return nothing
end
function Base.getproperty(tm::TaskMaker, symbol::Symbol)
return Base.getfield(tm, :current_task_params)[symbol]
end
"""
current_task_name(tm::TaskMaker)
Returns the name of the task that will be created by `task(tm)`.
"""
function current_task_name(tm::TaskMaker)
return task_name(length(Base.getfield(tm, :tasks)) + 1)
end
"""
task(tm::TaskMaker; kwargs...)
Creates a new task for the current set of parameters saved in `tm`. Optionally, `kwargs` can be used to specify parameters that are set for this task only.
"""
function task(tm::TaskMaker; kwargs...)
taskname = current_task_name(tm)
append!(
Base.getfield(tm, :tasks),
[
TaskInfo(
taskname,
merge(Base.getfield(tm, :current_task_params), Dict{Symbol,Any}(kwargs)),
),
],
)
return nothing
end
"""
make_tasks(tm::TaskMaker)::Vector{TaskInfo}
Generate a list of tasks from `tm` based on the previous calls of [`task`](@ref). The output of this will typically be supplied to the `tasks` argument of
[`JobInfo`](@ref).
"""
function make_tasks(tm::TaskMaker)
return Base.getfield(tm, :tasks)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1850 | module ResultTools
using JSON
using Measurements
make_scalar(x) = x isa AbstractVector && size(x) == (1,) ? only(x) : x
function measurement_from_obs(obsname, obs)
if ismissing(obs)
return missing
end
if !isnothing(obs["rebin_len"]) &&
!isnothing(obs["autocorr_time"]) &&
obs["autocorr_time"] >= obs["rebin_len"]
@warn "$obsname: autocorrelation time longer than rebin length. Results may be unreliable."
end
mean = obs["mean"]
if mean isa AbstractDict
mean = Complex(mean["re"], mean["im"])
end
error = obs["error"]
sanitize(m, e) = (isnothing(m) || isnothing(e)) ? missing : m ± e
sanitize(m::Complex, e) =
(isnothing(m) || isnothing(e)) ? missing : Complex(real(m) ± e, imag(m))
sanitize(m::AbstractDict, e) =
(isnothing(m) || isnothing(e)) ? missing : Complex(m["re"] ± e, m["im"])
return make_scalar(sanitize.(mean, error))
end
"""
ResultTools.dataframe(result_json::AbstractString)
Helper to import result data from a `*.results.json` file produced after a Carlo calculation. Returns a Tables.jl-compatible dictionary that can be used as is or converted into a DataFrame or other table structure. Observables and their errorbars will be converted to Measurements.jl measurements.
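# Example
A sketch, assuming DataFrames.jl is available (the file path is illustrative):
```
using DataFrames
df = DataFrame(ResultTools.dataframe("my_job.results.json"))
```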
"""
function dataframe(result_json::AbstractString)
json = JSON.parsefile(result_json)
obsnames = unique(Iterators.flatten(keys(t["results"]) for t in json))
flattened_json = Dict{String,Any}[
Dict(
"task" => basename(t["task"]),
t["parameters"]...,
Dict(
obsname =>
measurement_from_obs(obsname, get(t["results"], obsname, missing))
for obsname in obsnames
)...,
) for t in json
]
return flattened_json
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 683 | using Carlo
using Carlo.JobTools
function compat_job(versions; dir)
return JobInfo(
"$dir/dump_compat",
TestMC;
tasks = [
TaskInfo(
"julia$julia_version-$version",
Dict(
:sweeps => 1000,
:thermalization => 0,
:binsize => 100,
:min_julia_version => julia_version,
),
) for (julia_version, version) in versions
],
checkpoint_time = "00:40",
run_time = "00:10",
)
end
function gen_compat_data()
# `dir` is a required keyword of compat_job; the compatibility data lives next to this script
start(Carlo.SingleScheduler, compat_job([(VERSION, pkgversion(Carlo))]; dir = @__DIR__))
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 424 | using Carlo
using Carlo.JobTools
include("test_mc.jl")
tm = TaskMaker()
tm.binsize = 2
tm.rebin_sample_skip = 1000
tm.rebin_length = 1000
tm.sweeps = 10
tm.thermalization = 0
task(tm)
tm.thermalization = 100000
tm.sweeps = 100000000000
task(tm)
task(tm)
job = JobInfo(
ARGS[1] * "/test",
TestMC;
tasks = make_tasks(tm),
checkpoint_time = "00:05",
run_time = "00:10",
)
Carlo.start(job, ARGS[2:end])
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 532 | using Test
using Carlo
using Carlo.JobTools
import Carlo.JobTools as JT
using Carlo.ResultTools
using MPI
include("test_utils.jl")
include("test_mc.jl")
tests = [
"test_dump_compat.jl"
"test_taskinfo.jl"
"test_jobinfo.jl"
"test_evaluable.jl"
"test_run.jl"
"test_accumulator.jl"
"test_random_wrap.jl"
"test_results.jl"
"test_measurements.jl"
"test_mccontext.jl"
"test_merge.jl"
"test_taskmaker.jl"
"test_scheduler.jl"
"test_cli.jl"
]
for test in tests
include(test)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 780 | using Carlo
@struct_equal Carlo.Accumulator
@testset "Accumulator" begin
bin_length = 124
obs = Carlo.Accumulator{Float64}(bin_length, (2,))
Carlo.add_sample!(obs, [1, 1.2])
@test test_checkpointing(obs)
for i = 2:bin_length
Carlo.add_sample!(obs, [i, 1.2])
end
@test obs.current_filling == 0
@test size(obs.bins) == (2, 2)
@test isapprox(obs.bins[:, 1], [(bin_length + 1) / 2, 1.2])
for i = 1:3*bin_length+10
Carlo.add_sample!(obs, [1, 1.2])
end
@test size(obs.bins, 2) == 5
tmpdir = mktempdir()
h5open(tmpdir * "/test.h5", "w") do file
Carlo.write_measurements!(obs, create_group(file, "obs"))
@test size(obs.bins, 2) == 1
@test size(file["obs/samples"]) == (2, 4)
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2032 | function test_binning_properties(result_file)
results = JSON.parsefile(result_file)
for result in results
sweeps = result["parameters"]["sweeps"]
bin_length = result["parameters"]["binsize"]
rebin_length = result["parameters"]["rebin_length"]
rebin_sample_skip = result["parameters"]["rebin_sample_skip"]
for (obsname, obs) in result["results"]
if startswith(obsname, "_ll_") || isnothing(obs["internal_bin_len"])
continue
end
@test rebin_length == obs["rebin_len"]
@test bin_length == obs["internal_bin_len"]
@test obs["rebin_count"] * rebin_length <
sweeps ÷ bin_length - rebin_sample_skip
end
end
end
@testset "CLI" begin
mktempdir() do tmpdir
dummy_jobfile = "dummy_jobfile.jl"
run_cmd(cmd; quiet = false) = run(
pipeline(
`$(Base.julia_cmd()) $dummy_jobfile $tmpdir $cmd`,
stderr = quiet ? devnull : stderr,
),
)
@test_throws ProcessFailedException run_cmd("status"; quiet = true)
@test_throws ProcessFailedException run_cmd("merge"; quiet = true)
run_cmd("delete")
@test !isfile(tmpdir * "/test.results.json")
run_cmd("run")
run_cmd("status")
run_cmd("merge")
@test isfile(tmpdir * "/test.results.json")
test_binning_properties(tmpdir * "/test.results.json")
run_cmd("delete")
@test !isfile(tmpdir * "/test.results.json")
@test !isdir(tmpdir * "/test")
tm = TaskMaker()
tm.sweeps = 100
tm.thermalization = 100
tm.binsize = 10
task(tm)
task(tm)
task(tm)
job = JobInfo(
tmpdir * "/test",
TestMC;
tasks = make_tasks(tm),
checkpoint_time = "00:05",
run_time = "00:10",
)
JobTools.create_job_directory(job)
run_cmd("status")
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1990 | include("compat_job.jl")
@testset "Checkpoint compatibility" begin
mktempdir() do tmpdir
cp("dump_compat.data", tmpdir * "/dump_compat.data")
cp("dump_compat.results.json", tmpdir * "/dump_compat.results.json")
job = compat_job([(v"1.10.2", v"0.1.5")]; dir = tmpdir)
progress = JobTools.read_progress(job)
obs_names = Set([
:_ll_sweep_time,
:test,
:test2,
:test4,
:test_rng,
:test_vec,
:_ll_measure_time,
])
MPI.Init()
for (i, task) in enumerate(job.tasks)
@testset "$(task.name)" begin
if VERSION < task.params[:min_julia_version]
continue
end
run = Carlo.read_checkpoint(
Carlo.Run{job.mc,job.rng},
"$tmpdir/dump_compat.data/$(task.name)/run0001",
task.params,
MPI.COMM_WORLD,
)
for i = 1:10
Carlo.step!(run, MPI.COMM_WORLD)
end
Carlo.write_measurements(
run,
"$tmpdir/dump_compat.data/$(task.name)/run0001",
)
Carlo.write_checkpoint!(
run,
"$tmpdir/dump_compat.data/$(task.name)/run0001",
MPI.COMM_WORLD,
)
@test run.context.sweeps - run.context.thermalization_sweeps ==
progress[i].sweeps + 10
Carlo.merge_results(
job.mc,
"$tmpdir/dump_compat.data/$(task.name)";
parameters = task.params,
)
end
end
JobTools.concatenate_results(job)
df = ResultTools.dataframe("$tmpdir/dump_compat.results.json")
@test issubset(obs_names, Symbol.(keys(only(df))))
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1190 | using Carlo
@testset "Evaluable" begin
@testset "scalar" begin
func = (x::Real, y::Real) -> x / y
means = ([2, 3, 4], [5, 4, 3])
# TODO: proper statistical test
@test all(
Carlo.jackknife(func, means) .≈ ([0.712962962962963], [0.25726748128610744]),
)
end
@testset "vector" begin
func = x -> x[1] / x[2]
means = ([2 3 4; 5 4 3],)
@test all(
Carlo.jackknife(func, means) .≈ ([0.712962962962963], [0.25726748128610744]),
)
func2 = x -> [x[1] / x[2], 2x[1] / x[2]]
@test all(
Carlo.jackknife(func2, means) .≈ (
[0.712962962962963, 2 * 0.712962962962963],
[0.25726748128610744, 2 * 0.25726748128610744],
),
)
end
@testset "ComplexScalar" begin
func3 = (x::Complex, y::Complex) -> x / y
means = ([1 + 0im, 1 + 1im, 1 + 2im], [2 - 0im, 2 - 1im, 2 - 2im])
results = Carlo.jackknife(func3, means)
@test results[2][1] isa Real
@test all(
results .≈ ([0.2188235294117648 + 0.6847058823529415im], [0.3388562075744883]),
)
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1246 | using JSON
using Dates
using Carlo.JobTools
import Carlo.JobTools as JT
@testset "JobInfo" begin
tm = JT.TaskMaker()
tm.thermalization = 100
tm.sweeps = 7
tm.binsize = 10
task(tm; test = 1)
task(tm; test = 2)
task(tm; test = 3)
tmpdir = mktempdir()
job = JT.JobInfo(
tmpdir * "/test",
TestMC;
tasks = make_tasks(tm),
checkpoint_time = "15:00",
run_time = "30:00",
)
JT.create_job_directory(job)
for task in job.tasks
@test isdir(JT.task_dir(job, task))
open(JT.task_dir(job, task) * "/results.json", "w") do io
write(io, "{}")
end
end
JT.concatenate_results(job)
results = open(JSON.parse, "$(job.dir)/../$(job.name).results.json", "r")
@test results == [Dict(), Dict(), Dict()]
end
@testset "Parse Duration" begin
@test_throws ErrorException JT.parse_duration("10:")
@test_throws ErrorException JT.parse_duration("10::00")
@test_throws ErrorException JT.parse_duration("a:2:00")
@test JT.parse_duration("10:00") == Dates.Second(60 * 10)
@test JT.parse_duration("100") == Dates.Second(100)
@test JT.parse_duration("5:32:10") == Dates.Second(5 * 60 * 60 + 32 * 60 + 10)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 2937 | import Carlo
using HDF5
using MPI
struct TestMC <: AbstractMC end
TestMC(params::AbstractDict) = TestMC()
Carlo.init!(mc::TestMC, ctx::MCContext, params::AbstractDict) = nothing
Carlo.sweep!(mc::TestMC, ctx::MCContext) = nothing
function Carlo.measure!(mc::TestMC, ctx::MCContext)
measure!(ctx, :test, ctx.sweeps)
measure!(ctx, :test2, ctx.sweeps^2)
measure!(ctx, :test_vec, [ctx.sweeps, sin(ctx.sweeps)])
measure!(ctx, :test_rng, rand(ctx.rng))
return nothing
end
Carlo.write_checkpoint(mc::TestMC, out::HDF5.Group) = nothing
Carlo.read_checkpoint!(mc::TestMC, in::HDF5.Group) = nothing
function Carlo.register_evaluables(::Type{TestMC}, eval::Evaluator, params::AbstractDict)
evaluate!((x, y) -> y - x^2, eval, :test4, (:test, :test2))
evaluate!(x -> x^2, eval, :test5, (:test,))
evaluate!(eval, :test6, (:test_vec,)) do x
r = zero(x)
r[1] = x[1]
return r
end
return nothing
end
mutable struct TestParallelRunMC <: AbstractMC
state::Float64
try_measure_on_nonroot::Bool
end
TestParallelRunMC(params::AbstractDict) = TestParallelRunMC(0, false)
function Carlo.init!(
mc::TestParallelRunMC,
ctx::MCContext,
params::AbstractDict,
comm::MPI.Comm,
)
mc.state = MPI.Comm_rank(comm)
mc.try_measure_on_nonroot = get(params, :try_measure_on_nonroot, false)
return nothing
end
function Carlo.sweep!(mc::TestParallelRunMC, ctx::MCContext, comm::MPI.Comm)
chosen_rank = rand(ctx.rng, 0:MPI.Comm_size(comm)-1)
chosen_rank = MPI.Bcast(chosen_rank, 0, comm)
addition_state = MPI.Bcast(mc.state, chosen_rank, comm)
mc.state += sin(addition_state)
return nothing
end
function Carlo.measure!(mc::TestParallelRunMC, ctx::MCContext, comm::MPI.Comm)
mean = MPI.Reduce(mc.state, +, comm)
mean2 = MPI.Reduce(mc.state^2, +, comm)
if MPI.Comm_rank(comm) == 0
measure!(ctx, :test_det, sin(ctx.sweeps))
measure!(ctx, :test, mean)
measure!(ctx, :test2, mean2)
measure!(ctx, :test_local, rand(ctx.rng))
end
if mc.try_measure_on_nonroot
measure!(ctx, :test2, 0.0)
end
return nothing
end
function Carlo.write_checkpoint(
mc::TestParallelRunMC,
out::Union{HDF5.Group,Nothing},
comm::MPI.Comm,
)
states = MPI.Gather(mc.state, comm)
if MPI.Comm_rank(comm) == 0
out["states"] = states
end
return nothing
end
function Carlo.read_checkpoint!(
mc::TestParallelRunMC,
in::Union{HDF5.Group,Nothing},
comm::MPI.Comm,
)
if MPI.Comm_rank(comm) == 0
states = read(in["states"])
else
states = nothing
end
mc.state = MPI.Scatter(states, typeof(mc.state), comm)
return nothing
end
function Carlo.register_evaluables(
::Type{TestParallelRunMC},
eval::Evaluator,
params::AbstractDict,
)
evaluate!((x, y) -> y - x^2, eval, :test4, (:test, :test2))
return nothing
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 439 | using Carlo
@struct_equal Carlo.MCContext
@testset "MCContext" begin
thermalization = 10
ctx = Carlo.MCContext{Random.Xoshiro}(
Dict(:binsize => 3, :seed => 123, :thermalization => thermalization),
)
@test ctx.rng == Random.Xoshiro(123)
Carlo.measure!(ctx, :test, 2.0)
tmp_hdf5_file() do file
Carlo.write_measurements!(ctx, open_group(file, "/"))
end
@test test_checkpointing(ctx)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 679 | using Carlo
@struct_equal Carlo.Measurements
@testset "Measurements" begin
meas = Carlo.Measurements(3)
Carlo.add_sample!(meas, :test, 1)
Carlo.add_sample!(meas, :test, 2)
Carlo.add_sample!(meas, :test, 3)
Carlo.add_sample!(meas, :test2, [3, 4])
@test_throws TypeError Carlo.add_sample!(meas, :test2, 3)
@test_throws ErrorException Carlo.register_observable!(meas, :test2, 2, (3,))
Carlo.register_observable!(meas, :test3, 2, (3, 2))
Carlo.add_sample!(meas, :test3, rand(3, 2))
tmp_hdf5_file() do file
root = open_group(file, "/")
Carlo.write_measurements!(meas, root)
end
@test test_checkpointing(meas)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 5081 | using Carlo
using Random
using Statistics
function create_mock_data(
generator;
runs::Integer,
internal_binsize::Integer,
samples_per_run::Integer,
extra_samples::Integer = 0,
obsname::Symbol,
)
tmpdir = mktempdir()
all_samples = []
filenames = ["$tmpdir/run$i.h5" for i = 1:runs]
idx = 1
for run = 1:runs
nsamples = samples_per_run + extra_samples * (run == 1)
samples = []
h5open(filenames[run], "w") do file
meas = Carlo.Measurements(internal_binsize)
for i = 1:nsamples
value = generator(idx)
Carlo.add_sample!(meas, obsname, value)
if i <= (nsamples ÷ internal_binsize) * internal_binsize
append!(samples, value)
end
idx += 1
end
Carlo.write_measurements!(meas, create_group(file, "observables"))
end
push!(all_samples, samples)
end
return collect(filenames), all_samples
end
@testset "rebin_count" begin
for sample_count = 0:100
rebins = Carlo.calc_rebin_count(sample_count)
@test (sample_count != 0) <= rebins <= sample_count
end
end
@testset "Merge counter" begin
tmpdir = mktempdir()
runs = 4
for internal_binsize in [1, 3, 4]
for samples_per_run in [5, 7]
extra_samples = 100
total_samples = runs * samples_per_run + extra_samples
@testset "samples = $(total_samples), binsize = $(internal_binsize)" begin
filenames, samples = create_mock_data(;
runs = runs,
obsname = :count_test,
internal_binsize = internal_binsize,
samples_per_run = samples_per_run,
extra_samples = extra_samples,
) do idx
return idx
end
filenames2, _ = create_mock_data(
idx -> [idx+1.0im 1.0; 1.0im 0];
runs = runs,
obsname = :vec_test,
internal_binsize = internal_binsize,
samples_per_run = samples_per_run,
extra_samples = extra_samples,
)
@testset for sample_skip in [0, 10]
@testset for rebin_length in [nothing, 1, 2]
results = Carlo.merge_results(filenames; rebin_length, sample_skip)
count_obs = results[:count_test]
skipped_samples = mapreduce(
s -> s[1+internal_binsize*sample_skip:end],
vcat,
samples,
)
rebinned_samples =
skipped_samples[1:internal_binsize*count_obs.rebin_length*Carlo.rebin_count(
count_obs,
)]
@test count_obs.mean[1] ≈ mean(rebinned_samples)
if rebin_length !== nothing
@test count_obs.rebin_length == rebin_length
end
results2 = Carlo.merge_results(
filenames2;
rebin_length = rebin_length,
sample_skip,
)
vec_obs = results2[:vec_test]
@test iszero(vec_obs.error[2])
@test vec_obs.error[1] ≈ count_obs.error[1]
@test vec_obs.mean ≈ [count_obs.mean[1]+1.0im 1.0; 1.0im 0]
end
end
end
end
end
end
@testset "Merge AR(1)" begin
runs = 2
# parameters for an AR(1) random walk y_{t+1} = α y_{t} + N(μ=0, σ)
# autocorrelation time and error of this are known analytically
for ar1_alpha in [0.5, 0.7, 0.8, 0.9]
@testset "Ξ± = $ar1_alpha" begin
ar1_sigma = 0.54
ar1_y = 0
rng = Xoshiro(520)
filenames, _ = create_mock_data(;
runs = runs,
obsname = :ar1_test,
samples_per_run = 200000,
internal_binsize = 1,
) do idx
ar1_y = ar1_alpha * ar1_y + ar1_sigma * randn(rng)
return ar1_y
end
results = Carlo.merge_results(filenames; rebin_length = 100)
# AR(1)
ar1_obs = results[:ar1_test]
expected_mean = 0.0
expected_std = ar1_sigma / sqrt(1 - ar1_alpha^2)
# the exponential autocorrelation time would be -1 / log(ar1_alpha);
# the binning analysis estimates the integrated autocorrelation time instead:
expected_autocorrtime = 0.5 * (1 + 2 * ar1_alpha / (1 - ar1_alpha))
@test abs(ar1_obs.mean[1] - expected_mean) < 4 * ar1_obs.error[1]
@test isapprox(
ar1_obs.autocorrelation_time[1],
expected_autocorrtime,
rtol = 0.1,
)
end
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 362 | using Carlo
using Random
@struct_equal Random.Xoshiro
@testset "Random wrapper" begin
rng = Random.Xoshiro(141376357, 3244512, 3768, 5326171454)
for i = 1:1000
rand(rng)
end
@test rand(rng, UInt32) == 1232139906
@test rand(rng, UInt32) == 1416645027
@test rand(rng, UInt32) == 1517520173
@test test_checkpointing(rng)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 370 | using Carlo
using JSON
@testset "Results" begin
result_obs = Carlo.ResultObservable(
Int64(100),
Int64(3),
[1.0, 2.0, 3.0],
[0.1, 0.1, 0.1],
[0.1, 0.2, 0.3],
zeros(3, 4),
)
repr = JSON.parse(JSON.json(result_obs, 1))
@test repr["mean"] == result_obs.mean
@test repr["error"] == result_obs.error
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 1228 | using Random
import Carlo
@testset "Run" begin
params = Dict(:thermalization => 100, :binsize => 13)
MPI.Init()
run = Carlo.Run{TestMC,Random.Xoshiro}(params, MPI.COMM_WORLD)
sweeps = 131
for i = 1:sweeps
Carlo.step!(run, MPI.COMM_WORLD)
end
@test run.context.sweeps == sweeps
tmpdir = mktempdir()
Carlo.write_checkpoint!(run, tmpdir * "/test", MPI.COMM_WORLD)
@test nothing === Carlo.read_checkpoint(
Carlo.Run{TestMC,Random.Xoshiro},
tmpdir * "/test",
params,
MPI.COMM_WORLD,
)
Carlo.write_checkpoint_finalize(tmpdir * "/test")
run2 = Carlo.read_checkpoint(
Carlo.Run{TestMC,Random.Xoshiro},
tmpdir * "/test",
params,
MPI.COMM_WORLD,
)
@test run.implementation == run2.implementation
@test run.context.rng == run2.context.rng
Carlo.step!(run, MPI.COMM_WORLD)
Carlo.write_checkpoint!(run, tmpdir * "/test", MPI.COMM_WORLD)
run3 = Carlo.read_checkpoint(
Carlo.Run{TestMC,Random.Xoshiro},
tmpdir * "/test",
params,
MPI.COMM_WORLD,
)
@test run3.implementation == run2.implementation
@test run3.context.rng == run2.context.rng
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 4923 | using Serialization
using Carlo.ResultTools
using Logging
@testset "Task Selection" begin
@test Carlo.get_new_task_id(Carlo.SchedulerTask[], 0) === nothing
@test Carlo.get_new_task_id_with_significant_work(Carlo.SchedulerTask[], 0) === nothing
sweeps = [100, 10, 10, 101, 10]
tasks = [Carlo.SchedulerTask(100, s, 10, "") for s in sweeps]
@test Carlo.get_new_task_id(tasks, 1) == 2
@test Carlo.get_new_task_id(tasks, 2) == 3
@test Carlo.get_new_task_id(tasks, 3) == 5
@test Carlo.get_new_task_id(tasks, 4) == 5
@test Carlo.get_new_task_id(tasks, 5) == 2
tasks = [Carlo.SchedulerTask(100, s, 10, "") for s in [100, 100, 100]]
for i = 1:length(tasks)
@test Carlo.get_new_task_id(tasks, i) === nothing
end
@test Carlo.get_new_task_id(tasks, nothing) === nothing
end
function make_test_job(
dir::AbstractString,
sweeps::Integer;
ranks_per_run = 1,
ntasks = 3,
kwargs...,
)
tm = TaskMaker()
tm.sweeps = sweeps
tm.seed = 13245432
tm.thermalization = 14
tm.binsize = 1
for (k, v) in kwargs
setproperty!(tm, k, v)
end
for i = 1:ntasks
task(tm; i = i)
end
return JobInfo(
dir,
ranks_per_run == 1 ? TestMC : TestParallelRunMC;
tasks = make_tasks(tm),
checkpoint_time = "1:00",
run_time = "10:00",
ranks_per_run = ranks_per_run,
)
end
function run_test_job_mpi(job::JobInfo; num_ranks::Integer, silent::Bool = false)
JT.create_job_directory(job)
job_path = job.dir * "/jobfile"
serialize(job_path, job)
cmd = `$(mpiexec()) -n $num_ranks $(Base.julia_cmd()) test_scheduler_mpi.jl $(job_path)`
if silent
cmd = pipeline(cmd; stdout = devnull, stderr = devnull)
end
run(cmd)
return nothing
end
function compare_results(job1::JobInfo, job2::JobInfo)
results1 = ResultTools.dataframe(JT.result_filename(job1))
results2 = ResultTools.dataframe(JT.result_filename(job2))
for (task1, task2) in zip(results1, results2)
for key in keys(task1)
if !startswith(key, "_ll_")
@test (key, task1[key]) == (key, task2[key])
end
end
end
end
@testset "Task Scheduling" begin
mktempdir() do tmpdir
@testset "MPI parallel run mode" begin
job_2rank = make_test_job("$tmpdir/test2_2rank", 100, ranks_per_run = 2)
run_test_job_mpi(job_2rank; num_ranks = 5)
tasks = JT.read_progress(job_2rank)
for task in tasks
@test task.sweeps >= task.target_sweeps
end
job_all_full = make_test_job("$tmpdir/test2_full", 200, ranks_per_run = :all)
run_test_job_mpi(job_all_full; num_ranks = 5)
# test checkpointing by resetting the seed on a finished simulation
job_all_half = make_test_job("$tmpdir/test2_half", 100, ranks_per_run = :all)
run_test_job_mpi(job_all_half; num_ranks = 5)
job_all_half = make_test_job("$tmpdir/test2_half", 200, ranks_per_run = :all)
run_test_job_mpi(job_all_half; num_ranks = 5)
compare_results(job_all_full, job_all_half)
tasks = JT.read_progress(job_all_half)
for task in tasks
@test task.num_runs == 1
@test task.sweeps == task.target_sweeps
end
job_fail = make_test_job(
"$tmpdir/test2_fail",
100;
ranks_per_run = 2,
try_measure_on_nonroot = true,
)
@test_throws ProcessFailedException run_test_job_mpi(
job_fail;
num_ranks = 5,
silent = true,
) # only run leader can measure
end
@testset "MPI" begin
job = make_test_job("$tmpdir/test1", 100)
run_test_job_mpi(job; num_ranks = 4)
tasks = JT.read_progress(job)
for task in tasks
@test task.sweeps >= task.target_sweeps
end
end
@testset "Single" begin
with_logger(Logging.NullLogger()) do
job3_full = make_test_job("$tmpdir/test3_full", 200)
start(Carlo.SingleScheduler, job3_full)
job3_halfhalf = make_test_job("$tmpdir/test3_halfhalf", 100)
start(Carlo.SingleScheduler, job3_halfhalf)
job3_halfhalf = make_test_job("$tmpdir/test3_halfhalf", 200)
start(Carlo.SingleScheduler, job3_halfhalf)
for job in (job3_full, job3_halfhalf)
tasks = JT.read_progress(job)
for task in tasks
@test task.sweeps == task.target_sweeps
end
end
compare_results(job3_full, job3_halfhalf)
end
end
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 200 | using HDF5
using Carlo
using MPI
using Serialization
using Logging
include("test_mc.jl")
job = deserialize(ARGS[1])
with_logger(Logging.NullLogger()) do
Carlo.start(Carlo.MPIScheduler, job)
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 886 | @testset "TaskInfo" begin
@testset "required parameters" begin
@test_throws ErrorException TaskInfo(
"test",
Dict(:thermalization => 100, :binsize => 10),
)
TaskInfo("test2", Dict(:thermalization => 100, :sweeps => 100, :binsize => 10))
end
@testset "list_run_files" begin
tmpdir = mktempdir()
files = ["0001", "0002", "10000", "9999999"]
for filename in files
h5open("$tmpdir/run$filename.dump.h5", "w") do file
create_group(file, "context")
file["context/sweeps"] = 4362
end
end
open("$tmpdir/run0001.dump.h", "w") do file
end
open("$tmpdir/walke0001.dump.h5", "w") do file
end
@test JT.list_run_files(tmpdir, "dump\\.h5") ==
map(x -> "$tmpdir/run$x.dump.h5", files)
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 524 | @struct_equal TaskInfo
@testset "TaskMaker" begin
tm = TaskMaker()
tm.sweeps = 2
tm.thermalization = 1
tm.binsize = "hi"
task(tm, baz = 2.4)
tm.binsize = "ho"
task(tm, baz = 1)
@test make_tasks(tm) == [
TaskInfo(
"task0001",
Dict(:baz => 2.4, :binsize => "hi", :thermalization => 1, :sweeps => 2),
),
TaskInfo(
"task0002",
Dict(:baz => 1, :binsize => "ho", :thermalization => 1, :sweeps => 2),
),
]
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | code | 460 | using HDF5
using StructEquality
function tmp_hdf5_file(func::Function)
tmp = tempname(cleanup = true)
res = try
h5open(func, tmp, "w")
finally
rm(tmp)
end
return res
end
function test_checkpointing(obj)
return tmp_hdf5_file() do file
group = create_group(file, "test")
Carlo.write_checkpoint(obj, group)
obj2 = Carlo.read_checkpoint(typeof(obj), group)
return obj == obj2
end
end
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 1394 | # 
[](https://lukas.weber.science/Carlo.jl/dev/)
[](https://lukas.weber.science/Carlo.jl/stable/)
[](https://github.com/lukas-weber/Carlo.jl/actions)
[](https://codecov.io/gh/lukas-weber/Carlo.jl)
Carlo is a framework for developing high-performance, distributed (quantum) Monte Carlo simulations.
Its aim is to take care of model-independent tasks such as
* autocorrelation and error analysis,
* Monte-Carlo-aware MPI scheduling, and
* checkpointing
while leaving all the flexibility of implementing Monte Carlo updates and estimators to you.
## Getting started
To install the package, type
```julia
using Pkg; Pkg.add("Carlo")
```
The package itself does not include Monte Carlo algorithms. The quickest way to see how to implement one yourself is to check out the reference implementation for the [Ising](https://github.com/lukas-weber/Ising.jl) model.
If you are a practitioner, stay tuned for (not yet released) stochastic series expansion and auxiliary field quantum Monte Carlo packages, or let me know if you are interested in testing them pre-release!
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 1000 | # [Implementing your algorithm](@id abstract_mc)
To run your own Monte Carlo algorithm with Carlo, you need to implement the `AbstractMC` interface documented in this file.
For an example implementation showcasing all the features, take a look at the [Ising](https://github.com/lukas-weber/Ising.jl) example
implementation.
```@docs
Carlo.AbstractMC
```
The following methods all need to be defined for your Monte Carlo algorithm type (here referred to as `YourMC <: AbstractMC`). See [Parallel run mode](@ref parallel_run_mode) for a slightly different interface that allows inner MPI parallelization of your algorithm.
```@docs
Carlo.init!
Carlo.sweep!
Carlo.measure!
Carlo.write_checkpoint
Carlo.read_checkpoint!
Carlo.register_evaluables
```
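As a rough sketch of what an implementation looks like (modeled on the test algorithm shipped with this package; `YourMC` and the observable name are placeholders, and the method bodies are intentionally trivial):

```julia
using Carlo
using HDF5

struct YourMC <: AbstractMC
    # your simulation state goes here
end
YourMC(params::AbstractDict) = YourMC()

Carlo.init!(mc::YourMC, ctx::MCContext, params::AbstractDict) = nothing
Carlo.sweep!(mc::YourMC, ctx::MCContext) = nothing

function Carlo.measure!(mc::YourMC, ctx::MCContext)
    measure!(ctx, :my_observable, 1.0) # record one sample per measurement sweep
    return nothing
end

Carlo.write_checkpoint(mc::YourMC, out::HDF5.Group) = nothing
Carlo.read_checkpoint!(mc::YourMC, in::HDF5.Group) = nothing

Carlo.register_evaluables(::Type{YourMC}, eval::Evaluator, params::AbstractDict) = nothing
```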
# [Interfacing with Carlo features](@id mc_context)
The `MCContext` type, passed to your code by some of the functions above, enables you to use some of the features provided by Carlo.
```@docs
MCContext
is_thermalized
measure!(::MCContext, ::Symbol, ::Any)
```
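For example, a `sweep!` implementation might use the managed random number generator and check the thermalization state. This is only a sketch; the update itself is elided:

```julia
function Carlo.sweep!(mc::YourMC, ctx::MCContext)
    r = rand(ctx.rng) # ctx.rng is seeded and checkpointed by Carlo
    # ... perform an update using r ...
    if !is_thermalized(ctx)
        # e.g. keep tuning algorithm parameters while still thermalizing
    end
    return nothing
end
```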
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 1625 | # [Evaluables](@id evaluables)
In addition to simply calculating the averages of some observables in your Monte Carlo simulations, sometimes you are also interested in quantities that are functions of these observables, such as the Binder cumulant which is related to the ratio of moments of the magnetization.
This presents two problems. First, estimating the errors of such quantities is not trivial due to correlations. Second, naively computing functions of quantities with error bars incurs a bias.
Luckily, Carlo can help you with this by letting you define such quantities, which we call *evaluables*, in the [`Carlo.register_evaluables(YourMC, eval, params)`](@ref) function.
This function gets an `Evaluator` which can be used to
```@docs
evaluate!
```
## Example
This is an example for a `register_evaluables` implementation for a model of a magnet.
```@example
using Carlo
struct YourMC <: AbstractMC end # hide
function Carlo.register_evaluables(
::Type{YourMC},
eval::Evaluator,
params::AbstractDict,
)
T = params[:T]
Lx = params[:Lx]
Ly = get(params, :Ly, Lx)
evaluate!(eval, :Susceptibility, (:Magnetization2,)) do mag2
return Lx * Ly * mag2 / T
end
evaluate!(eval, :BinderRatio, (:Magnetization2, :Magnetization4)) do mag2, mag4
return mag2 * mag2 / mag4
end
return nothing
end
```
Note that this code is called after the simulation is over, so there is no way to access the simulation state. However, it is possible to get the needed information about the system (e.g. temperature, system size) from the task parameters `params`.
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 4043 | # Carlo.jl
## Overview
Carlo is a framework that aims to simplify the implementation of high-performance Monte Carlo codes
by handling the parallelization, checkpointing and error analysis. What sets it apart is a focus on
ease of use and minimalism.
## Installation
Installation is simple via the Julia REPL.
```julia
] add Carlo
```
If you wish to use the system MPI implementation, take a look at the [MPI.jl documentation](https://juliaparallel.org/MPI.jl/stable/configuration/#using_system_mpi) and be aware that in that case you also need to use the system binary of HDF5, as described [here](https://juliaio.github.io/HDF5.jl/stable/#Using-custom-or-system-provided-HDF5-binaries)!
## Usage
In order to work with Carlo, a Monte Carlo algorithm has to implement the [AbstractMC](@ref abstract_mc) interface. A full example of this is given in the
reference implementation for the [Ising](https://github.com/lukas-weber/Ising.jl) model.
Then, to perform a simulation, one writes a *job script* defining all the parameters needed for the simulation, which could look something like the following.
```@example
#!/usr/bin/env julia
using Carlo
using Carlo.JobTools
using Ising
tm = TaskMaker()
tm.sweeps = 10000
tm.thermalization = 2000
tm.binsize = 100
tm.Lx = 10
tm.Ly = 10
Ts = range(0.1, 4, length=20)
for T in Ts
task(tm; T=T)
end
job = JobInfo(@__FILE__, Ising.MC;
checkpoint_time="30:00",
run_time="15:00",
tasks=make_tasks(tm)
)
start(dummy, dummy2) = nothing # hide
start(job, ARGS)
```
This example starts a simulation for the Ising model on the 10×10 lattice for 20 different temperatures. Using the function [`start(job::JobInfo, ARGS)`](@ref) enables the Carlo CLI when we execute the script above as follows.
```bash
./myjob --help
```
The command line interface allows (re)starting a job, merging preliminary results, and showing the completion status of a calculation.
### Starting jobs
```bash
./myjob run
```
This will start a simulation on a single core. To use multiple cores, use MPI.
```bash
mpirun -n $num_cores ./myjob run
```
Once the simulation is started, a directory `myjob.data` will be created to store all simulation data. The name of the directory corresponds to the first argument of `JobInfo`. Usually that will be `@__FILE__`, but you could also collect your simulation data in a different directory.
The data directory will contain HDF5 files for each task of the job, holding checkpointing snapshots and measurement results. Once the job is done, Carlo will average the measurement data for you and produce the file `myjob.results.json` in the same directory as the `myjob.data` directory. This file contains the means and error bars of all observables. See [ResultTools](@ref result_tools) for some tips on consuming this file back into Julia for your plotting or other postprocessing.
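For instance, a minimal way to load the averaged results of the job above (assuming it has finished or been merged):

```julia
using Carlo.ResultTools

df = ResultTools.dataframe("myjob.results.json")
```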
### Job status
```bash
./myjob status
```
Use this command to find out the state of the simulation. It will show a table with the number of completed measurement sweeps, the target number of sweeps, the number of runs, and the fraction of runs that are thermalized.
The fraction is defined as completed thermalization sweeps divided by the total thermalization sweeps needed.
### Merging jobs
```bash
./myjob merge
```
Usually Carlo will automatically merge results once a job is complete, but when you are impatient and want to check on the results of a running or aborted job, this command is your friend. It will produce a `myjob.results.json` file containing the averages of the currently available data.
### Deleting jobs
```bash
./myjob delete
```
This deletes `myjob.data` and `myjob.results.json`. Of course, you should archive your simulation data instead of deleting them. However, if you made an error in a previous simulation, keep in mind that by default Carlo will continue it from the checkpoints.
For that case of restarting a job, there is a handy shortcut as well
```bash
./myjob run --restart
```
## Shortcuts
All commands here have shortcut versions that you can view in the help.
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 302 | # [JobTools](@id jobtools)
This submodule contains tools to specify or read job information necessary to run Carlo calculations.
```@docs
JobInfo
TaskInfo
result_filename
start(job::JobInfo,::AbstractVector{<:AbstractString})
```
## TaskMaker
```@docs
TaskMaker
task
make_tasks
current_task_name
```
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 2327 | # [Parallel run mode](@id parallel_run_mode)
One of Carlo's features is to automatically parallelize independent Monte Carlo simulation runs over MPI. These runs can either share the same set of parameters (in which case their results are averaged) or have different parameters entirely.
Sometimes this kind of trivial parallelism is not satisfactory. For example, it does not shorten the time needed for thermalization, and some Monte Carlo algorithms can benefit from some sort of population control that exchanges data between different simulations of the same random process.
For these cases, Carlo features a *parallel run mode* where each Carlo run does not run on one but multiple MPI ranks. Parallel run mode is enabled in [`JobInfo`](@ref) by passing the `ranks_per_run` argument.
## Parallel `AbstractMC` interface
In order to use parallel run mode, the Monte Carlo algorithm must implement a modified version of the [`AbstractMC`](@ref) interface including additional `MPI.Comm` arguments that allow coordination between the different ranks per run.
The first three functions
Carlo.init!(mc::YourMC, ctx::MCContext, params::AbstractDict, comm::MPI.Comm)
Carlo.sweep!(mc::YourMC, ctx::MCContext, comm::MPI.Comm)
Carlo.measure!(mc::YourMC, ctx::MCContext, comm::MPI.Comm)
simply receive an additional `comm` argument. An important restriction here is that only rank 0 can make measurements on the given `MCContext`, so you are responsible for communicating the measurement results to that rank.
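A sketch of this pattern, mirroring the test algorithm in this repository (assuming `YourMC` is a mutable struct with a numeric `state` field): the per-rank data is reduced onto rank 0, which then records the measurement.

```julia
using MPI

function Carlo.measure!(mc::YourMC, ctx::MCContext, comm::MPI.Comm)
    total = MPI.Reduce(mc.state, +, comm) # result is only valid on rank 0
    if MPI.Comm_rank(comm) == 0
        measure!(ctx, :state_mean, total / MPI.Comm_size(comm))
    end
    return nothing
end
```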
For checkpointing, there is a similar catch.
Carlo.write_checkpoint(mc::YourMC, out::Union{HDF5.Group,Nothing}, comm::MPI.Comm)
Carlo.read_checkpoint!(mc::YourMC, in::Union{HDF5.Group,Nothing}, comm::MPI.Comm)
In these methods, only rank 0 receives an `HDF5.Group`; the other ranks receive `nothing` and need to communicate their data to rank 0. Carlo does not use the collective writing mode of parallel HDF5.
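One way to handle this, again following the test algorithm in this repository (with the same placeholder `state` field), is to gather the per-rank state onto rank 0 for writing and scatter it back when reading:

```julia
using HDF5, MPI

function Carlo.write_checkpoint(mc::YourMC, out::Union{HDF5.Group,Nothing}, comm::MPI.Comm)
    states = MPI.Gather(mc.state, comm) # full vector only on rank 0
    if MPI.Comm_rank(comm) == 0
        out["states"] = states
    end
    return nothing
end

function Carlo.read_checkpoint!(mc::YourMC, in::Union{HDF5.Group,Nothing}, comm::MPI.Comm)
    states = MPI.Comm_rank(comm) == 0 ? read(in["states"]) : nothing
    mc.state = MPI.Scatter(states, typeof(mc.state), comm)
    return nothing
end
```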
Sometimes, you also want to share work during the construction of `YourMC`. For this reason, Carlo will add the hidden parameter `_comm` to the parameter dictionary received by the constructor `YourMC(params::AbstractDict)`. `params[:_comm]` is then an MPI communicator similar to the `comm` argument of the functions above.
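A constructor using it could look like this sketch:

```julia
function YourMC(params::AbstractDict)
    comm = params[:_comm] # MPI communicator shared by the ranks of this run
    # ... distribute expensive setup work across the ranks ...
    return YourMC() # assuming a zero-argument inner constructor
end
```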
Lastly, the `Carlo.register_evaluables` function remains the same as in the normal interface.
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 625 | # [ResultTools](@id result_tools)
This is a small module to ease importing Carlo results back into Julia. It contains the function
```@docs
Carlo.ResultTools.dataframe
```
If we use ResultTools with DataFrames.jl to read out the results of the Ising example, it would be the following.
```@example
using Plots
using DataFrames
using Carlo.ResultTools
df = DataFrame(ResultTools.dataframe("example.results.json"))
plot(df.T, df.Energy; xlabel = "Temperature", ylabel="Energy per spin", group=df.Lx, legendtitle="L")
```
In the plot we can nicely see how the model approaches the ground state energy at low temperatures.
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.2.1 | 1f06171b2b00e966e2247339bb592ff6ac698b19 | docs | 353 | # [Random Number Generators](@id rng)
Carlo takes care of storing and managing the state of random number generators (RNG) for you. It is accessible through the `rng` field of [`MCContext`](@ref)
and the type of RNG to use can be set by the `rng` parameter in every task (see [`TaskInfo`](@ref)).
The currently supported types are
- `Random.Xoshiro`
| Carlo | https://github.com/lukas-weber/Carlo.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 675 | #! /usr/bin/env julia
using Documenter
using GeometryBasics
DocMeta.setdocmeta!(GeometryBasics, :DocTestSetup, :(using GeometryBasics); recursive=true)
makedocs(format=Documenter.HTML(prettyurls=get(ENV, "CI", "false") == "true"),
sitename="GeometryBasics.jl",
pages=[
"index.md",
"primitives.md",
"rectangles.md",
"polygons.md",
"meshes.md",
"decomposition.md",
"metadata.md",
"api.md"
],
modules=[GeometryBasics])
deploydocs(repo="github.com/JuliaGeometry/GeometryBasics.jl.git", push_preview=true)
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 2875 | module GeometryBasics
using StaticArrays, Tables, StructArrays, IterTools, LinearAlgebra
using GeoInterface
import Extents
using EarCut_jll
using Base: @propagate_inbounds
include("fixed_arrays.jl")
include("offsetintegers.jl")
include("basic_types.jl")
include("primitives/rectangles.jl")
include("primitives/spheres.jl")
include("primitives/cylinders.jl")
include("primitives/pyramids.jl")
include("primitives/particles.jl")
include("interfaces.jl")
include("metadata.jl")
include("viewtypes.jl")
include("geometry_primitives.jl")
include("meshes.jl")
include("triangulation.jl")
include("lines.jl")
include("boundingboxes.jl")
include("deprecated.jl")
include("geointerface.jl")
export AbstractGeometry, GeometryPrimitive
export Mat, Point, Vec
export LineFace, Polytope, Line, NgonFace, convert_simplex
export LineString, AbstractPolygon, Polygon, MultiPoint, MultiLineString, MultiPolygon
export Simplex, connect, Triangle, NSimplex, Tetrahedron
export QuadFace, metafree, coordinates, TetrahedronFace
export TupleView, SimplexFace, Mesh, meta
export Triangle, TriangleP
export AbstractFace, TriangleFace, QuadFace, GLTriangleFace
export OffsetInteger, ZeroIndex, OneIndex, GLIndex
export FaceView, SimpleFaceView
export AbstractPoint, PointMeta, PointWithUV
export PolygonMeta, MultiPointMeta, MultiLineStringMeta, MeshMeta, LineStringMeta,
MultiPolygonMeta
export decompose, coordinates, faces, normals, decompose_uv, decompose_normals,
texturecoordinates
export Tesselation, pointmeta, Normal, UV, UVW
export GLTriangleFace, GLUVMesh3D
export AbstractMesh, Mesh, TriangleMesh
export GLNormalMesh2D
export MetaT, meta_table
# all the different predefined mesh types
# Note: meshes can contain arbitrary meta information,
export AbstractMesh, TriangleMesh, PlainMesh, GLPlainMesh, GLPlainMesh2D, GLPlainMesh3D
export UVMesh, GLUVMesh, GLUVMesh2D, GLUVMesh3D
export NormalMesh, GLNormalMesh, GLNormalMesh2D, GLNormalMesh3D
export NormalUVMesh, GLNormalUVMesh, GLNormalUVMesh2D, GLNormalUVMesh3D
export NormalUVWMesh, GLNormalUVWMesh, GLNormalUVWMesh2D, GLNormalUVWMesh3D
# mesh creation functions
export triangle_mesh, triangle_mesh, uv_mesh
export uv_mesh, normal_mesh, uv_normal_mesh
export height, origin, radius, width, widths
export HyperSphere, Circle, Sphere
export Cylinder, Cylinder2, Cylinder3, Pyramid, extremity
export HyperRectangle, Rect, Rect2, Rect3, Recti, Rect2i, Rect3i, Rectf, Rect2f, Rect3f, Rectd, Rect2d, Rect3d
export before, during, meets, overlaps, intersects, finishes
export centered, direction, area, volume, update
export max_dist_dim, max_euclidean, max_euclideansq, min_dist_dim, min_euclidean
export min_euclideansq, minmax_dist_dim, minmax_euclidean, minmax_euclideansq
export self_intersections, split_intersections
if Base.VERSION >= v"1.4.2"
include("precompile.jl")
_precompile_()
end
end # module
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 14530 | """
Abstract Geometry in R{Dim} with Number type T
"""
abstract type AbstractGeometry{Dim,T<:Number} end
abstract type GeometryPrimitive{Dim,T} <: AbstractGeometry{Dim,T} end
Base.ndims(::AbstractGeometry{Dim}) where {Dim} = Dim
"""
Geometry made of N connected points. Connected as one flat geometry, it makes a Ngon / Polygon.
Connected as volume it will be a Simplex / Tri / Cube.
Note that `Polytope{N} where N == 3` denotes a Triangle both as a Simplex or Ngon.
"""
abstract type Polytope{Dim,T} <: AbstractGeometry{Dim,T} end
abstract type AbstractPolygon{Dim,T} <: Polytope{Dim,T} end
abstract type AbstractPoint{Dim,T} <: StaticVector{Dim,T} end
abstract type AbstractFace{N,T} <: StaticVector{N,T} end
abstract type AbstractSimplexFace{N,T} <: AbstractFace{N,T} end
abstract type AbstractNgonFace{N,T} <: AbstractFace{N,T} end
abstract type AbstractSimplex{Dim,N,T} <: StaticVector{Dim,T} end
"""
Face index, connecting points to form a simplex
"""
@fixed_vector SimplexFace = AbstractSimplexFace
const TetrahedronFace{T} = SimplexFace{4,T}
Face(::Type{<:SimplexFace{N}}, ::Type{T}) where {N,T} = SimplexFace{N,T}
"""
Face index, connecting points to form an Ngon
"""
@fixed_vector NgonFace = AbstractNgonFace
const LineFace{T} = NgonFace{2,T}
const TriangleFace{T} = NgonFace{3,T}
const QuadFace{T} = NgonFace{4,T}
function Base.show(io::IO, x::TriangleFace{T}) where {T}
return print(io, "TriangleFace(", join(x, ", "), ")")
end
Face(::Type{<:NgonFace{N}}, ::Type{T}) where {N,T} = NgonFace{N,T}
Face(F::Type{NgonFace{N,FT}}, ::Type{T}) where {FT,N,T} = F
@propagate_inbounds Base.getindex(x::Polytope, i::Integer) = coordinates(x)[i]
@propagate_inbounds Base.iterate(x::Polytope) = iterate(coordinates(x))
@propagate_inbounds Base.iterate(x::Polytope, i) = iterate(coordinates(x), i)
"""
Fixed Size Polygon, e.g.
- N = 1-2 : Illegal!
- N = 3 : Triangle
- N = 4 : Quadrilateral (or Quad, or Tetragon)
- N = 5 : Pentagon
- ...
"""
struct Ngon{Dim,T<:Real,N,Point<:AbstractPoint{Dim,T}} <: AbstractPolygon{Dim,T}
points::SVector{N,Point}
end
const NNgon{N} = Ngon{Dim,T,N,P} where {Dim,T,P}
function (::Type{<:NNgon{N1}})(p0::P, points::Vararg{P,N2}) where {P<:AbstractPoint{Dim,T},
N1, N2} where {Dim,T}
@assert N1 == N2+1
return Ngon{Dim,T,N1,P}(SVector(p0, points...))
end
Base.show(io::IO, x::NNgon{N}) where {N} = print(io, "Ngon{$N}(", join(x, ", "), ")")
# Interfaces
coordinates(x::Ngon) = x.points
# Base Array interface
Base.length(::Type{<:NNgon{N}}) where {N} = N
Base.length(::NNgon{N}) where {N} = N
"""
The Ngon Polytope element type when indexing an array of points with a SimplexFace
"""
function Polytope(P::Type{<:AbstractPoint{Dim,T}},
::Type{<:AbstractNgonFace{N,IT}}) where {N,Dim,T,IT}
return Ngon{Dim,T,N,P}
end
"""
The fully concrete Ngon type, when constructed from a point type!
"""
function Polytope(::Type{<:NNgon{N}}, P::Type{<:AbstractPoint{NDim,T}}) where {N,NDim,T}
return Ngon{NDim,T,N,P}
end
const LineP{Dim,T,P<:AbstractPoint{Dim,T}} = Ngon{Dim,T,2,P}
const Line{Dim,T} = LineP{Dim,T,Point{Dim,T}}
# Simplex{D, T, 3} & Ngon{D, T, 3} are both representing a triangle.
# Since Ngon is supposed to be flat and a triangle is flat, lets prefer Ngon
# for triangle:
const TriangleP{Dim,T,P<:AbstractPoint{Dim,T}} = Ngon{Dim,T,3,P}
const Triangle{Dim,T} = TriangleP{Dim,T,Point{Dim,T}}
const Triangle3d{T} = Triangle{3,T}
Base.show(io::IO, x::TriangleP) = print(io, "Triangle(", join(x, ", "), ")")
Base.summary(io::IO, ::Type{<:TriangleP}) = print(io, "Triangle")
const Quadrilateral{Dim,T} = Ngon{Dim,T,4,P} where {P<:AbstractPoint{Dim,T}}
Base.show(io::IO, x::Quadrilateral) = print(io, "Quad(", join(x, ", "), ")")
Base.summary(io::IO, ::Type{<:Quadrilateral}) = print(io, "Quad")
function coordinates(lines::AbstractArray{LineP{Dim,T,PointType}}) where {Dim,T,PointType}
return if lines isa Base.ReinterpretArray
return coordinates(lines.parent)
else
result = PointType[]
for line in lines
append!(result, coordinates(line))
end
return result
end
end
"""
A `Simplex` is a generalization of an N-dimensional tetrahedra and can be thought
of as a minimal convex set containing the specified points.
* A 0-simplex is a point.
* A 1-simplex is a line segment.
* A 2-simplex is a triangle.
* A 3-simplex is a tetrahedron.
Note that this datatype is offset by one compared to the traditional
mathematical terminology. So a one-simplex is represented as `Simplex{2,T}`.
This is for a simpler implementation.
It applies to infinite dimensions. The structure of this type is designed
to allow embedding in higher-order spaces by parameterizing on `T`.
"""
struct Simplex{Dim,T<:Real,N,Point<:AbstractPoint{Dim,T}} <: Polytope{Dim,T}
points::SVector{N,Point}
end
const NSimplex{N} = Simplex{Dim,T,N,P} where {Dim,T,P}
const TetrahedronP{T,P<:AbstractPoint{3,T}} = Simplex{3,T,4,P}
const Tetrahedron{T} = TetrahedronP{T,Point{3,T}}
Base.show(io::IO, x::TetrahedronP) = print(io, "Tetrahedron(", join(x, ", "), ")")
coordinates(x::Simplex) = x.points
function (::Type{<:NSimplex{N1}})(p0::P, points::Vararg{P,N2}) where {P<:AbstractPoint{Dim,T},
N1, N2} where {Dim,T}
@assert N1 == N2+1
return Simplex{Dim,T,N1,P}(SVector(p0, points...))
end
# Base Array interface
Base.length(::Type{<:NSimplex{N}}) where {N} = N
Base.length(::NSimplex{N}) where {N} = N
"""
The Simplex Polytope element type when indexing an array of points with a SimplexFace
"""
function Polytope(P::Type{<:AbstractPoint{Dim,T}},
::Type{<:AbstractSimplexFace{N}}) where {N,Dim,T}
return Simplex{Dim,T,N,P}
end
"""
The fully concrete Simplex type, when constructed from a point type!
"""
function Polytope(::Type{<:NSimplex{N}}, P::Type{<:AbstractPoint{NDim,T}}) where {N,NDim,T}
return Simplex{NDim,T,N,P}
end
Base.show(io::IO, x::LineP) = print(io, "Line(", x[1], " => ", x[2], ")")
"""
LineString(points::AbstractVector{<:AbstractPoint})
A LineString is a geometry of connected line segments
"""
struct LineString{Dim,T<:Real,P<:AbstractPoint,V<:AbstractVector{<:LineP{Dim,T,P}}} <:
AbstractVector{LineP{Dim,T,P}}
points::V
end
coordinates(x::LineString) = coordinates(x.points)
Base.copy(x::LineString) = LineString(copy(x.points))
Base.size(x::LineString) = size(getfield(x, :points))
Base.getindex(x::LineString, i) = getindex(getfield(x, :points), i)
function LineString(points::AbstractVector{LineP{Dim,T,P}}) where {Dim,T,P}
return LineString{Dim,T,P,typeof(points)}(points)
end
"""
LineString(points::AbstractVector{<: AbstractPoint}, skip = 1)
Creates a LineString from a vector of points.
With `skip == 1`, the default, it will connect the line like this:
```julia
points = Point[a, b, c, d]
linestring = LineString(points)
@assert linestring == LineString([a => b, b => c, c => d])
```
"""
function LineString(points::AbstractVector{<:AbstractPoint}, skip=1)
return LineString(connect(points, LineP, skip))
end
function LineString(points::AbstractVector{<:Pair{P,P}}) where {P<:AbstractPoint{N,T}} where {N,
T}
return LineString(reinterpret(LineP{N,T,P}, points))
end
function LineString(points::AbstractVector{<:AbstractPoint},
faces::AbstractVector{<:LineFace})
return LineString(connect(points, faces))
end
"""
LineString(points::AbstractVector{<: AbstractPoint}, indices::AbstractVector{<: Integer}, skip = 1)
Creates a LineString from a vector of points and an index list.
With `skip == 1`, the default, it will connect the line like this:
```julia
points = Point[a, b, c, d]; faces = [1, 2, 3, 4]
linestring = LineString(points, faces)
@assert linestring == LineString([a => b, b => c, c => d])
```
To make a segmented line, set skip to 2
```julia
points = Point[a, b, c, d]; faces = [1, 2, 3, 4]
linestring = LineString(points, faces, 2)
@assert linestring == LineString([a => b, c => d])
```
"""
function LineString(points::AbstractVector{<:AbstractPoint},
indices::AbstractVector{<:Integer}, skip=1)
faces = connect(indices, LineFace, skip)
return LineString(points, faces)
end
"""
Polygon(exterior::AbstractVector{<:Point})
Polygon(exterior::AbstractVector{<:Point}, interiors::Vector{<:AbstractVector{<:AbstractPoint}})
"""
struct Polygon{Dim,T<:Real,P<:AbstractPoint{Dim,T},L<:AbstractVector{<:LineP{Dim,T,P}},
V<:AbstractVector{L}} <: AbstractPolygon{Dim,T}
exterior::L
interiors::V
end
Base.copy(x::Polygon) = Polygon(copy(x.exterior), copy(x.interiors))
function Base.:(==)(a::Polygon, b::Polygon)
return (a.exterior == b.exterior) && (a.interiors == b.interiors)
end
function Polygon(exterior::E,
interiors::AbstractVector{E}) where {E<:AbstractVector{LineP{Dim,T,P}}} where {Dim,
T,
P}
return Polygon{Dim,T,P,typeof(exterior),typeof(interiors)}(exterior, interiors)
end
Polygon(exterior::L) where {L<:AbstractVector{<:LineP}} = Polygon(exterior, L[])
function Polygon(exterior::AbstractVector{P},
skip::Int=1) where {P<:AbstractPoint{Dim,T}} where {Dim,T}
return Polygon(LineString(exterior, skip))
end
function Polygon(exterior::AbstractVector{P}, faces::AbstractVector{<:Integer},
skip::Int=1) where {P<:AbstractPoint{Dim,T}} where {Dim,T}
return Polygon(LineString(exterior, faces, skip))
end
function Polygon(exterior::AbstractVector{P},
faces::AbstractVector{<:LineFace}) where {P<:AbstractPoint{Dim,T}} where {Dim,
T}
return Polygon(LineString(exterior, faces))
end
function Polygon(exterior::AbstractVector{P},
interior::AbstractVector{<:AbstractVector{P}}) where {P<:AbstractPoint{Dim,
T}} where {Dim,
T}
ext = LineString(exterior)
# We need to take extra care for empty interiors, since
# if we just map over it, it won't infer the element type correctly!
int = typeof(ext)[]
foreach(x -> push!(int, LineString(x)), interior)
return Polygon(ext, int)
end
function coordinates(polygon::Polygon{N,T,PointType}) where {N,T,PointType}
exterior = coordinates(polygon.exterior)
if isempty(polygon.interiors)
return exterior
else
result = PointType[]
append!(result, exterior)
foreach(x -> append!(result, coordinates(x)), polygon.interiors)
return result
end
end
"""
MultiPolygon(polygons::AbstractPolygon)
"""
struct MultiPolygon{Dim,T<:Real,Element<:AbstractPolygon{Dim,T},
A<:AbstractVector{Element}} <: AbstractVector{Element}
polygons::A
end
function MultiPolygon(polygons::AbstractVector{P};
kw...) where {P<:AbstractPolygon{Dim,T}} where {Dim,T}
return MultiPolygon(meta(polygons; kw...))
end
Base.getindex(mp::MultiPolygon, i) = mp.polygons[i]
Base.size(mp::MultiPolygon) = size(mp.polygons)
struct MultiLineString{Dim,T<:Real,Element<:LineString{Dim,T},A<:AbstractVector{Element}} <:
AbstractVector{Element}
linestrings::A
end
function MultiLineString(linestrings::AbstractVector{L};
kw...) where {L<:AbstractVector{LineP{Dim,T,P}}} where {Dim,T,P}
return MultiLineString(meta(linestrings; kw...))
end
Base.getindex(ms::MultiLineString, i) = ms.linestrings[i]
Base.size(ms::MultiLineString) = size(ms.linestrings)
"""
MultiPoint(points::AbstractVector{AbstractPoint})
A collection of points
"""
struct MultiPoint{Dim,T<:Real,P<:AbstractPoint{Dim,T},A<:AbstractVector{P}} <:
AbstractVector{P}
points::A
end
function MultiPoint(points::AbstractVector{P};
kw...) where {P<:AbstractPoint{Dim,T}} where {Dim,T}
return MultiPoint(meta(points; kw...))
end
Base.getindex(mpt::MultiPoint, i) = mpt.points[i]
Base.size(mpt::MultiPoint) = size(mpt.points)
"""
AbstractMesh
An abstract mesh is a collection of Polytope elements (Simplices / Ngons).
The connections are defined via faces(mesh), the coordinates of the elements are returned by
coordinates(mesh). Arbitrary meta information can be attached per point or per face
"""
abstract type AbstractMesh{Element<:Polytope} <: AbstractVector{Element} end
"""
Mesh <: AbstractVector{Element}
The concrete AbstractMesh implementation.
"""
struct Mesh{Dim,T<:Number,Element<:Polytope{Dim,T},V<:AbstractVector{Element}} <:
AbstractMesh{Element}
simplices::V # usually a FaceView, to connect a set of points via a set of faces.
end
Tables.schema(mesh::Mesh) = Tables.schema(getfield(mesh, :simplices))
function Base.getproperty(mesh::Mesh, name::Symbol)
if name === :position
# a mesh always has position defined by coordinates...
return metafree(coordinates(mesh))
else
return getproperty(getfield(mesh, :simplices), name)
end
end
function Base.propertynames(mesh::Mesh)
names = propertynames(getfield(mesh, :simplices))
if :position in names
return names
else
# a mesh always has positions!
return (names..., :position)
end
end
function Base.summary(io::IO, ::Mesh{Dim,T,Element}) where {Dim,T,Element}
print(io, "Mesh{$Dim, $T, ")
summary(io, Element)
return print(io, "}")
end
Base.size(mesh::Mesh) = size(getfield(mesh, :simplices))
Base.getindex(mesh::Mesh, i::Integer) = getfield(mesh, :simplices)[i]
function Mesh(elements::AbstractVector{<:Polytope{Dim,T}}) where {Dim,T}
return Mesh{Dim,T,eltype(elements),typeof(elements)}(elements)
end
function Mesh(points::AbstractVector{<:AbstractPoint},
faces::AbstractVector{<:AbstractFace})
return Mesh(connect(points, faces))
end
function Mesh(points::AbstractVector{<:AbstractPoint}, faces::AbstractVector{<:Integer},
facetype=TriangleFace, skip=1)
return Mesh(connect(points, connect(faces, facetype, skip)))
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 1208 | function Rect(geometry::AbstractArray{<:Point{N,T}}) where {N,T}
return Rect{N,T}(geometry)
end
"""
Construct a HyperRectangle enclosing all points.
"""
function Rect{N1,T1}(geometry::AbstractArray{PT}) where {N1,T1,PT<:AbstractPoint}
N2, T2 = length(PT), eltype(PT)
@assert N1 >= N2
vmin = Point{N2,T2}(typemax(T2))
vmax = Point{N2,T2}(typemin(T2))
for p in geometry
vmin, vmax = minmax(p, vmin, vmax)
end
o = vmin
w = vmax - vmin
return if N1 > N2
z = zero(Vec{N1 - N2,T1})
Rect{N1,T1}(vcat(o, z), vcat(w, z))
else
Rect{N1,T1}(o, w)
end
end
function Rect(primitive::GeometryPrimitive{N,T}) where {N,T}
return Rect{N,T}(primitive)
end
function Rect{T}(primitive::GeometryPrimitive{N,T}) where {N,T}
return Rect{N,T}(primitive)
end
function Rect{T}(a::Pyramid) where {T}
w, h = a.width / T(2), a.length
m = Vec{3,T}(a.middle)
return Rect{T}(m .- Vec{3,T}(w, w, 0), m .+ Vec{3,T}(w, w, h))
end
function Rect{T}(a::Sphere) where {T}
mini, maxi = extrema(a)
return Rect{T}(mini, maxi .- mini)
end
Rect{T}(a) where {T} = Rect{T}(coordinates(a))
Rect{N,T}(a) where {N,T} = Rect{N,T}(coordinates(a))
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 784 | using Base: @deprecate_binding
# Types ...f0 renamed to ...f
@deprecate_binding Vecf0 Vecf
@deprecate_binding Pointf0 Pointf
for i in 1:4
for T in [:Point, :Vec]
oldname = Symbol("$T$(i)f0")
newname = Symbol("$T$(i)f")
@eval begin
@deprecate_binding $oldname $newname
end
end
oldname = Symbol("Mat$(i)f0")
newname = Symbol("Mat$(i)f")
@eval begin
@deprecate_binding $oldname $newname
end
end
# Rect types
@deprecate_binding Rect2D Rect2
@deprecate_binding Rect3D Rect3
@deprecate_binding FRect Rectf
@deprecate_binding FRect2D Rect2f
@deprecate_binding FRect3D Rect3f
@deprecate_binding IRect Recti
@deprecate_binding IRect2D Rect2i
@deprecate_binding IRect3D Rect3i
@deprecate_binding TRect RectT
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 5641 |
function unit(::Type{T}, i::Integer) where {T <: StaticVector}
tup = ntuple(Val(length(T))) do j
return ifelse(i == j, 1, 0)
end
return T(tup)
end
macro fixed_vector(name_parent)
@assert name_parent.head == :(=)
name, parent = name_parent.args
expr = quote
struct $(name){S,T} <: $(parent){S,T}
data::NTuple{S,T}
function $(name){S,T}(x::NTuple{S,T}) where {S,T}
return new{S,T}(x)
end
function $(name){S,T}(x::NTuple{S,Any}) where {S,T}
return new{S,T}(StaticArrays.convert_ntuple(T, x))
end
end
size_or(::Type{$(name)}, or) = or
eltype_or(::Type{$(name)}, or) = or
eltype_or(::Type{$(name){S,T} where S}, or) where {T} = T
eltype_or(::Type{$(name){S,T} where T}, or) where {S} = or
eltype_or(::Type{$(name){S,T}}, or) where {S,T} = T
size_or(::Type{$(name){S,T} where S}, or) where {T} = or
size_or(::Type{$(name){S,T} where T}, or) where {S} = Size{(S,)}()
size_or(::Type{$(name){S,T}}, or) where {S,T} = (S,)
# Array constructor
function $(name){S}(x::AbstractVector{T}) where {S,T}
@assert S <= length(x)
return $(name){S,T}(ntuple(i -> x[i], Val(S)))
end
function $(name){S,T1}(x::AbstractVector{T2}) where {S,T1,T2}
@assert S <= length(x)
return $(name){S,T1}(ntuple(i -> convert(T1, x[i]), Val(S)))
end
function $(name){S,T}(x) where {S,T}
return $(name){S,T}(ntuple(i -> convert(T, x), Val(S)))
end
$(name){S}(x::T) where {S,T} = $(name){S,T}(ntuple(i -> x, Val(S)))
$(name){1,T}(x::T) where {T} = $(name){1,T}((x,))
$(name)(x::NTuple{S}) where {S} = $(name){S}(x)
function $(name)(x::T) where {S,T <: Tuple{Vararg{Any,S}}}
return $(name){S,StaticArrays.promote_tuple_eltype(T)}(x)
end
function $(name){S}(x::T) where {S,T <: Tuple}
return $(name){S,StaticArrays.promote_tuple_eltype(T)}(x)
end
$(name){S,T}(x::StaticVector) where {S,T} = $(name){S,T}(Tuple(x))
@generated function (::Type{$(name){S,T}})(x::$(name)) where {S,T}
idx = [:(x[$i]) for i in 1:S]
return quote
$($(name)){S,T}($(idx...))
end
end
@generated function Base.convert(::Type{$(name){S,T}}, x::$(name)) where {S,T}
idx = [:(x[$i]) for i in 1:S]
return quote
$($(name)){S,T}($(idx...))
end
end
@generated function (::Type{SV})(x::StaticVector) where {SV <: $(name)}
len = size_or(SV, size(x))[1]
return if length(x) == len
:(SV(Tuple(x)))
elseif length(x) > len
elems = [:(x[$i]) for i in 1:len]
:(SV($(Expr(:tuple, elems...))))
else
error("Static Vector too short: $x, target type: $SV")
end
end
Base.@pure StaticArrays.Size(::Type{$(name){S,Any}}) where {S} = Size(S)
Base.@pure StaticArrays.Size(::Type{$(name){S,T}}) where {S,T} = Size(S)
Base.@propagate_inbounds function Base.getindex(v::$(name){S,T}, i::Int) where {S,T}
return v.data[i]
end
Base.Tuple(v::$(name)) = v.data
function Base.convert(::Type{$(name){S,T}}, x::NTuple{S,T}) where {S,T}
return $(name){S,T}(x)
end
function Base.convert(::Type{$(name){S,T}}, x::Tuple) where {S,T}
return $(name){S,T}(convert(NTuple{S,T}, x))
end
@generated function StaticArrays.similar_type(::Type{SV}, ::Type{T},
s::Size{S}) where {SV <: $(name),T,S}
return if length(S) === 1
$(name){S[1],T}
else
StaticArrays.default_similar_type(T, s(), Val{length(S)})
end
end
Base.:(*)(a::$name, b::$name) = a .* b
function Base.broadcasted(f, a::AbstractArray{T}, b::$name) where {T <: $name}
return Base.broadcasted(f, a, (b,))
end
end
return esc(expr)
end
abstract type AbstractPoint{Dim,T} <: StaticVector{Dim,T} end
@fixed_vector Point = AbstractPoint
@fixed_vector Vec = StaticVector
const Mat = SMatrix
const VecTypes{N,T} = Union{StaticVector{N,T},NTuple{N,T}}
const Vecf{N} = Vec{N,Float32}
const Pointf{N} = Point{N,Float32}
Base.isnan(p::Union{AbstractPoint,Vec}) = any(isnan, p)
Base.isinf(p::Union{AbstractPoint,Vec}) = any(isinf, p)
Base.isfinite(p::Union{AbstractPoint,Vec}) = all(isfinite, p)
## Generate aliases
## As a text file instead of eval/macro, to not confuse code linter
#=
open(joinpath(@__DIR__, "generated-aliases.jl"), "w") do io
for i in 1:4
for T in [:Point, :Vec, :Mat]
namei = "$T$i"
res = T == :Mat ? "Mat{$i,$i,T,$(i * i)}" : "$T{$i,T}"
println(io, "const $(namei){T} = $res")
println(io, "export $namei")
for (postfix, t) in ["d" => Float64, "f" => Float32, "i" => Int, "ui" => UInt]
namep = "$T$i$postfix"
println(io, "const $(namep) = $(namei){$t}")
println(io, "export $namep")
# mnamep = "$(mname)$postfix"
# println(io, "const $mnamep = $mname{$t}")
# println(io, "export $mnamep")
end
end
end
end
=#
include("generated-aliases.jl")
export Mat, Vec, Point, unit
export Vecf, Pointf
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 2537 | const Point1{T} = Point{1,T}
export Point1
const Point1d = Point1{Float64}
export Point1d
const Point1f = Point1{Float32}
export Point1f
const Point1i = Point1{Int64}
export Point1i
const Point1ui = Point1{UInt64}
export Point1ui
const Vec1{T} = Vec{1,T}
export Vec1
const Vec1d = Vec1{Float64}
export Vec1d
const Vec1f = Vec1{Float32}
export Vec1f
const Vec1i = Vec1{Int64}
export Vec1i
const Vec1ui = Vec1{UInt64}
export Vec1ui
const Mat1{T} = Mat{1,1,T,1}
export Mat1
const Mat1d = Mat1{Float64}
export Mat1d
const Mat1f = Mat1{Float32}
export Mat1f
const Mat1i = Mat1{Int64}
export Mat1i
const Mat1ui = Mat1{UInt64}
export Mat1ui
const Point2{T} = Point{2,T}
export Point2
const Point2d = Point2{Float64}
export Point2d
const Point2f = Point2{Float32}
export Point2f
const Point2i = Point2{Int64}
export Point2i
const Point2ui = Point2{UInt64}
export Point2ui
const Vec2{T} = Vec{2,T}
export Vec2
const Vec2d = Vec2{Float64}
export Vec2d
const Vec2f = Vec2{Float32}
export Vec2f
const Vec2i = Vec2{Int64}
export Vec2i
const Vec2ui = Vec2{UInt64}
export Vec2ui
const Mat2{T} = Mat{2,2,T,4}
export Mat2
const Mat2d = Mat2{Float64}
export Mat2d
const Mat2f = Mat2{Float32}
export Mat2f
const Mat2i = Mat2{Int64}
export Mat2i
const Mat2ui = Mat2{UInt64}
export Mat2ui
const Point3{T} = Point{3,T}
export Point3
const Point3d = Point3{Float64}
export Point3d
const Point3f = Point3{Float32}
export Point3f
const Point3i = Point3{Int64}
export Point3i
const Point3ui = Point3{UInt64}
export Point3ui
const Vec3{T} = Vec{3,T}
export Vec3
const Vec3d = Vec3{Float64}
export Vec3d
const Vec3f = Vec3{Float32}
export Vec3f
const Vec3i = Vec3{Int64}
export Vec3i
const Vec3ui = Vec3{UInt64}
export Vec3ui
const Mat3{T} = Mat{3,3,T,9}
export Mat3
const Mat3d = Mat3{Float64}
export Mat3d
const Mat3f = Mat3{Float32}
export Mat3f
const Mat3i = Mat3{Int64}
export Mat3i
const Mat3ui = Mat3{UInt64}
export Mat3ui
const Point4{T} = Point{4,T}
export Point4
const Point4d = Point4{Float64}
export Point4d
const Point4f = Point4{Float32}
export Point4f
const Point4i = Point4{Int64}
export Point4i
const Point4ui = Point4{UInt64}
export Point4ui
const Vec4{T} = Vec{4,T}
export Vec4
const Vec4d = Vec4{Float64}
export Vec4d
const Vec4f = Vec4{Float32}
export Vec4f
const Vec4i = Vec4{Int64}
export Vec4i
const Vec4ui = Vec4{UInt64}
export Vec4ui
const Mat4{T} = Mat{4,4,T,16}
export Mat4
const Mat4d = Mat4{Float64}
export Mat4d
const Mat4f = Mat4{Float32}
export Mat4f
const Mat4i = Mat4{Int64}
export Mat4i
const Mat4ui = Mat4{UInt64}
export Mat4ui
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 6745 | # Implementation of trait based interface from https://github.com/JuliaGeo/GeoInterface.jl/
GeoInterface.isgeometry(::Type{<:AbstractGeometry}) = true
GeoInterface.isgeometry(::Type{<:AbstractFace}) = true
GeoInterface.isgeometry(::Type{<:AbstractPoint}) = true
GeoInterface.isgeometry(::Type{<:AbstractMesh}) = true
GeoInterface.isgeometry(::Type{<:AbstractPolygon}) = true
GeoInterface.isgeometry(::Type{<:LineString}) = true
GeoInterface.isgeometry(::Type{<:MultiPoint}) = true
GeoInterface.isgeometry(::Type{<:MultiLineString}) = true
GeoInterface.isgeometry(::Type{<:MultiPolygon}) = true
GeoInterface.isgeometry(::Type{<:Mesh}) = true
GeoInterface.geomtrait(::Point) = PointTrait()
GeoInterface.geomtrait(::Line) = LineTrait()
GeoInterface.geomtrait(::LineString) = LineStringTrait()
GeoInterface.geomtrait(::Polygon) = PolygonTrait()
GeoInterface.geomtrait(::MultiPoint) = MultiPointTrait()
GeoInterface.geomtrait(::MultiLineString) = MultiLineStringTrait()
GeoInterface.geomtrait(::MultiPolygon) = MultiPolygonTrait()
GeoInterface.geomtrait(::Ngon) = PolygonTrait()
GeoInterface.geomtrait(::AbstractMesh) = PolyhedralSurfaceTrait()
# GeoInterface calls this method in `GeoInterface.convert(GeometryBasics, ...)`
geointerface_geomtype(::GeoInterface.PointTrait) = Point
geointerface_geomtype(::GeoInterface.MultiPointTrait) = MultiPoint
geointerface_geomtype(::GeoInterface.LineTrait) = Line
geointerface_geomtype(::GeoInterface.LineStringTrait) = LineString
geointerface_geomtype(::GeoInterface.MultiLineStringTrait) = MultiLineString
geointerface_geomtype(::GeoInterface.PolygonTrait) = Polygon
geointerface_geomtype(::GeoInterface.MultiPolygonTrait) = MultiPolygon
geointerface_geomtype(::GeoInterface.PolyhedralSurfaceTrait) = Mesh
GeoInterface.geomtrait(::Simplex{Dim,T,1}) where {Dim,T} = PointTrait()
GeoInterface.geomtrait(::Simplex{Dim,T,2}) where {Dim,T} = LineStringTrait()
GeoInterface.geomtrait(::Simplex{Dim,T,3}) where {Dim,T} = PolygonTrait()
GeoInterface.ncoord(::PointTrait, g::Point) = length(g)
GeoInterface.getcoord(::PointTrait, g::Point, i::Int) = g[i]
GeoInterface.ngeom(::LineTrait, g::Line) = length(g)
GeoInterface.getgeom(::LineTrait, g::Line, i::Int) = g[i]
GeoInterface.ngeom(::LineStringTrait, g::LineString) = length(g) + 1 # n line segments + 1
GeoInterface.ncoord(::LineStringTrait, g::LineString{Dim}) where {Dim} = Dim
function GeoInterface.getgeom(::LineStringTrait, g::LineString, i::Int)
return GeometryBasics.coordinates(g)[i]
end
GeoInterface.ngeom(::PolygonTrait, g::Polygon) = length(g.interiors) + 1 # +1 for exterior
function GeoInterface.getgeom(::PolygonTrait,
g::Polygon,
i::Int)::typeof(g.exterior)
return i > 1 ? g.interiors[i - 1] : g.exterior
end
GeoInterface.ngeom(::MultiPointTrait, g::MultiPoint) = length(g)
GeoInterface.getgeom(::MultiPointTrait, g::MultiPoint, i::Int) = g[i]
function GeoInterface.ngeom(::MultiLineStringTrait, g::MultiLineString)
return length(g)
end
function GeoInterface.getgeom(::MultiLineStringTrait, g::MultiLineString, i::Int)
return g[i]
end
GeoInterface.ncoord(::MultiLineStringTrait, g::MultiLineString{Dim}) where {Dim} = Dim
GeoInterface.ngeom(::MultiPolygonTrait, g::MultiPolygon) = length(g)
GeoInterface.getgeom(::MultiPolygonTrait, g::MultiPolygon, i::Int) = g[i]
function GeoInterface.ncoord(::AbstractGeometryTrait,
::Simplex{Dim,T,N,P}) where {Dim,T,N,P}
return Dim
end
function GeoInterface.ncoord(::AbstractGeometryTrait,
::AbstractGeometry{Dim,T}) where {Dim,T}
return Dim
end
function GeoInterface.ngeom(::AbstractGeometryTrait,
::Simplex{Dim,T,N,P}) where {Dim,T,N,P}
return N
end
GeoInterface.ngeom(::PolygonTrait, ::Ngon) = 1 # can't have any holes
GeoInterface.getgeom(::PolygonTrait, g::Ngon, _) = LineString(g.points)
function GeoInterface.ncoord(::PolyhedralSurfaceTrait,
::Mesh{Dim,T,E,V} where {Dim,T,E,V})
return Dim
end
GeoInterface.ngeom(::PolyhedralSurfaceTrait, g::AbstractMesh) = length(g)
GeoInterface.getgeom(::PolyhedralSurfaceTrait, g::AbstractMesh, i) = g[i]
function GeoInterface.convert(::Type{Point}, type::PointTrait, geom)
x, y = GeoInterface.x(geom), GeoInterface.y(geom)
if GeoInterface.is3d(geom)
z = GeoInterface.z(geom)
T = promote_type(typeof(x), typeof(y), typeof(z))
return Point{3,T}(x, y, z)
else
T = promote_type(typeof(x), typeof(y))
return Point{2,T}(x, y)
end
end
function GeoInterface.convert(::Type{LineString}, type::LineStringTrait, geom)
g1 = getgeom(geom, 1)
x, y = GeoInterface.x(g1), GeoInterface.y(g1)
if GeoInterface.is3d(geom)
z = GeoInterface.z(g1)
T = promote_type(typeof(x), typeof(y), typeof(z))
return LineString([Point{3,T}(GeoInterface.x(p), GeoInterface.y(p), GeoInterface.z(p)) for p in getgeom(geom)])
else
T = promote_type(typeof(x), typeof(y))
return LineString([Point{2,T}(GeoInterface.x(p), GeoInterface.y(p)) for p in getgeom(geom)])
end
end
function GeoInterface.convert(::Type{Polygon}, type::PolygonTrait, geom)
t = LineStringTrait()
exterior = GeoInterface.convert(LineString, t, GeoInterface.getexterior(geom))
if GeoInterface.nhole(geom) == 0
return Polygon(exterior)
else
interiors = map(h -> GeoInterface.convert(LineString, t, h), GeoInterface.gethole(geom))
return Polygon(exterior, interiors)
end
end
function GeoInterface.convert(::Type{MultiPoint}, type::MultiPointTrait, geom)
g1 = getgeom(geom, 1)
x, y = GeoInterface.x(g1), GeoInterface.y(g1)
if GeoInterface.is3d(geom)
z = GeoInterface.z(g1)
T = promote_type(typeof(x), typeof(y), typeof(z))
return MultiPoint([Point{3,T}(GeoInterface.x(p), GeoInterface.y(p), GeoInterface.z(p)) for p in getgeom(geom)])
else
T = promote_type(typeof(x), typeof(y))
return MultiPoint([Point{2,T}(GeoInterface.x(p), GeoInterface.y(p)) for p in getgeom(geom)])
end
end
function GeoInterface.convert(::Type{MultiLineString}, type::MultiLineStringTrait, geom)
t = LineStringTrait()
return MultiLineString(map(l -> GeoInterface.convert(LineString, t, l), getgeom(geom)))
end
function GeoInterface.convert(::Type{MultiPolygon}, type::MultiPolygonTrait, geom)
t = PolygonTrait()
return MultiPolygon(map(poly -> GeoInterface.convert(Polygon, t, poly), getgeom(geom)))
end
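# Illustrative example (added; not part of the original source): the trait and
# geomtype methods above are what let `GeoInterface.convert(GeometryBasics, geom)`
# rebuild a GeometryBasics geometry from any GeoInterface-compatible `geom`.
# A round-trip through the interface should be a no-op up to the element type:
#
#   julia> GeoInterface.convert(GeometryBasics, Point2f(1, 2))
#   Point2f(1.0, 2.0)  # printed form may vary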
function Extents.extent(rect::Rect2)
(xmin, ymin), (xmax, ymax) = extrema(rect)
return Extents.Extent(X=(xmin, xmax), Y=(ymin, ymax))
end | GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 4609 | ##
# Generic base overloads
Base.extrema(primitive::GeometryPrimitive) = (minimum(primitive), maximum(primitive))
function widths(x::AbstractRange)
mini, maxi = Float32.(extrema(x))
return maxi - mini
end
##
# conversion & decompose
convert_simplex(::Type{T}, x::T) where {T} = (x,)
function convert_simplex(NFT::Type{NgonFace{N,T1}},
f::Union{NgonFace{N,T2}}) where {T1,T2,N}
return (convert(NFT, f),)
end
function convert_simplex(NFT::Type{NgonFace{3,T}}, f::NgonFace{3,T2}) where {T,T2}
return (convert(NFT, f),)
end
function convert_simplex(NFT::Type{NgonFace{2,T}}, f::NgonFace{2,T2}) where {T,T2}
return (convert(NFT, f),)
end
"""
convert_simplex(::Type{Face{3}}, f::Face{N})
Triangulate an N-Face into a tuple of triangular faces.
"""
@generated function convert_simplex(::Type{TriangleFace{T}},
f::Union{SimplexFace{N},NgonFace{N}}) where {T,N}
    3 <= N || error("decompose not implemented for N < 3 yet. N: $N") # otherwise degenerate
v = Expr(:tuple)
for i in 3:N
push!(v.args, :(TriangleFace{T}(f[1], f[$(i - 1)], f[$i])))
end
return v
end
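# Illustrative example (added): a quad face fans out into two triangles that
# share the first vertex.
#
#   julia> convert_simplex(TriangleFace{Int}, QuadFace{Int}(1, 2, 3, 4))
#   (TriangleFace(1, 2, 3), TriangleFace(1, 3, 4))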
"""
convert_simplex(::Type{Face{2}}, f::Face{N})
Extract all line segments in a Face.
"""
@generated function convert_simplex(::Type{LineFace{T}},
f::Union{SimplexFace{N},NgonFace{N}}) where {T,N}
    2 <= N || error("decompose not implemented for N < 2 yet. N: $N") # otherwise degenerate
v = Expr(:tuple)
for i in 1:(N - 1)
push!(v.args, :(LineFace{$T}(f[$i], f[$(i + 1)])))
end
# connect vertices N and 1
push!(v.args, :(LineFace{$T}(f[$N], f[1])))
return v
end
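# Illustrative example (added): extracting the closed edge loop of a triangle,
# including the segment connecting the last vertex back to the first.
#
#   julia> convert_simplex(LineFace{Int}, TriangleFace{Int}(1, 2, 3))
#   (LineFace(1, 2), LineFace(2, 3), LineFace(3, 1))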
to_pointn(::Type{T}, x) where {T<:Point} = convert_simplex(T, x)[1]
# disambiguation method overlords
convert_simplex(::Type{Point}, x::Point) = (x,)
convert_simplex(::Type{Point{N,T}}, p::Point{N,T}) where {N,T} = (p,)
function convert_simplex(::Type{Point{N,T}}, x) where {N,T}
N2 = length(x)
return (Point{N,T}(ntuple(i -> i <= N2 ? T(x[i]) : T(0), N)),)
end
function convert_simplex(::Type{Vec{N,T}}, x) where {N,T}
N2 = length(x)
return (Vec{N,T}(ntuple(i -> i <= N2 ? T(x[i]) : T(0), N)),)
end
collect_with_eltype(::Type{T}, vec::Vector{T}) where {T} = vec
collect_with_eltype(::Type{T}, vec::AbstractVector{T}) where {T} = collect(vec)
function collect_with_eltype(::Type{T}, iter) where {T}
isempty(iter) && return T[]
# We need to get `eltype` information from `iter`, it seems to be `Any`
# most of the time so the eltype checks here don't actually work
l = if Base.IteratorSize(iter) isa Union{Base.HasShape,Base.HasLength}
if Base.IteratorEltype(iter) isa Base.HasEltype && isconcretetype(eltype(iter))
# Work out the exact length
length(convert_simplex(T, first(iter))) * length(iter)
else
# We know it is at least the length of iter,
# after that we will `push!` if we have to
length(iter)
end
else
0
end
n = 0
result = Vector{T}(undef, l)
for element in iter
# convert_simplex always returns a tuple,
# so that e.g. convert(Triangle, quad) can return 2 elements
for telement in convert_simplex(T, element)
n += 1
if n > l
push!(result, telement)
else
result[n] = telement
end
end
end
return result
end
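# Illustrative example (added): collect_with_eltype converts while collecting,
# so one quad face expands into its two triangles.
#
#   julia> collect_with_eltype(TriangleFace{Int}, (QuadFace{Int}(1, 2, 3, 4),))
#   2-element Vector holding TriangleFace(1, 2, 3) and TriangleFace(1, 3, 4)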
"""
The unnormalized normal of three vertices.
"""
function orthogonal_vector(v1, v2, v3)
a = v2 - v1
b = v3 - v1
return cross(a, b)
end
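# Illustrative example (added): for a counter-clockwise triangle in the
# xy-plane the unnormalized normal points in +z (the concrete return type
# follows the inputs, e.g. Float32 components here).
#
#   julia> orthogonal_vector(Point3f(0, 0, 0), Point3f(1, 0, 0), Point3f(0, 1, 0))
#   (0.0, 0.0, 1.0)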
"""
```
normals{VT,FD,FT,FO}(vertices::Vector{Point{3, VT}},
faces::Vector{Face{FD,FT,FO}},
NT = Normal{3, VT})
```
Compute all vertex normals.
"""
function normals(vertices::AbstractVector{<:AbstractPoint{3,T}}, faces::AbstractVector{F};
normaltype=Vec{3,T}) where {T,F<:NgonFace}
return normals(vertices, faces, normaltype)
end
function normals(vertices::AbstractVector{<:AbstractPoint{3,T}}, faces::AbstractVector{F},
::Type{N}) where {T,F<:NgonFace,N}
normals_result = zeros(N, length(vertices))
for face in faces
v = metafree.(vertices[face])
# we can get away with two edges since faces are planar.
n = orthogonal_vector(v[1], v[2], v[3])
for i in 1:length(F)
fi = face[i]
normals_result[fi] = normals_result[fi] + n
end
end
normals_result .= normalize.(normals_result)
return normals_result
end
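# Usage sketch (added): vertex normals of a flat square in the xy-plane should
# all come out as (0, 0, 1), assuming the default normaltype of Vec{3, Float32}.
#
#   ps = Point3f[(0, 0, 0), (1, 0, 0), (1, 1, 0), (0, 1, 0)]
#   fs = [TriangleFace(1, 2, 3), TriangleFace(1, 3, 4)]
#   normals(ps, fs)  # -> four times Vec3f(0, 0, 1)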
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 5969 | """
coordinates(geometry)
Returns the edges/vertices/coordinates of a geometry. Is allowed to return lazy iterators!
Use `decompose(ConcretePointType, geometry)` to get `Vector{ConcretePointType}` with
`ConcretePointType` being something like `Point{3, Float32}`.
"""
function coordinates(points::AbstractVector{<:AbstractPoint})
return points
end
"""
faces(geometry)
Returns the face connections of a geometry. Is allowed to return lazy iterators!
Use `decompose(ConcreteFaceType, geometry)` to get `Vector{ConcreteFaceType}` with
`ConcreteFaceType` being something like `TriangleFace{Int}`.
"""
function faces(f::AbstractVector{<:AbstractFace})
return f
end
function normals(primitive, nvertices=nothing; kw...)
    # The primitive doesn't have any specific algorithm to generate normals,
    # so they will be generated from faces + positions,
    # which we indicate by returning nothing!
    # Overload normals(primitive::YourPrimitive) to calculate the normals
    # differently.
return nothing
end
function faces(primitive, nvertices=nothing; kw...)
# doesn't have any specific algorithm to generate faces
# so will try to triangulate the coordinates!
return nothing
end
texturecoordinates(primitive, nvertices=nothing) = nothing
"""
Tesselation(primitive, nvertices)
For abstract geometries, when we generate
a mesh from them, we need to decide how fine grained we want to mesh them.
To transport this information to the various decompose methods, you can wrap it
in the Tesselation object e.g. like this:
```julia
sphere = Sphere(Point3f(0), 1)
m1 = mesh(sphere) # uses a default value for tesselation
m2 = mesh(Tesselation(sphere, 64)) # uses 64 for tesselation
length(coordinates(m1)) != length(coordinates(m2))
```
For grid based tesselation, you can also use a tuple:
```julia
rect = Rect2(0, 0, 1, 1)
Tesselation(rect, (5, 5))
```
"""
struct Tesselation{Dim,T,Primitive,NGrid}
primitive::Primitive
nvertices::NTuple{NGrid,Int}
end
function Tesselation(primitive::GeometryPrimitive{Dim,T},
nvertices::NTuple{N,<:Integer}) where {Dim,T,N}
return Tesselation{Dim,T,typeof(primitive),N}(primitive, Int.(nvertices))
end
Tesselation(primitive, nvertices::Integer) = Tesselation(primitive, (nvertices,))
# This is a bit lazy, I guess we should just refactor these methods
# to directly work on Tesselation - but this way it's backward compatible and less
# refactor work :D
nvertices(tesselation::Tesselation) = tesselation.nvertices
nvertices(tesselation::Tesselation{T,N,P,1}) where {T,N,P} = tesselation.nvertices[1]
function coordinates(tesselation::Tesselation)
return coordinates(tesselation.primitive, nvertices(tesselation))
end
faces(tesselation::Tesselation) = faces(tesselation.primitive, nvertices(tesselation))
normals(tesselation::Tesselation) = normals(tesselation.primitive, nvertices(tesselation))
function texturecoordinates(tesselation::Tesselation)
return texturecoordinates(tesselation.primitive, nvertices(tesselation))
end
## Decompose methods
# Dispatch type to make `decompose(UV{Vec2f}, primitive)` work
# and to pass through tesselation information
# Types that can be converted to a mesh via the functions below
const Meshable{Dim,T} = Union{Tesselation{Dim,T},Mesh{Dim,T},AbstractPolygon{Dim,T},
GeometryPrimitive{Dim,T},
AbstractVector{<:AbstractPoint{Dim,T}}}
struct UV{T} end
UV(::Type{T}) where {T} = UV{T}()
UV() = UV(Vec2f)
struct UVW{T} end
UVW(::Type{T}) where {T} = UVW{T}()
UVW() = UVW(Vec3f)
struct Normal{T} end
Normal(::Type{T}) where {T} = Normal{T}()
Normal() = Normal(Vec3f)
function decompose(::Type{F}, primitive) where {F<:AbstractFace}
f = faces(primitive)
f === nothing && return nothing
return collect_with_eltype(F, f)
end
function decompose(::Type{P}, primitive) where {P<:AbstractPoint}
return collect_with_eltype(P, metafree(coordinates(primitive)))
end
function decompose(::Type{Point}, primitive::Meshable{Dim,T}) where {Dim,T}
return collect_with_eltype(Point{Dim,T}, metafree(coordinates(primitive)))
end
function decompose(::Type{Point}, primitive::LineString{Dim,T}) where {Dim,T}
return collect_with_eltype(Point{Dim,T}, metafree(coordinates(primitive)))
end
function decompose(::Type{T}, primitive) where {T}
return collect_with_eltype(T, primitive)
end
decompose_uv(primitive) = decompose(UV(), primitive)
decompose_uvw(primitive) = decompose(UVW(), primitive)
decompose_normals(primitive) = decompose(Normal(), primitive)
function decompose(NT::Normal{T}, primitive) where {T}
n = normals(primitive)
if n === nothing
return collect_with_eltype(T, normals(coordinates(primitive), faces(primitive)))
end
return collect_with_eltype(T, n)
end
function decompose(UVT::Union{UV{T},UVW{T}}, primitive) where {T}
# This is the fallback for texture coordinates if a primitive doesn't overload them
# We just take the positions and normalize them
uv = texturecoordinates(primitive)
if uv === nothing
# If the primitive doesn't even have coordinates, we're out of options and return
# nothing, indicating that texturecoordinates aren't implemented
positions = decompose(Point, primitive)
positions === nothing && return nothing
# Let this overlord do the work
return decompose(UVT, positions)
end
return collect_with_eltype(T, uv)
end
function decompose(UVT::Union{UV{T},UVW{T}},
positions::AbstractVector{<:VecTypes}) where {T}
N = length(T)
positions_nd = decompose(Point{N,eltype(T)}, positions)
bb = Rect(positions_nd) # Make sure we get this as points
return map(positions_nd) do p
return T((p .- minimum(bb)) ./ widths(bb))
end
end
# Stay backward compatible:
function decompose(::Type{T}, primitive::Meshable, nvertices) where {T}
return decompose(T, Tesselation(primitive, nvertices))
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 3678 |
"""
intersects(a::Line, b::Line) -> Bool, Point
Intersection of 2 line segments `a` and `b`.
Returns `(intersection_found::Bool, intersection_point::Point)`
"""
# 2D Line-segment intersection algorithm by Paul Bourke and many others.
# http://paulbourke.net/geometry/pointlineplane/
function intersects(a::Line{2,T1}, b::Line{2,T2}) where {T1,T2}
T = promote_type(T1, T2)
p0 = zero(Point2{T})
x1, y1 = a[1]
x2, y2 = a[2]
x3, y3 = b[1]
x4, y4 = b[2]
denominator = ((y4 - y3) * (x2 - x1)) - ((x4 - x3) * (y2 - y1))
numerator_a = ((x4 - x3) * (y1 - y3)) - ((y4 - y3) * (x1 - x3))
numerator_b = ((x2 - x1) * (y1 - y3)) - ((y2 - y1) * (x1 - x3))
if denominator == 0
# no intersection: lines are parallel
return false, p0
end
# If we ever need to know if the lines are coincident, we can get that too:
# denominator == numerator_a == numerator_b == 0 && return :coincident_lines
# unknown_a and b tell us how far along the line segment the intersection is.
unknown_a = numerator_a / denominator
unknown_b = numerator_b / denominator
# Values between [0, 1] mean the intersection point of the lines rests on
# both of the line segments.
if 0 <= unknown_a <= 1 && 0 <= unknown_b <= 1
# Substituting an unknown back lets us find the intersection point.
x = x1 + (unknown_a * (x2 - x1))
y = y1 + (unknown_a * (y2 - y1))
return true, Point2{T}(x, y)
end
# lines intersect, but outside of at least one of these line segments.
return false, p0
end
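# Illustrative example (added): the diagonals of the unit square cross at their
# midpoint.
#
#   julia> intersects(Line(Point2f(0, 0), Point2f(1, 1)),
#                     Line(Point2f(0, 1), Point2f(1, 0)))
#   (true, Point2f(0.5, 0.5))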
function simple_concat(vec::AbstractVector, range, endpoint::P) where {P}
result = Vector{P}(undef, length(range) + 1)
for (i, j) in enumerate(range)
result[i] = vec[mod1(j, length(vec))]
end
result[end] = endpoint
return result
end
function consecutive_pairs(arr)
n = length(arr)
return zip(view(arr, 1:(n - 1)), view(arr, 2:n))
end
"""
self_intersections(points::AbstractVector{AbstractPoint})
Finds all self intersections of polygon `points`
"""
function self_intersections(points::AbstractVector{<:AbstractPoint})
sections = similar(points, 0)
intersections = Int[]
wraparound(i) = mod1(i, length(points) - 1)
for (i, (a, b)) in enumerate(consecutive_pairs(points))
for (j, (a2, b2)) in enumerate(consecutive_pairs(points))
is1, is2 = wraparound(i + 1), wraparound(i - 1)
if i != j &&
is1 != j &&
is2 != j &&
!(i in intersections) &&
!(j in intersections)
intersected, p = intersects(Line(a, b), Line(a2, b2))
if intersected
push!(intersections, i, j)
push!(sections, p)
end
end
end
end
return intersections, sections
end
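# Illustrative example (added): a "bowtie" contour crosses itself once.
# Segments 1 and 3 intersect at (0.5, 0.5), so this should return the segment
# indices [1, 3] and a single intersection point:
#
#   ps = Point2f[(0, 0), (1, 1), (1, 0), (0, 1), (0, 0)]
#   self_intersections(ps)  # -> ([1, 3], [Point2f(0.5, 0.5)])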
"""
split_intersections(points::AbstractVector{AbstractPoint})
Splits polygon `points` into its self-intersecting parts. Only 1 intersection
is handled right now.
"""
function split_intersections(points::AbstractVector{<:AbstractPoint})
intersections, sections = self_intersections(points)
    return if isempty(intersections)
        [points]
elseif length(intersections) == 2 && length(sections) == 1
a, b = intersections
p = sections[1]
a, b = min(a, b), max(a, b)
poly1 = simple_concat(points, (a + 1):(b - 1), p)
poly2 = simple_concat(points, (b + 1):(length(points) + a), p)
return [poly1, poly2]
else
error("More than 1 intersections can't be handled currently. Found: $intersections, $sections")
end
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 10411 | const FaceMesh{Dim,T,Element} = Mesh{Dim,T,Element,<:FaceView{Element}}
coordinates(mesh::FaceMesh) = coordinates(getfield(mesh, :simplices))
faces(mesh::FaceMesh) = faces(getfield(mesh, :simplices))
function texturecoordinates(mesh::AbstractMesh)
hasproperty(mesh, :uv) && return mesh.uv
hasproperty(mesh, :uvw) && return mesh.uvw
return nothing
end
function normals(mesh::AbstractMesh)
hasproperty(mesh, :normals) && return mesh.normals
return nothing
end
const GLTriangleElement = Triangle{3,Float32}
const GLTriangleFace = TriangleFace{GLIndex}
const PointWithUV{Dim,T} = PointMeta{Dim,T,Point{Dim,T},(:uv,),Tuple{Vec{2,T}}}
const PointWithNormal{Dim,T} = PointMeta{Dim,T,Point{Dim,T},(:normals,),Tuple{Vec{3,T}}}
const PointWithUVNormal{Dim,T} = PointMeta{Dim,T,Point{Dim,T},(:normals, :uv),
Tuple{Vec{3,T},Vec{2,T}}}
const PointWithUVWNormal{Dim,T} = PointMeta{Dim,T,Point{Dim,T},(:normals, :uvw),
Tuple{Vec{3,T},Vec{3,T}}}
"""
TriangleMesh{Dim, T, PointType}
Abstract Mesh with triangle elements of eltype `T`.
"""
const TriangleMesh{Dim,T,PointType} = AbstractMesh{TriangleP{Dim,T,PointType}}
"""
PlainMesh{Dim, T}
Triangle mesh with no meta information (just points + triangle faces)
"""
const PlainMesh{Dim,T} = TriangleMesh{Dim,T,Point{Dim,T}}
const GLPlainMesh{Dim} = PlainMesh{Dim,Float32}
const GLPlainMesh2D = GLPlainMesh{2}
const GLPlainMesh3D = GLPlainMesh{3}
"""
UVMesh{Dim, T}
PlainMesh with texture coordinates meta at each point.
`uvmesh.uv isa AbstractVector{Vec2f}`
"""
const UVMesh{Dim,T} = TriangleMesh{Dim,T,PointWithUV{Dim,T}}
const GLUVMesh{Dim} = UVMesh{Dim,Float32}
const GLUVMesh2D = UVMesh{2}
const GLUVMesh3D = UVMesh{3}
"""
NormalMesh{Dim, T}
PlainMesh with normals meta at each point.
`normalmesh.normals isa AbstractVector{Vec3f}`
"""
const NormalMesh{Dim,T} = TriangleMesh{Dim,T,PointWithNormal{Dim,T}}
const GLNormalMesh{Dim} = NormalMesh{Dim,Float32}
const GLNormalMesh2D = GLNormalMesh{2}
const GLNormalMesh3D = GLNormalMesh{3}
"""
NormalUVMesh{Dim, T}
PlainMesh with normals and uv meta at each point.
`normalmesh.normals isa AbstractVector{Vec3f}`
`normalmesh.uv isa AbstractVector{Vec2f}`
"""
const NormalUVMesh{Dim,T} = TriangleMesh{Dim,T,PointWithUVNormal{Dim,T}}
const GLNormalUVMesh{Dim} = NormalUVMesh{Dim,Float32}
const GLNormalUVMesh2D = GLNormalUVMesh{2}
const GLNormalUVMesh3D = GLNormalUVMesh{3}
"""
NormalUVWMesh{Dim, T}
PlainMesh with normals and uvw (texture coordinates in 3D) meta at each point.
`normalmesh.normals isa AbstractVector{Vec3f}`
`normalmesh.uvw isa AbstractVector{Vec3f}`
"""
const NormalUVWMesh{Dim,T} = TriangleMesh{Dim,T,PointWithUVWNormal{Dim,T}}
const GLNormalUVWMesh{Dim} = NormalUVWMesh{Dim,Float32}
const GLNormalUVWMesh2D = GLNormalUVWMesh{2}
const GLNormalUVWMesh3D = GLNormalUVWMesh{3}
function decompose_triangulate_fallback(primitive::Meshable; pointtype, facetype)
positions = decompose(pointtype, primitive)
faces = decompose(facetype, primitive)
# If faces returns nothing for primitive, we try to triangulate!
if faces === nothing
# triangulation.jl
faces = decompose(facetype, positions)
end
return positions, faces
end
"""
mesh(primitive::GeometryPrimitive;
         pointtype=Point, facetype=GLTriangleFace,
uv=nothing, normaltype=nothing)
Creates a mesh from `primitive`.
Uses the element types from the keyword arguments to create the attributes.
The attributes that have their type set to nothing are not added to the mesh.
Note that the tesselation size can be an `Int` or `Tuple{Int, Int}` when the primitive is grid based.
It also only loosely correlates to the number of vertices, depending on the algorithm used.
#TODO: find a better number here!
"""
function mesh(primitive::Meshable; pointtype=Point, facetype=GLTriangleFace, uv=nothing,
normaltype=nothing)
positions, faces = decompose_triangulate_fallback(primitive; pointtype=pointtype, facetype=facetype)
# We want to preserve any existing attributes!
attrs = attributes(primitive)
    # Make sure this doesn't contain position, we'll add position explicitly via meta!
delete!(attrs, :position)
if uv !== nothing
# this may overwrite an existing :uv, but will only create a copy
# if it has a different eltype, otherwise it should replace it
# with exactly the same instance - which is what we want here
attrs[:uv] = decompose(UV(uv), primitive)
end
if normaltype !== nothing
primitive_normals = normals(primitive)
if primitive_normals !== nothing
attrs[:normals] = decompose(normaltype, primitive_normals)
else
# Normals not implemented for primitive, so we calculate them!
n = normals(positions, faces; normaltype=normaltype)
            if n !== nothing # ok jeez, this is a 2d mesh which can't have normals
attrs[:normals] = n
end
end
end
return Mesh(meta(positions; attrs...), faces)
end
"""
mesh(polygon::AbstractVector{P}; pointtype=P, facetype=GLTriangleFace,
normaltype=nothing)
Create a mesh from a polygon given as a vector of points, using triangulation.
"""
function mesh(polygon::AbstractVector{P}; pointtype=P, facetype=GLTriangleFace,
normaltype=nothing) where {P<:AbstractPoint{2}}
return mesh(Polygon(polygon); pointtype=pointtype, facetype=facetype,
normaltype=normaltype)
end
function mesh(polygon::AbstractPolygon{Dim,T}; pointtype=Point{Dim,T},
facetype=GLTriangleFace, normaltype=nothing) where {Dim,T}
positions, faces = decompose_triangulate_fallback(polygon; pointtype=pointtype, facetype=facetype)
if normaltype !== nothing
n = normals(positions, faces; normaltype=normaltype)
positions = meta(positions; normals=n)
end
return Mesh(positions, faces)
end
pointtype(x::Mesh) = eltype(decompose(Point, x))
facetype(x::Mesh) = eltype(faces(x))
function triangle_mesh(primitive::Mesh{N}) where {N}
# already target type:
if pointtype(primitive) === Point{N,Float32} && GLTriangleFace === facetype(primitive)
return primitive
else
return mesh(primitive; pointtype=Point{N,Float32}, facetype=GLTriangleFace)
end
end
function triangle_mesh(primitive::Meshable{N}; nvertices=nothing) where {N}
if nvertices !== nothing
@warn("nvertices argument deprecated. Wrap primitive in `Tesselation(primitive, nvertices)`")
primitive = Tesselation(primitive, nvertices)
end
return mesh(primitive; pointtype=Point{N,Float32}, facetype=GLTriangleFace)
end
function uv_mesh(primitive::Meshable{N,T}) where {N,T}
return mesh(primitive; pointtype=Point{N,Float32}, uv=Vec2f, facetype=GLTriangleFace)
end
function uv_normal_mesh(primitive::Meshable{N}) where {N}
return mesh(primitive; pointtype=Point{N,Float32}, uv=Vec2f, normaltype=Vec3f,
facetype=GLTriangleFace)
end
function normal_mesh(points::AbstractVector{<:AbstractPoint},
faces::AbstractVector{<:AbstractFace})
_points = decompose(Point3f, points)
_faces = decompose(GLTriangleFace, faces)
return Mesh(meta(_points; normals=normals(_points, _faces)), _faces)
end
function normal_mesh(primitive::Meshable{N}; nvertices=nothing) where {N}
if nvertices !== nothing
@warn("nvertices argument deprecated. Wrap primitive in `Tesselation(primitive, nvertices)`")
primitive = Tesselation(primitive, nvertices)
end
return mesh(primitive; pointtype=Point{N,Float32}, normaltype=Vec3f,
facetype=GLTriangleFace)
end
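# Usage sketch (added): building a GPU-friendly triangle mesh with per-vertex
# normals from a primitive; the normals end up as point metadata.
#
#   m = normal_mesh(Sphere(Point3f(0), 1f0))
#   m.normals  # per-vertex Vec3f normals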
"""
volume(triangle)
Calculate the signed volume of one tetrahedron. Be sure the orientation of your
surface is right.
"""
function volume(triangle::Triangle)
    v1, v2, v3 = triangle
    sig = sign(orthogonal_vector(v1, v2, v3) ⋅ v1)
    return sig * abs(v1 ⋅ (v2 × v3)) / 6
end
"""
volume(mesh)
Calculate the signed volume of all tetrahedra. Be sure the orientation of your
surface is right.
"""
function volume(mesh::Mesh)
return sum(volume, mesh)
end
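# Illustrative example (added, assuming consistently oriented faces): for a
# closed surface the summed signed tetrahedra give the enclosed volume, so a
# unit cube mesh should yield approximately 1 (up to sign if the orientation
# is flipped).
#
#   volume(triangle_mesh(Rect3f(0, 0, 0, 1, 1, 1)))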
function Base.merge(meshes::AbstractVector{<:Mesh})
return if isempty(meshes)
error("No meshes to merge")
elseif length(meshes) == 1
return meshes[1]
else
ps = reduce(vcat, coordinates.(meshes))
fs = reduce(vcat, faces.(meshes))
idx = length(faces(meshes[1]))
offset = length(coordinates(meshes[1]))
for mesh in Iterators.drop(meshes, 1)
N = length(faces(mesh))
for i = idx .+ (1:N)
fs[i] = fs[i] .+ offset
end
idx += N
offset += length(coordinates(mesh))
end
return Mesh(ps, fs)
end
end
"""
pointmeta(mesh::Mesh; meta_data...)
Attaches metadata to the coordinates of a mesh
"""
function pointmeta(mesh::Mesh; meta_data...)
points = coordinates(mesh)
attr = attributes(points)
delete!(attr, :position) # position == metafree(points)
# delete overlapping attributes so we can replace with `meta_data`
foreach(k -> delete!(attr, k), keys(meta_data))
return Mesh(meta(metafree(points); attr..., meta_data...), faces(mesh))
end
function pointmeta(mesh::Mesh, uv::UV)
return pointmeta(mesh; uv=decompose(uv, mesh))
end
function pointmeta(mesh::Mesh, normal::Normal)
return pointmeta(mesh; normals=decompose(normal, mesh))
end
"""
pop_pointmeta(mesh::Mesh, property::Symbol)
Remove `property` from point metadata.
Returns the new mesh, and the property!
"""
function pop_pointmeta(mesh::Mesh, property::Symbol)
points = coordinates(mesh)
attr = attributes(points)
delete!(attr, :position) # position == metafree(points)
# delete overlapping attributes so we can replace with `meta_data`
m = pop!(attr, property)
return Mesh(meta(metafree(points); attr...), faces(mesh)), m
end
"""
facemeta(mesh::Mesh; meta_data...)
Attaches metadata to the faces of a mesh
"""
function facemeta(mesh::Mesh; meta_data...)
return Mesh(coordinates(mesh), meta(faces(mesh); meta_data...))
end
function attributes(hasmeta::Mesh)
return Dict{Symbol,Any}((name => getproperty(hasmeta, name)
for name in propertynames(hasmeta)))
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 10858 | #=
Helper functions that work around the fact that there is no generic
Table interface for this functionality. Once this is in e.g. Tables.jl,
it should be removed from GeometryBasics!
=#
"""
attributes(hasmeta)
Returns all attributes of meta as a Dict{Symbol, Any}.
Needs to be overloaded, and returns empty dict for non overloaded types!
Gets overloaded by default for all Meta types.
"""
function attributes(hasmeta)
return Dict{Symbol,Any}()
end
function attributes(hasmeta::StructArray)
return Dict{Symbol,Any}((name => getproperty(hasmeta, name)
for name in propertynames(hasmeta)))
end
"""
getcolumns(t, colnames::Symbol...)
Gets a column from any Array like (Table/AbstractArray).
For AbstractVectors, a column will be the field names of the element type.
"""
function getcolumns(tablelike, colnames::Symbol...)
return getproperty.((tablelike,), colnames)
end
getcolumn(t, colname::Symbol) = getcolumns(t, colname)[1]
"""
MetaType(::Type{T})
Returns the Meta Type corresponding to `T`
E.g:
```julia
MetaType(Point) == PointMeta
```
"""
MetaType(::Type{T}) where {T} = error("No Meta Type for $T")
"""
MetaFree(::Type{T})
Returns the original type containing no metadata for `T`
E.g:
```julia
MetaFree(PointMeta) == Point
```
"""
MetaFree(::Type{T}) where {T} = error("No meta free Type for $T")
"""
meta(x::MetaObject)
Returns the metadata of `x`
"""
meta(x::T) where {T} = error("$T has no meta!")
metafree(x::T) where {T} = x
macro meta_type(name, mainfield, supertype, params...)
MetaName = Symbol("$(name)Meta")
field = QuoteNode(mainfield)
NoParams = Symbol("$(MetaName)NoParams")
params_sym = map(params) do param
param isa Symbol && return param
param isa Expr && param.head == :(<:) && return param.args[1]
return error("Unsupported type parameter: $(param)")
end
expr = quote
struct $MetaName{$(params...),Typ<:$supertype{$(params_sym...)},Names,Types} <:
$supertype{$(params_sym...)}
main::Typ
meta::NamedTuple{Names,Types}
end
const $NoParams{Typ,Names,Types} = $MetaName{$(params_sym...),Typ,Names,
Types} where {$(params_sym...)}
function Base.getproperty(x::$MetaName{$(params_sym...),Typ,Names,Types},
field::Symbol) where {$(params...),Typ,Names,Types}
field === $field && return getfield(x, :main)
field === :main && return getfield(x, :main)
Base.sym_in(field, Names) && return getfield(getfield(x, :meta), field)
return error("Field $field not part of Element")
end
function GeometryBasics.MetaType(XX::Type{<:$supertype{$(params_sym...)} where {$(params...)}})
return $MetaName
end
function GeometryBasics.MetaType(ST::Type{<:$supertype{$(params_sym...)}},
::Type{NamedTuple{Names,Types}}) where {$(params...),
Names,
Types}
return $MetaName{$(params_sym...),ST,Names,Types}
end
GeometryBasics.MetaFree(::Type{<:$MetaName{$(params_sym...),Typ}}) where {$(params_sym...), Typ<:$supertype{$(params_sym...)} } = Typ
GeometryBasics.MetaFree(::Type{<:$MetaName}) = $name
GeometryBasics.metafree(x::$MetaName) = getfield(x, :main)
GeometryBasics.metafree(x::AbstractVector{<:$MetaName}) = getproperty(x, $field)
GeometryBasics.meta(x::$MetaName) = getfield(x, :meta)
GeometryBasics.meta(x::AbstractVector{<:$MetaName}) = getproperty(x, :meta)
function GeometryBasics.meta(main::$supertype{$(params_sym...)};
meta...) where {$(params...)}
            isempty(meta) && return main # no meta to add!
return $MetaName(main; meta...)
end
function GeometryBasics.meta(elements::AbstractVector{XX};
meta...) where {XX<:$supertype{$(params_sym...)}} where {$(params...)}
isempty(meta) && return elements # no meta to add!
n = length(elements)
for (k, v) in meta
if v isa AbstractVector
mn = length(v)
mn != n && error("Metadata array needs to have same length as data.
Found $(n) data items, and $mn metadata items")
else
error("Metadata needs to be an array with the same length as data items. Found: $(typeof(v))")
end
end
nt = values(meta)
# get the first element to get the per element named tuple type
ElementNT = typeof(map(first, nt))
return StructArray{MetaType(XX, ElementNT)}(($(mainfield)=elements, nt...))
end
function GeometryBasics.attributes(hasmeta::$MetaName)
return Dict{Symbol,Any}((name => getproperty(hasmeta, name)
for name in propertynames(hasmeta)))
end
function (MT::Type{<:$MetaName})(args...; meta...)
nt = values(meta)
obj = MetaFree(MT)(args...)
return MT(obj, nt)
end
function (MT::Type{<:$MetaName})(main::$(name); meta...)
nt = values(meta)
return MT(main, nt)
end
function Base.propertynames(::$MetaName{$(params_sym...),Typ,Names,Types}) where {$(params...),
Typ,
Names,
Types}
return ($field, Names...)
end
function StructArrays.component(x::$MetaName{$(params_sym...),Typ,Names,Types},
field::Symbol) where {$(params...),Typ,Names,Types}
return getproperty(x, field)
end
function StructArrays.staticschema(::Type{$MetaName{$(params_sym...),Typ,Names,
Types}}) where {$(params...),
Typ,Names,Types}
return NamedTuple{($field, Names...),Base.tuple_type_cons(Typ, Types)}
end
function StructArrays.createinstance(::Type{$MetaName{$(params_sym...),Typ,Names,
Types}}, metafree,
args...) where {$(params...),Typ,Names,Types}
return $MetaName(metafree, NamedTuple{Names,Types}(args))
end
end
return esc(expr)
end
@meta_type(Point, position, AbstractPoint, Dim, T)
Base.getindex(x::PointMeta, idx::Int) = getindex(metafree(x), idx)
@meta_type(NgonFace, ngon, AbstractNgonFace, N, T)
Base.getindex(x::NgonFaceMeta, idx::Int) = getindex(metafree(x), idx)
@meta_type(SimplexFace, simplex, AbstractSimplexFace, N, T)
Base.getindex(x::SimplexFaceMeta, idx::Int) = getindex(metafree(x), idx)
@meta_type(Polygon, polygon, AbstractPolygon, N, T)
@meta_type(LineString, lines, AbstractVector, P <: Line)
Base.getindex(x::LineStringMeta, idx::Int) = getindex(metafree(x), idx)
Base.size(x::LineStringMeta) = size(metafree(x))
@meta_type(MultiPoint, points, AbstractVector, P <: AbstractPoint)
Base.getindex(x::MultiPointMeta, idx::Int) = getindex(metafree(x), idx)
Base.size(x::MultiPointMeta) = size(metafree(x))
@meta_type(MultiLineString, linestrings, AbstractVector, P <: LineString)
Base.getindex(x::MultiLineStringMeta, idx::Int) = getindex(metafree(x), idx)
Base.size(x::MultiLineStringMeta) = size(metafree(x))
@meta_type(MultiPolygon, polygons, AbstractVector, P <: Polygon)
Base.getindex(x::MultiPolygonMeta, idx::Int) = getindex(metafree(x), idx)
Base.size(x::MultiPolygonMeta) = size(metafree(x))
@meta_type(Mesh, mesh, AbstractMesh, Element <: Polytope)
Base.getindex(x::MeshMeta, idx::Int) = getindex(metafree(x), idx)
Base.size(x::MeshMeta) = size(metafree(x))
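# Illustrative example (added): attaching per-point metadata yields the
# corresponding Meta type, whose fields are reachable via getproperty.
#
#   julia> p = meta(Point(1.0, 2.0); name="A")
#   julia> (p isa PointMeta, p.position, p.name)
#   (true, Point(1.0, 2.0), "A")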
"""
MetaT(geometry, meta::NamedTuple)
MetaT(geometry; meta...)
Returns a `MetaT` that holds a geometry and its metadata.
`MetaT` behaves like the `meta` method, but is designed to handle
geometries and metadata of different/heterogeneous types.
e.g. while a Point meta-geometry is a `PointMeta`, the `MetaT` representation is `MetaT{Point}`.
The downside is that it's not subtyped to `AbstractPoint` like a `PointMeta` is.
Example:
```julia
julia> MetaT(Point(1, 2), city = "Mumbai")
MetaT{Point{2,Int64},(:city,),Tuple{String}}([1, 2], (city = "Mumbai",))
```
"""
struct MetaT{T,Names,Types}
main::T
meta::NamedTuple{Names,Types}
end
MetaT(x; kwargs...) = MetaT(x, values(kwargs))
"""
metafree(x::MetaT)
metafree(x::Array{MetaT})
Free the MetaT from metadata
i.e. returns the geometry/array of geometries
"""
function metafree(x::MetaT)
return getfield(x, :main)
end
metafree(x::AbstractVector{<:MetaT}) = map(metafree, x)
"""
meta(x::MetaT)
meta(x::Array{MetaT})
Returns the metadata of a `MetaT`
"""
function meta(x::MetaT)
return getfield(x, :meta)
end
meta(x::AbstractVector{<:MetaT}) = map(meta, x)
# helper methods
function Base.getproperty(x::MetaT, field::Symbol)
return if field == :main
metafree(x)
elseif field == :meta
meta(x)
else
getproperty(meta(x), field)
end
end
Base.propertynames(x::MetaT) = (:main, propertynames(meta(x))...)
getnamestypes(::Type{MetaT{T,Names,Types}}) where {T,Names,Types} = (T, Names, Types)
# explicitly give the "schema" of the object to StructArrays
function StructArrays.staticschema(::Type{F}) where {F<:MetaT}
T, names, types = getnamestypes(F)
return NamedTuple{(:main, names...),Base.tuple_type_cons(T, types)}
end
# generate an instance of MetaT type
function StructArrays.createinstance(::Type{F}, x, args...) where {F<:MetaT}
T, names, types = getnamestypes(F)
return MetaT(x, NamedTuple{names,types}(args))
end
"""
Puts an iterable of MetaT's into a StructArray
"""
function meta_table(iter)
cols = Tables.columntable(iter)
return meta_table(first(cols), Base.tail(cols))
end
function meta_table(main, meta::NamedTuple{names}) where {names}
eltypes = Tuple{map(eltype, values(meta))...}
F = MetaT{eltype(main),names,eltypes}
return StructArray{F}(; main=main, meta...)
end
Base.getindex(x::MetaT, idx::Int) = getindex(metafree(x), idx)
Base.size(x::MetaT) = size(metafree(x))
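# Usage sketch (added, hedged): heterogeneous MetaT rows can be gathered into a
# StructArray via meta_table, giving column access to geometry and metadata.
#
#   rows = [MetaT(Point(1, 2); city="Mumbai"), MetaT(Point(3, 4); city="Pune")]
#   sa = meta_table(rows)
#   sa.main, sa.city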
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 2541 |
"""
OffsetInteger{O, T}
OffsetInteger type mainly for indexing.
* `O` - The offset relative to Julia arrays. This helps reduce copying when
communicating with 0-indexed systems such as OpenGL.
"""
struct OffsetInteger{O,T<:Integer} <: Integer
i::T
OffsetInteger{O,T}(x::Integer) where {O,T<:Integer} = new{O,T}(T(x + O))
end
const ZeroIndex{T<:Integer} = OffsetInteger{-1,T}
const OneIndex{T<:Integer} = OffsetInteger{0,T}
const GLIndex = ZeroIndex{Cuint}
raw(x::OffsetInteger) = x.i
raw(x::Integer) = x
value(x::OffsetInteger{O,T}) where {O,T} = raw(x) - O
value(x::Integer) = x
function Base.show(io::IO, oi::OffsetInteger)
    return print(io, "|$(raw(oi)) (indexes as $(value(oi)))|")
end
Base.eltype(::Type{OffsetInteger{O,T}}) where {O,T} = T
Base.eltype(oi::OffsetInteger) = eltype(typeof(oi))
# constructors and conversion
function OffsetInteger{O1,T1}(x::OffsetInteger{O2,T2}) where {O1,O2,T1<:Integer,T2<:Integer}
return OffsetInteger{O1,T1}(convert(T2, x))
end
OffsetInteger{O}(x::Integer) where {O} = OffsetInteger{O,eltype(x)}(x)
OffsetInteger{O}(x::OffsetInteger) where {O} = OffsetInteger{O,eltype(x)}(x)
# This constructor has a massive method invalidation as a consequence,
# and doesn't seem to be needed, so let's remove it!
Base.convert(::Type{IT}, x::OffsetInteger) where {IT<:Integer} = IT(value(x))
Base.promote_rule(::Type{IT}, ::Type{<:OffsetInteger}) where {IT<:Integer} = IT
function Base.promote_rule(::Type{OffsetInteger{O1,T1}},
::Type{OffsetInteger{O2,T2}}) where {O1,O2,T1<:Integer,
T2<:Integer}
return OffsetInteger{pure_max(O1, O2),promote_type(T1, T2)}
end
Base.@pure pure_max(x1, x2) = x1 > x2 ? x1 : x2
# Need to convert to Int here because of: https://github.com/JuliaLang/julia/issues/35038
Base.to_index(I::OffsetInteger) = convert(Int, raw(OneIndex(I)))
Base.to_index(I::OffsetInteger{0}) = convert(Int, raw(I))
# basic operators
for op in (:(-), :abs)
@eval Base.$op(x::T) where {T<:OffsetInteger} = T($(op)(value(x)))
end
for op in (:(+), :(-), :(*), :(/), :div)
@eval begin
@inline function Base.$op(x::OffsetInteger{O}, y::OffsetInteger{O}) where {O}
return OffsetInteger{O}($op(value(x), value(y)))
end
end
end
for op in (:(==), :(>=), :(<=), :(<), :(>), :sub_with_overflow)
@eval begin
@inline function Base.$op(x::OffsetInteger{O}, y::OffsetInteger{O}) where {O}
return $op(x.i, y.i)
end
end
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 2908 | macro warnpcfail(ex::Expr)
modl = __module__
file = __source__.file === nothing ? "?" : String(__source__.file)
line = __source__.line
quote
$(esc(ex)) || @warn """precompile directive
$($(Expr(:quote, ex)))
failed. Please report an issue in $($modl) (after checking for duplicates) or remove this directive.""" _file=$file _line=$line
end
end
function _precompile_()
ccall(:jl_generating_output, Cint, ()) == 1 || return nothing
@warnpcfail precompile(HyperRectangle{2,Float32}, (Int, Int, Int, Int))
@warnpcfail precompile(==, (HyperRectangle{2,Float32}, HyperRectangle{2,Float32}))
@warnpcfail precompile(normal_mesh, (Tesselation{3,Float32,Cylinder{3,Float32},1},))
@warnpcfail precompile(normal_mesh, (Tesselation{3,Float32,HyperSphere{3,Float32},1},))
@warnpcfail precompile(normal_mesh, (HyperSphere{3,Float32},))
@warnpcfail precompile(Tuple{typeof(*),SMatrix{4, 4, Float32, 16},HyperRectangle{3, Float32}}) # time: 0.11091917
@warnpcfail precompile(Tuple{typeof(coordinates),HyperRectangle{2, Float32},Tuple{Int64, Int64}}) # time: 0.08693867
@warnpcfail precompile(union, (HyperRectangle{3, Float32}, HyperRectangle{3, Float32}))
@warnpcfail precompile(Tuple{typeof(decompose),Type{Point{2, Float32}},HyperRectangle{2, Float32}}) # time: 0.026609203
@warnpcfail precompile(Tuple{Type{HyperRectangle{3, Float32}},HyperRectangle{2, Float32}}) # time: 0.023717888
@warnpcfail precompile(Tuple{typeof(+),HyperRectangle{3, Float32},Point{3, Float32}}) # time: 0.006633118
@warnpcfail precompile(Tuple{Type{Rect2{T} where T},Float32,Float32,Float32,Float32}) # time: 0.001636267
@warnpcfail precompile(Tuple{typeof(*),HyperRectangle{2, Float32},Float32}) # time: 0.001057589
if Base.VERSION >= v"1.6.0-DEV.1083"
@warnpcfail precompile(triangle_mesh, (Polygon{2, Float32, Point2f, LineString{2, Float32, Point2f,
Base.ReinterpretArray{Line{2, Float32}, 1, Tuple{Point2f, Point2f}, TupleView{Tuple{Point2f, Point2f}, 2, 1, Vector{Point2f}}, false}},
Vector{LineString{2, Float32, Point2f, Base.ReinterpretArray{Line{2, Float32}, 1, Tuple{Point2f, Point2f}, TupleView{Tuple{Point2f, Point2f}, 2, 1, Vector{Point2f}}, false}}}},))
else
@warnpcfail precompile(triangle_mesh, (Polygon{2, Float32, Point2f, LineString{2, Float32, Point2f,
Base.ReinterpretArray{Line{2, Float32}, 1, Tuple{Point2f, Point2f}, TupleView{Tuple{Point2f, Point2f}, 2, 1, Vector{Point2f}}}},
Vector{LineString{2, Float32, Point2f, Base.ReinterpretArray{Line{2, Float32}, 1, Tuple{Point2f, Point2f}, TupleView{Tuple{Point2f, Point2f}, 2, 1, Vector{Point2f}}}}}},))
end
@warnpcfail precompile(split_intersections, (Vector{Point2f},))
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 7657 | #=
ported from:
http://docs.ros.org/jade/api/convex_decomposition/html/triangulate_8cpp_source.html
The MIT License (MIT)
Copyright (c) 2006 John W. Ratcliff
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
=#
"""
area(vertices::AbstractVector{AbstractPoint{3}}, face::TriangleFace)
Calculate the area of one triangle.
"""
function area(vertices::AbstractVector{<:AbstractPoint{3,VT}},
face::TriangleFace{FT}) where {VT,FT}
v1, v2, v3 = vertices[face]
return 0.5 * norm(orthogonal_vector(v1, v2, v3))
end
"""
area(vertices::AbstractVector{AbstractPoint{3}}, faces::AbstractVector{TriangleFace})
Calculate the area of all triangles.
"""
function area(vertices::AbstractVector{<:AbstractPoint{3,VT}},
faces::AbstractVector{TriangleFace{FT}}) where {VT,FT}
return sum(x -> area(vertices, x), faces)
end
"""
area(contour::AbstractVector{AbstractPoint}})
Calculate the area of a polygon.
For 2D points, the oriented area is returned (negative when the points are
oriented clockwise).
"""
function area(contour::AbstractVector{<:AbstractPoint{2,T}}) where {T}
length(contour) < 3 && return zero(T)
A = zero(T)
p = lastindex(contour)
for q in eachindex(contour)
A += cross(contour[p], contour[q])
p = q
end
return A * T(0.5)
end
function area(contour::AbstractVector{<:AbstractPoint{3,T}}) where {T}
A = zero(eltype(contour))
o = first(contour)
for i in (firstindex(contour) + 1):(lastindex(contour) - 1)
A += cross(contour[i] - o, contour[i + 1] - o)
end
return norm(A) * T(0.5)
end
"""
in(point, triangle)
Determine if a point is inside of a triangle.
"""
function Base.in(P::T, triangle::Triangle) where {T<:AbstractPoint}
A, B, C = coordinates(triangle)
a = C .- B
b = A .- C
c = B .- A
ap = P .- A
bp = P .- B
cp = P .- C
a_bp = a[1] * bp[2] - a[2] * bp[1]
c_ap = c[1] * ap[2] - c[2] * ap[1]
b_cp = b[1] * cp[2] - b[2] * cp[1]
t0 = zero(eltype(T))
return ((a_bp >= t0) && (b_cp >= t0) && (c_ap >= t0))
end
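# Illustrative example (added): the containment test assumes counter-clockwise
# orientation (all three edge cross products must be non-negative).
#
#   julia> Point2f(0.25, 0.25) in Triangle(Point2f(0, 0), Point2f(1, 0), Point2f(0, 1))
#   true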
function snip(contour::AbstractVector{<:AbstractPoint}, u, v, w, n, V)
A = contour[V[u]]
B = contour[V[v]]
C = contour[V[w]]
x = (((B[1] - A[1]) * (C[2] - A[2])) - ((B[2] - A[2]) * (C[1] - A[1])))
if 0.0000000001f0 > x
return false
end
for p in 1:n
((p == u) || (p == v) || (p == w)) && continue
P = contour[V[p]]
if P in Triangle(A, B, C)
return false
end
end
return true
end
"""
decompose(facetype, contour::AbstractArray{<:AbstractPoint})
Triangulate a Polygon without hole.
Returns a Vector{`facetype`} defining indexes into `contour`.
"""
function decompose(::Type{FaceType},
points::AbstractArray{P}) where {P<:AbstractPoint,FaceType<:AbstractFace}
#= allocate and initialize list of Vertices in polygon =#
result = FaceType[]
# the algorithm doesn't like closed contours
contour = if isapprox(last(points), first(points))
@view points[1:(end - 1)]
else
@view points[1:end]
end
n = length(contour)
if n < 3
error("Not enough points in the contour. Found: $contour")
end
#= we want a counter-clockwise polygon in V =#
if 0 < area(contour)
V = Int[i for i in 1:n]
else
V = Int[i for i in n:-1:1]
end
nv = n
#= remove nv-2 Vertices, creating 1 triangle every time =#
count = 2 * nv #= error detection =#
v = nv
while nv > 2
#= if we loop, it is probably a non-simple polygon =#
if 0 >= count
return result
end
count -= 1
#= three consecutive vertices in current polygon, <u,v,w> =#
u = v
(u > nv) && (u = 1) #= previous =#
v = u + 1
(v > nv) && (v = 1) #= new v =#
w = v + 1
(w > nv) && (w = 1) #= next =#
if snip(contour, u, v, w, nv, V)
#= true names of the vertices =#
a = V[u]
b = V[v]
c = V[w]
#= output Triangle =#
push!(result, convert_simplex(FaceType, TriangleFace(a, b, c))...)
#= remove v from remaining polygon =#
s = v
t = v + 1
while t <= nv
V[s] = V[t]
s += 1
t += 1
end
nv -= 1
            #= reset error detection counter =#
count = 2 * nv
end
end
return result
end
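# Illustrative example (added): ear-clipping a unit square contour yields two
# triangle faces indexing into the input points.
#
#   julia> decompose(TriangleFace{Int}, Point2f[(0, 0), (1, 0), (1, 1), (0, 1)])
#   2-element Vector{TriangleFace{Int64}}  # e.g. (4, 1, 2) and (2, 3, 4)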
function earcut_triangulate(polygon::Vector{Vector{Point{2,Float64}}})
lengths = map(x -> UInt32(length(x)), polygon)
len = UInt32(length(lengths))
array = ccall((:u32_triangulate_f64, libearcut), Tuple{Ptr{GLTriangleFace},Cint},
(Ptr{Ptr{Float64}}, Ptr{UInt32}, UInt32), polygon, lengths, len)
return unsafe_wrap(Vector{GLTriangleFace}, array[1], array[2]; own=true)
end
function earcut_triangulate(polygon::Vector{Vector{Point{2,Float32}}})
lengths = map(x -> UInt32(length(x)), polygon)
len = UInt32(length(lengths))
array = ccall((:u32_triangulate_f32, libearcut), Tuple{Ptr{GLTriangleFace},Cint},
(Ptr{Ptr{Float32}}, Ptr{UInt32}, UInt32), polygon, lengths, len)
return unsafe_wrap(Vector{GLTriangleFace}, array[1], array[2]; own=true)
end
function earcut_triangulate(polygon::Vector{Vector{Point{2,Int64}}})
lengths = map(x -> UInt32(length(x)), polygon)
len = UInt32(length(lengths))
array = ccall((:u32_triangulate_i64, libearcut), Tuple{Ptr{GLTriangleFace},Cint},
(Ptr{Ptr{Int64}}, Ptr{UInt32}, UInt32), polygon, lengths, len)
return unsafe_wrap(Vector{GLTriangleFace}, array[1], array[2]; own=true)
end
function earcut_triangulate(polygon::Vector{Vector{Point{2,Int32}}})
lengths = map(x -> UInt32(length(x)), polygon)
len = UInt32(length(lengths))
array = ccall((:u32_triangulate_i32, libearcut), Tuple{Ptr{GLTriangleFace},Cint},
(Ptr{Ptr{Int32}}, Ptr{UInt32}, UInt32), polygon, lengths, len)
return unsafe_wrap(Vector{GLTriangleFace}, array[1], array[2]; own=true)
end
best_earcut_eltype(x) = Float64
best_earcut_eltype(::Type{Float64}) = Float64
best_earcut_eltype(::Type{<:AbstractFloat}) = Float32
best_earcut_eltype(::Type{Int64}) = Int64
best_earcut_eltype(::Type{Int32}) = Int32
best_earcut_eltype(::Type{<:Integer}) = Int64
function faces(polygon::Polygon{Dim,T}) where {Dim,T}
PT = Point{Dim,best_earcut_eltype(T)}
points = [decompose(PT, polygon.exterior)]
foreach(x -> push!(points, decompose(PT, x)), polygon.interiors)
return earcut_triangulate(points)
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 5247 | """
TupleView{T, N, Skip, A}
TupleView groups elements of an array as tuples.
`N` is the dimension of the tuple, `Skip` tells how many elements to skip to the next tuple.
By default, `TupleView{N}` skips `N` items.
# a few examples:
```julia
x = [1, 2, 3, 4, 5, 6]
TupleView{2, 1}(x):
> [(1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
TupleView{2}(x):
> [(1, 2), (3, 4), (5, 6)]
TupleView{2, 3}(x):
> [(1, 2), (4, 5)]
TupleView{3, 1}(x):
> [(1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6)]
```
TupleView can be used together with reinterpret:
```julia
x = [1, 2, 3, 4]
y = reinterpret(Point{2, Int}, TupleView{2, 1}(x))
> [Point(1, 2), Point(2, 3), Point(3, 4)]
```
"""
struct TupleView{T,N,Skip,A} <: AbstractVector{T}
data::A
connect::Bool
end
coordinates(tw::TupleView) = coordinates(tw.data)
function Base.size(x::TupleView{T,N,M}) where {T,N,M}
    nitems = length(x.data) ÷ (N - (N - M))
nitems = nitems - max(N - M, 0)
return (nitems + x.connect,) # plus one item if we connect
end
function Base.getindex(x::TupleView{T,N,M}, index::Integer) where {T,N,M}
return ntuple(i -> x.data[mod1(((index - 1) * M) + i, length(x.data))], N)
end
function TupleView{N}(x::AbstractVector; connect=false) where {N}
return TupleView{N,N}(x, connect=connect)
end
function TupleView{N,M}(x::AbstractVector{T}; connect=false) where {T,N,M}
return TupleView{NTuple{N,T},N,M,typeof(x)}(x, connect)
end
@inline function connected_line(points::AbstractVector{<:AbstractPoint{N}},
skip=N) where {N}
return connect(points, Line, skip)
end
"""
connect(points::AbstractVector{<: AbstractPoint}, P::Type{<: Polytope{N}}, skip::Int = N)
Creates a view that connects a number of points to a Polytope `P`.
Between each polytope, `skip` elements are skipped until the next starts.
Example:
```julia
x = connect(Point[(1, 2), (3, 4), (5, 6), (7, 8)], Line, 2)
x == [Line(Point(1, 2), Point(3, 4)), Line(Point(5, 6), Point(7, 8))]
```
"""
@inline function connect(points::AbstractVector{Point},
P::Type{<:Polytope{N,T} where {N,T}},
skip::Int=length(P)) where {Point <: AbstractPoint}
return reinterpret(Polytope(P, Point), TupleView{length(P),skip}(points))
end
@inline function connect(points::AbstractVector{T}, P::Type{<:Point{N}},
skip::Int=N) where {T <: Real,N}
return reinterpret(Point{N,T}, TupleView{N,skip}(points))
end
@inline function connect(points::AbstractVector{T}, P::Type{<:AbstractFace{N}},
skip::Int=N) where {T <: Real,N}
return reinterpret(Face(P, T), TupleView{N,skip}(points))
end
@inline function connect(points::AbstractMatrix{T},
P::Type{<:AbstractPoint{N}}) where {T <: Real,N}
return if size(points, 1) === N
return reinterpret(Point{N,T}, points)
elseif size(points, 2) === N
seglen = size(points, 1)
columns = ntuple(N) do i
return view(points, ((i - 1) * seglen + 1):(i * seglen))
end
return StructArray{Point{N,T}}(columns)
else
error("Dim 1 or 2 must be equal to the point dimension!")
end
end
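# Illustrative examples (added): connect reinterprets flat data without copying.
#
#   julia> connect([1, 2, 3, 4, 5, 6], Point{2})
#   [Point(1, 2), Point(3, 4), Point(5, 6)]
#
#   julia> connect([1, 2, 3, 4, 5, 6], TriangleFace)
#   [TriangleFace(1, 2, 3), TriangleFace(4, 5, 6)]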
"""
FaceView{Element, Point, Face, P, F}
FaceView enables to link one array of points via a face array, to generate one
abstract array of elements.
E.g., this becomes possible:
```
x = FaceView(rand(Point3f, 10), TriangleFace[(1, 2, 3), (2, 4, 5), ...])
x[1] isa Triangle == true
x isa AbstractVector{<: Triangle} == true
# This means we can use it as a mesh:
Mesh(x) # should just work!
# Can also be used for Points:
linestring = FaceView(points, LineFace[...])
Polygon(linestring)
```
"""
struct FaceView{Element,Point <: AbstractPoint,Face <: AbstractFace,P <: AbstractVector{Point},F <: AbstractVector{Face}} <: AbstractVector{Element}
elements::P
faces::F
end
const SimpleFaceView{Dim,T,NFace,IndexType,PointType <: AbstractPoint{Dim,T},FaceType <: AbstractFace{NFace,IndexType}} = FaceView{Ngon{Dim,T,NFace,PointType},PointType,FaceType,Vector{PointType},Vector{FaceType}}
function Base.getproperty(faceview::FaceView, name::Symbol)
return getproperty(getfield(faceview, :elements), name)
end
function Base.propertynames(faceview::FaceView)
return propertynames(getfield(faceview, :elements))
end
Tables.schema(faceview::FaceView) = Tables.schema(getfield(faceview, :elements))
Base.size(faceview::FaceView) = size(getfield(faceview, :faces))
@propagate_inbounds function Base.getindex(x::FaceView{Element}, i) where {Element}
return Element(map(idx -> coordinates(x)[idx], faces(x)[i]))
end
@propagate_inbounds function Base.setindex!(x::FaceView{Element}, element::Element,
i) where {Element}
face = faces(x)[i]
for (i, f) in enumerate(face) # TODO unroll!?
coordinates(x)[face[i]] = element[i]
end
return element
end
function connect(points::AbstractVector{P},
faces::AbstractVector{F}) where {P <: AbstractPoint,F <: AbstractFace}
return FaceView{Polytope(P, F),P,F,typeof(points),typeof(faces)}(points, faces)
end
coordinates(mesh::FaceView) = getfield(mesh, :elements)
faces(mesh::FaceView) = getfield(mesh, :faces)
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 3246 | """
Cylinder{N, T}
A `Cylinder` is a 2D rectangle or a 3D cylinder defined by its origin point,
its extremity and a radius. `origin`, `extremity` and `r`, must be specified.
"""
struct Cylinder{N,T} <: GeometryPrimitive{N,T}
origin::Point{N,T}
extremity::Point{N,T}
r::T
end
"""
Cylinder2{T}
Cylinder3{T}
A `Cylinder2` or `Cylinder3` is a 2D/3D cylinder defined by its origin point,
its extremity and a radius. `origin`, `extremity`, and `r` must be specified.
"""
const Cylinder2{T} = Cylinder{2,T}
const Cylinder3{T} = Cylinder{3,T}
origin(c::Cylinder{N,T}) where {N,T} = c.origin
extremity(c::Cylinder{N,T}) where {N,T} = c.extremity
radius(c::Cylinder{N,T}) where {N,T} = c.r
height(c::Cylinder{N,T}) where {N,T} = norm(c.extremity - c.origin)
direction(c::Cylinder{N,T}) where {N,T} = (c.extremity .- c.origin) ./ height(c)
function rotation(c::Cylinder{2,T}) where {T}
d2 = direction(c)
u = @SVector [d2[1], d2[2], T(0)]
v = @MVector [u[2], -u[1], T(0)]
normalize!(v)
return hcat(v, u, @SVector T[0, 0, 1])
end
function rotation(c::Cylinder{3,T}) where {T}
d3 = direction(c)
u = @SVector [d3[1], d3[2], d3[3]]
if abs(u[1]) > 0 || abs(u[2]) > 0
v = @MVector [u[2], -u[1], T(0)]
else
v = @MVector [T(0), -u[3], u[2]]
end
normalize!(v)
w = @SVector [u[2] * v[3] - u[3] * v[2], -u[1] * v[3] + u[3] * v[1],
u[1] * v[2] - u[2] * v[1]]
return hcat(v, w, u)
end
function coordinates(c::Cylinder{2,T}, nvertices=(2, 2)) where {T}
r = Rect(c.origin[1] - c.r / 2, c.origin[2], c.r, height(c))
M = rotation(c)
points = coordinates(r, nvertices)
vo = to_pointn(Point3{T}, origin(c))
return (M * (to_pointn(Point3{T}, point) .- vo) .+ vo for point in points)
end
function faces(c::Cylinder{2}, nvertices=(2, 2))
return faces(Rect(0, 0, 1, 1), nvertices)
end
function coordinates(c::Cylinder{3,T}, nvertices=30) where {T}
if isodd(nvertices)
        nvertices = 2 * (nvertices ÷ 2)
end
nvertices = max(8, nvertices)
    nbv = nvertices ÷ 2
M = rotation(c)
h = height(c)
range = 1:(2 * nbv + 2)
function inner(i)
return if i == length(range)
c.extremity
elseif i == length(range) - 1
origin(c)
else
            phi = T((2π * (((i + 1) ÷ 2) - 1)) / nbv)
up = ifelse(isodd(i), 0, h)
(M * Point(c.r * cos(phi), c.r * sin(phi), up)) .+ c.origin
end
end
return (inner(i) for i in range)
end
function faces(c::Cylinder{3}, facets=30)
    isodd(facets) && (facets = 2 * div(facets, 2))
    facets = max(8, facets)
nbv = Int(facets / 2)
indexes = Vector{TriangleFace{Int}}(undef, facets)
index = 1
for j in 1:(nbv - 1)
indexes[index] = (index + 2, index + 1, index)
indexes[index + 1] = (index + 3, index + 1, index + 2)
index += 2
end
indexes[index] = (1, index + 1, index)
indexes[index + 1] = (2, index + 1, 1)
for i in 1:length(indexes)
i % 2 == 1 ? push!(indexes, (indexes[i][1], indexes[i][3], 2 * nbv + 1)) :
push!(indexes, (indexes[i][2], indexes[i][1], 2 * nbv + 2))
end
return indexes
end
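# Usage sketch (added): meshing a cylinder with an explicit facet count by
# wrapping it in a Tesselation.
#
#   c = Cylinder(Point3f(0, 0, 0), Point3f(0, 0, 1), 0.5f0)
#   m = normal_mesh(Tesselation(c, 32))  # 32 facets around the axis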
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 99 | struct Particle{N,T} <: GeometryPrimitive{N,T}
position::Point{N,T}
velocity::Vec{N,T}
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 726 | struct Pyramid{T} <: GeometryPrimitive{3,T}
middle::Point{3,T}
length::T
width::T
end
function coordinates(p::Pyramid{T}) where {T}
leftup = Point{3,T}(-p.width, p.width, 0) / 2
leftdown = Point(-p.width, -p.width, 0) / 2
tip = Point{3,T}(p.middle + Point{3,T}(0, 0, p.length))
lu = Point{3,T}(p.middle + leftup)
ld = Point{3,T}(p.middle + leftdown)
ru = Point{3,T}(p.middle - leftdown)
rd = Point{3,T}(p.middle - leftup)
return Point{3,T}[
tip, rd, ru,
tip, ru, lu,
tip, lu, ld,
tip, ld, rd,
ru, rd, lu,
ld, lu, rd
]
end
function faces(::Pyramid)
return (TriangleFace(triangle) for triangle in TupleView{3}(1:18))
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 17063 |
"""
HyperRectangle{N, T}
A `HyperRectangle` is a generalization of a rectangle into N-dimensions.
Formally it is the cartesian product of intervals, which is represented by the
`origin` and `widths` fields, whose indices correspond to each of the `N` axes.
"""
struct HyperRectangle{N,T} <: GeometryPrimitive{N,T}
origin::Vec{N,T}
widths::Vec{N,T}
end
##
# Constructors & typealiases
"""
const Rect{N,T} = HyperRectangle{N,T}
A rectangle in N dimensions, formally the cartesian product of intervals. See also [`HyperRectangle`](@ref). Its aliases are
| |`T`(eltype)|`Float64` |`Float32` |`Int` |
|--------|-----------|----------|----------|----------|
|`N`(dim)|`Rect{N,T}`|`Rectd{N}`|`Rectf{N}`|`Recti{N}`|
|`2` |`Rect2{T}` |`Rect2d` |`Rect2f` |`Rect2i` |
|`3` |`Rect3{T}` |`Rect3d` |`Rect3f` |`Rect3i` |
There is an additional unexported alias `RectT` that simply reverses the order of type parameters: `RectT{T,N} == Rect{N,T}`.
"""
Rect, Rect2, Rect3, RectT, Rectd, Rect2d, Rect3d, Rectf, Rect2f, Rect3f, Recti, Rect2i, Rect3i
const Rect{N,T} = HyperRectangle{N,T}
const Rect2{T} = Rect{2,T}
const Rect3{T} = Rect{3,T}
const RectT{T,N} = Rect{N,T}
const Rectd{N} = Rect{N,Float64}
const Rect2d = Rect2{Float64}
const Rect3d = Rect3{Float64}
const Rectf{N} = Rect{N,Float32}
const Rect2f = Rect2{Float32}
const Rect3f = Rect3{Float32}
const Recti{N} = Rect{N,Int}
const Rect2i = Rect2{Int}
const Rect3i = Rect3{Int}
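# Quick sanity sketch of the aliases above (illustrative comment only):
#   Rect2f === Rect{2,Float32} === HyperRectangle{2,Float32}
#   RectT{Float64,3} === Rect{3,Float64} === Rect3d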
Rect() = Rect{2,Float32}()
RectT{T}() where {T} = Rect{2,T}()
Rect{N}() where {N} = Rect{N,Float32}()
function Rect{N,T}() where {T,N}
# empty constructor such that update will always include the first point
return Rect{N,T}(Vec{N,T}(typemax(T)), Vec{N,T}(typemin(T)))
end
# conversion from other Rects
function Rect{N,T1}(a::Rect{N,T2}) where {N,T1,T2}
return Rect(Vec{N,T1}(minimum(a)), Vec{N,T1}(widths(a)))
end
function Rect(v1::Vec{N,T1}, v2::Vec{N,T2}) where {N,T1,T2}
T = promote_type(T1, T2)
return Rect{N,T}(Vec{N,T}(v1), Vec{N,T}(v2))
end
function RectT{T}(v1::VecTypes{N}, v2::VecTypes{N}) where {N,T}
return if T <: Integer
Rect{N,T}(round.(T, v1), round.(T, v2))
else
return Rect{N,T}(Vec{N,T}(v1), Vec{N,T}(v2))
end
end
function Rect{N}(v1::VecTypes{N}, v2::VecTypes{N}) where {N}
T = promote_type(eltype(v1), eltype(v2))
return Rect{N,T}(Vec{N,T}(v1), Vec{N,T}(v2))
end
"""
Rect(vals::Number...)
Rect constructor for individually specified intervals,
e.g. `Rect(0, 0, 1, 2)` has `origin == Vec(0, 0)` and
`widths == Vec(1, 2)`.
"""
@generated function Rect(vals::Number...)
# Generated so we get goodish codegen on each signature
n = length(vals)
@assert iseven(n)
mid = div(n, 2)
v1 = Expr(:call, :Vec)
v2 = Expr(:call, :Vec)
# TODO this can be inbounds
append!(v1.args, [:(vals[$i]) for i in 1:mid])
append!(v2.args, [:(vals[$i]) for i in (mid + 1):length(vals)])
return Expr(:call, :Rect, v1, v2)
end
Rect3(a::Vararg{Number,6}) = Rect3(Vec{3}(a[1], a[2], a[3]), Vec{3}(a[4], a[5], a[6]))
Rect3(args::Vararg{Number,4}) = Rect3(Rect{2}(args...))
#=
From different args
=#
function (Rect)(args::Vararg{Number,4})
args_prom = promote(args...)
return Rect2{typeof(args_prom[1])}(args_prom...)
end
function (Rect2)(args::Vararg{Number,4})
args_prom = promote(args...)
return Rect2{typeof(args_prom[1])}(args_prom...)
end
function (Rect{2,T})(args::Vararg{Number,4}) where {T}
x, y, w, h = T <: Integer ? round.(T, args) : args
return Rect2{T}(Vec{2,T}(x, y), Vec{2,T}(w, h))
end
function RectT{T}(args::Vararg{Number,4}) where {T}
x, y, w, h = T <: Integer ? round.(T, args) : args
return Rect2{T}(Vec{2,T}(x, y), Vec{2,T}(w, h))
end
function Rect3f(x::Rect2{T}) where {T}
return Rect{3,T}(Vec{3,T}(minimum(x)..., 0), Vec{3,T}(widths(x)..., 0.0))
end
function Rect2{T}(a::Rect2) where {T}
return Rect2{T}(minimum(a), widths(a))
end
function RectT{T}(a::Rect2) where {T}
return Rect2{T}(minimum(a), widths(a))
end
function Rect{N,T}(a::GeometryPrimitive) where {N,T}
return Rect{N,T}(Vec{N,T}(minimum(a)), Vec{N,T}(widths(a)))
end
function Rect2(xy::VecTypes{2}, w::Number, h::Number)
return Rect2(xy..., w, h)
end
function Rect2(x::Number, y::Number, wh::VecTypes{2})
return Rect2(x, y, wh...)
end
function RectT{T}(xy::VecTypes{2}, w::Number, h::Number) where {T}
return Rect2{T}(xy..., w, h)
end
function RectT{T}(x::Number, y::Number, wh::VecTypes{2}) where {T}
return Rect2{T}(x, y, wh...)
end
# TODO These are kinda silly
function Rect2(xy::NamedTuple{(:x, :y)}, wh::NamedTuple{(:width, :height)})
return Rect2(xy.x, xy.y, wh.width, wh.height)
end
function Rect3f(x::Tuple{Tuple{<:Number,<:Number},Tuple{<:Number,<:Number}})
return Rect3f(Vec3f(x[1]..., 0), Vec3f(x[2]..., 0))
end
function Rect3f(x::Tuple{Tuple{<:Number,<:Number,<:Number},
Tuple{<:Number,<:Number,<:Number}})
return Rect3f(Vec3f(x[1]...), Vec3f(x[2]...))
end
# allow auto-conversion between different eltypes
Base.convert(::Type{Rect{N, T}}, r::Rect{N}) where {N, T} = Rect{N, T}(r)
origin(prim::Rect) = prim.origin
Base.maximum(prim::Rect) = origin(prim) + widths(prim)
Base.minimum(prim::Rect) = origin(prim)
Base.length(prim::Rect{N,T}) where {T,N} = N
widths(prim::Rect) = prim.widths
width(prim::Rect) = prim.widths[1]
height(prim::Rect) = prim.widths[2]
volume(prim::HyperRectangle) = prod(prim.widths)
area(prim::Rect2) = volume(prim)
"""
split(rectangle, axis, value)
Splits an Rect into two along an axis at a given location.
"""
split(b::Rect, axis, value::Integer) = _split(b, axis, value)
split(b::Rect, axis, value::Number) = _split(b, axis, value)
function _split(b::H, axis, value) where {H<:Rect}
bmin = minimum(b)
bmax = maximum(b)
b1max = setindex(bmax, value, axis)
b2min = setindex(bmin, value, axis)
return H(bmin, b1max - bmin), H(b2min, bmax - b2min)
end
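# Worked sketch (illustrative comment; the same case appears in the test suite
# later in this dump): splitting a 1×2 rect along axis 2 at 1 gives two halves:
#   r1, r2 = split(Rect(Vec(0.0, 0.0), Vec(1.0, 2.0)), 2, 1)
#   origin(r1) == Vec(0.0, 0.0) && origin(r2) == Vec(0.0, 1.0)
#   widths(r1) == widths(r2)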
###
# Algebraic operations
"""
*(m::Mat, h::Rect)
Transform a `Rect` using a matrix. Maintains axis-aligned properties,
so a significantly larger `Rect` may be generated.
"""
function Base.:(*)(m::Mat{N1,N1,T1}, h::Rect{N2,T2}) where {N1,N2,T1,T2}
# TypeVar constants
T = promote_type(T1, T2)
D = N1 - N2
# get all points on the Rect
d = decompose(Point, h)
# make sure our points are sized for the transform
pts = (Vec{N1,T}[vcat(pt, ones(Vec{D,T})) for pt in d]...,)::NTuple{2^N2,Vec{N1,T}}
vmin = Vec{N1,T}(typemax(T))
vmax = Vec{N1,T}(typemin(T))
# transform all points, tracking min and max points
for pt in pts
pn = m * pt
vmin = min.(pn, vmin)
vmax = max.(pn, vmax)
end
return Rect{N2,T}(vmin, vmax - vmin)
end
# equal dimension case
function Base.:(*)(m::Mat{N,N,T1}, h::Rect{N,T2}) where {N,T1,T2}
# TypeVar constants
T = promote_type(T1, T2)
# get all points on the Rect
pts = decompose(Point, h)
# make sure our points are sized for the transform
vmin = Vec{N,T}(typemax(T))
vmax = Vec{N,T}(typemin(T))
# transform all points, tracking min and max points
for pt in pts
pn = m * Vec(pt)
vmin = min.(pn, vmin)
vmax = max.(pn, vmax)
end
return Rect{N,T}(vmin, vmax - vmin)
end
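# Rough illustration (comment only; numbers are rounded assumptions): because
# the result must remain axis-aligned, rotating the unit square by 45° yields
# a larger √2 × √2 bounding Rect:
#   m = Mat{2,2,Float64}(cos(π/4), sin(π/4), -sin(π/4), cos(π/4))
#   m * Rect(0.0, 0.0, 1.0, 1.0)   # ≈ Rect((-0.707, 0.0), (1.414, 1.414))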
# fast path. TODO: make the other versions fast without this code duplication
function Base.:(*)(m::Mat{4,4,T}, h::Rect{3,T}) where {T}
# equal dimension case
# get all points on the Rect
pts = (Vec{4,T}(0.0, 0.0, 0.0, 1.0), Vec{4,T}(1.0, 0.0, 0.0, 1.0),
Vec{4,T}(0.0, 1.0, 0.0, 1.0), Vec{4,T}(1.0, 1.0, 0.0, 1.0),
Vec{4,T}(0.0, 0.0, 1.0, 1.0), Vec{4,T}(1.0, 0.0, 1.0, 1.0),
Vec{4,T}(0.0, 1.0, 1.0, 1.0), Vec{4,T}(1.0, 1.0, 1.0, 1.0))
# make sure our points are sized for the transform
vmin = Vec{4,T}(typemax(T))
vmax = Vec{4,T}(typemin(T))
o, w = origin(h), widths(h)
_o = Vec{4,T}(o[1], o[2], o[3], T(0))
_w = Vec{4,T}(w[1], w[2], w[3], T(1))
# transform all points, tracking min and max points
for pt in pts
pn = m * (_o + (pt .* _w))
vmin = min.(pn, vmin)
vmax = max.(pn, vmax)
end
_vmin = Vec{3,T}(vmin[1], vmin[2], vmin[3])
_vmax = Vec{3,T}(vmax[1], vmax[2], vmax[3])
return Rect{3,T}(_vmin, _vmax - _vmin)
end
Base.:(-)(h::Rect{N,T}, move::Number) where {N,T} = h - Vec{N,T}(move)
Base.:(+)(h::Rect{N,T}, move::Number) where {N,T} = h + Vec{N,T}(move)
function Base.:(-)(h::Rect{N,T}, move::StaticVector{N}) where {N,T}
return Rect{N,T}(minimum(h) .- move, widths(h))
end
function Base.:(+)(h::Rect{N,T}, move::StaticVector{N}) where {N,T}
return Rect{N,T}(minimum(h) .+ move, widths(h))
end
function Base.:(*)(rect::Rect, scaling::Union{Number,StaticVector})
return Rect(minimum(rect) .* scaling, widths(rect) .* scaling)
end
# Enables rectangular indexing into a matrix
function Base.to_indices(A::AbstractMatrix{T}, I::Tuple{Rect2{IT}}) where {T,IT<:Integer}
rect = I[1]
mini = minimum(rect)
wh = widths(rect)
return ((mini[1] + 1):(mini[1] + wh[1]), (mini[2] + 1):(mini[2] + wh[2]))
end
function minmax(p::StaticVector, vmin, vmax)
any(isnan, p) && return (vmin, vmax)
return min.(p, vmin), max.(p, vmax)
end
# Annoying special case for view(Vector{Point}, Vector{Face})
function minmax(tup::Tuple, vmin, vmax)
for p in tup
any(isnan, p) && continue
vmin = min.(p, vmin)
vmax = max.(p, vmax)
end
return vmin, vmax
end
function positive_widths(rect::Rect{N,T}) where {N,T}
mini, maxi = minimum(rect), maximum(rect)
realmin = min.(mini, maxi)
realmax = max.(mini, maxi)
return Rect{N,T}(realmin, realmax .- realmin)
end
###
# set operations
"""
isempty(h::Rect)
Return `true` if any of the widths of `h` are negative.
"""
Base.isempty(h::Rect{N,T}) where {N,T} = any(<(zero(T)), h.widths)
"""
Perform a union between two Rects.
"""
function Base.union(h1::Rect{N}, h2::Rect{N}) where {N}
m = min.(minimum(h1), minimum(h2))
mm = max.(maximum(h1), maximum(h2))
return Rect{N}(m, mm - m)
end
"""
diff(h1::Rect, h2::Rect)
Perform a difference between two Rects.
"""
diff(h1::Rect, h2::Rect) = h1
"""
intersect(h1::Rect, h2::Rect)
Perform a intersection between two Rects.
"""
function intersect(h1::Rect{N}, h2::Rect{N}) where {N}
m = max.(minimum(h1), minimum(h2))
mm = min.(maximum(h1), maximum(h2))
return Rect{N}(m, mm - m)
end
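# Worked sketch (illustrative comment; values follow from the definitions
# above): two unit-area boxes that only touch at a corner:
#   h1 = Rect(0.0, 0.0, 1.0, 1.0); h2 = Rect(1.0, 1.0, 2.0, 2.0)
#   union(h1, h2)     == Rect(0.0, 0.0, 3.0, 3.0)
#   intersect(h1, h2) == Rect(1.0, 1.0, 0.0, 0.0)   # degenerate: shared corner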
function update(b::Rect{N,T}, v::Vec{N,T2}) where {N,T,T2}
return update(b, Vec{N,T}(v))
end
function update(b::Rect{N,T}, v::Vec{N,T}) where {N,T}
m = min.(minimum(b), v)
maxi = maximum(b)
mm = if any(isnan, maxi)
v - m
else
max.(v, maxi) - m
end
return Rect{N,T}(m, mm)
end
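# Sketch of the growth pattern (illustrative comment): starting from the empty
# Rect{N,T}() defined above (origin typemax, widths typemin), each `update`
# call grows the box to include the new point:
#   b = update(Rect{2,Float64}(), Vec(1.0, 2.0))   # collapses onto (1, 2)
#   b = update(b, Vec(-1.0, 0.0))                  # now spans (-1, 0) .. (1, 2)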
# Minimum and maximum distance functions between a rectangle and a point for a given dimension
function min_dist_dim(rect::Rect{N,T}, p::Vec{N,T}, dim::Int) where {N,T}
return max(zero(T), max(minimum(rect)[dim] - p[dim], p[dim] - maximum(rect)[dim]))
end
function max_dist_dim(rect::Rect{N,T}, p::Vec{N,T}, dim::Int) where {N,T}
return max(maximum(rect)[dim] - p[dim], p[dim] - minimum(rect)[dim])
end
function min_dist_dim(rect1::Rect{N,T}, rect2::Rect{N,T}, dim::Int) where {N,T}
return max(zero(T),
max(minimum(rect1)[dim] - maximum(rect2)[dim],
minimum(rect2)[dim] - maximum(rect1)[dim]))
end
function max_dist_dim(rect1::Rect{N,T}, rect2::Rect{N,T}, dim::Int) where {N,T}
return max(maximum(rect1)[dim] - minimum(rect2)[dim],
maximum(rect2)[dim] - minimum(rect1)[dim])
end
# Total minimum and maximum distance functions
function min_euclideansq(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
minimum_dist = T(0.0)
for dim in 1:length(p)
d = min_dist_dim(rect, p, dim)
minimum_dist += d * d
end
return minimum_dist
end
function max_euclideansq(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
maximum_dist = T(0.0)
for dim in 1:length(p)
d = max_dist_dim(rect, p, dim)
maximum_dist += d * d
end
return maximum_dist
end
function min_euclidean(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
return sqrt(min_euclideansq(rect, p))
end
function max_euclidean(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
return sqrt(max_euclideansq(rect, p))
end
# Functions that return both minimum and maximum for convenience
function minmax_dist_dim(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}},
dim::Int) where {N,T}
minimum_d = min_dist_dim(rect, p, dim)
maximum_d = max_dist_dim(rect, p, dim)
return minimum_d, maximum_d
end
function minmax_euclideansq(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
minimum_dist = min_euclideansq(rect, p)
maximum_dist = max_euclideansq(rect, p)
return minimum_dist, maximum_dist
end
function minmax_euclidean(rect::Rect{N,T}, p::Union{Vec{N,T},Rect{N,T}}) where {N,T}
minimumsq, maximumsq = minmax_euclideansq(rect, p)
return sqrt(minimumsq), sqrt(maximumsq)
end
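# Worked sketch (illustrative comment; the per-dimension values match the test
# suite later in this dump):
#   rect = Rect(0.0, 0.0, 1.0, 1.0); p = Vec(5.0, 4.0)
#   min_dist_dim(rect, p, 1) == 4.0 && max_dist_dim(rect, p, 1) == 5.0
#   min_euclidean(rect, p) == 5.0   # hypot(4, 3)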
# http://en.wikipedia.org/wiki/Allen%27s_interval_algebra
function before(b1::Rect{N}, b2::Rect{N}) where {N}
for i in 1:N
maximum(b1)[i] < minimum(b2)[i] || return false
end
return true
end
meets(b1::Rect{N}, b2::Rect{N}) where {N} = maximum(b1) == minimum(b2)
function overlaps(b1::Rect{N}, b2::Rect{N}) where {N}
for i in 1:N
maximum(b2)[i] > maximum(b1)[i] > minimum(b2)[i] &&
minimum(b1)[i] < minimum(b2)[i] || return false
end
return true
end
function starts(b1::Rect{N}, b2::Rect{N}) where {N}
return if minimum(b1) == minimum(b2)
for i in 1:N
maximum(b1)[i] < maximum(b2)[i] || return false
end
return true
else
return false
end
end
function during(b1::Rect{N}, b2::Rect{N}) where {N}
for i in 1:N
maximum(b1)[i] < maximum(b2)[i] && minimum(b1)[i] > minimum(b2)[i] || return false
end
return true
end
function finishes(b1::Rect{N}, b2::Rect{N}) where {N}
return if maximum(b1) == maximum(b2)
for i in 1:N
minimum(b1)[i] > minimum(b2)[i] || return false
end
return true
else
return false
end
end
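# Worked sketch of the interval-algebra predicates (illustrative comment;
# cases taken from the test suite later in this dump):
#   before(Rect(0.0, 0.0, 1.0, 1.0), Rect(3.0, 2.0, 4.0, 2.0))   # true
#   meets(Rect(0.0, 0.0, 1.0, 1.0), Rect(1.0, 1.0, 4.0, 2.0))    # true
#   during(Rect(1.0, 1.0, 2.0, 2.0), Rect(0.0, 0.0, 4.0, 4.0))   # true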
###
# Containment
"""
in(b1::Rect, b2::Rect)
Check if Rect `b1` is contained in `b2`. This does not use
strict inequality, so Rects may share faces and this will still
return true.
"""
function Base.in(b1::Rect{N}, b2::Rect{N}) where {N}
for i in 1:N
maximum(b1)[i] <= maximum(b2)[i] && minimum(b1)[i] >= minimum(b2)[i] || return false
end
return true
end
"""
in(pt::VecTypes, b1::Rect{N, T})
Check if a point is contained in a Rect. This will return true if
the point is on a face of the Rect.
"""
function Base.in(pt::VecTypes, b1::Rect{N,T}) where {T,N}
for i in 1:N
pt[i] <= maximum(b1)[i] && pt[i] >= minimum(b1)[i] || return false
end
return true
end
#
# Equality
#
Base.:(==)(b1::Rect, b2::Rect) = minimum(b1) == minimum(b2) && widths(b1) == widths(b2)
Base.isequal(b1::Rect, b2::Rect) = b1 == b2
centered(R::Type{Rect{N,T}}) where {N,T} = R(Vec{N,T}(-0.5), Vec{N,T}(1))
centered(R::Type{Rect{N}}) where {N} = R(Vec{N,Float32}(-0.5), Vec{N,Float32}(1))
centered(R::Type{Rect}) = R(Vec{2,Float32}(-0.5), Vec{2,Float32}(1))
##
# Rect2 decomposition
function faces(rect::Rect2, nvertices=(2, 2))
w, h = nvertices
idx = LinearIndices(nvertices)
quad(i, j) = QuadFace{Int}(idx[i, j], idx[i + 1, j], idx[i + 1, j + 1], idx[i, j + 1])
return ivec((quad(i, j) for i in 1:(w - 1), j in 1:(h - 1)))
end
function coordinates(rect::Rect2, nvertices=(2, 2))
mini, maxi = extrema(rect)
xrange, yrange = LinRange.(mini, maxi, nvertices)
return ivec(((x, y) for x in xrange, y in yrange))
end
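# Decomposition sketch (illustrative comment): with the default 2×2 grid the
# unit Rect2 decomposes into its four corners (x varying fastest) and one quad,
# as the LinearIndices/LinRange logic above implies:
#   collect(coordinates(Rect2(0, 0, 1, 1)))   # (0,0), (1,0), (0,1), (1,1)
#   collect(faces(Rect2(0, 0, 1, 1)))         # [QuadFace(1, 2, 4, 3)]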
function texturecoordinates(rect::Rect2, nvertices=(2, 2))
xrange, yrange = LinRange.((0, 1), (1, 0), nvertices)
return ivec(((x, y) for x in xrange, y in yrange))
end
function normals(rect::Rect2, nvertices=(2, 2))
return Iterators.repeated((0, 0, 1), prod(nvertices))
end
##
# Rect3 decomposition
function coordinates(rect::Rect3)
# TODO use n
w = widths(rect)
o = origin(rect)
points = Point{3,Int}[(0, 0, 0), (0, 0, 1), (0, 1, 1), (0, 1, 0), (0, 0, 0), (1, 0, 0),
(1, 0, 1), (0, 0, 1), (0, 0, 0), (0, 1, 0), (1, 1, 0), (1, 0, 0),
(1, 1, 1), (0, 1, 1), (0, 0, 1), (1, 0, 1), (1, 1, 1), (1, 0, 1),
(1, 0, 0), (1, 1, 0), (1, 1, 1), (1, 1, 0), (0, 1, 0), (0, 1, 1)]
return ((x .* w .+ o) for x in points)
end
function texturecoordinates(rect::Rect3)
return coordinates(Rect3(0, 0, 0, 1, 1, 1))
end
function faces(rect::Rect3)
return QuadFace{Int}[(1, 2, 3, 4), (5, 6, 7, 8), (9, 10, 11, 12), (13, 14, 15, 16),
(17, 18, 19, 20), (21, 22, 23, 24),]
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 2156 | """
HyperSphere{N, T}
A `HyperSphere` is a generalization of a sphere into N-dimensions.
A `center` and radius, `r`, must be specified.
"""
struct HyperSphere{N,T} <: GeometryPrimitive{N,T}
center::Point{N,T}
r::T
end
"""
Circle{T}
An alias for a HyperSphere of dimension 2. (i.e. `HyperSphere{2, T}`)
"""
const Circle{T} = HyperSphere{2,T}
"""
Sphere{T}
An alias for a HyperSphere of dimension 3. (i.e. `HyperSphere{3, T}`)
"""
const Sphere{T} = HyperSphere{3,T}
HyperSphere{N}(p::Point{N,T}, number) where {N,T} = HyperSphere{N,T}(p, convert(T, number))
widths(c::HyperSphere{N,T}) where {N,T} = Vec{N,T}(radius(c) * 2)
radius(c::HyperSphere) = c.r
origin(c::HyperSphere) = c.center
Base.minimum(c::HyperSphere{N,T}) where {N,T} = Vec{N,T}(origin(c)) - Vec{N,T}(radius(c))
Base.maximum(c::HyperSphere{N,T}) where {N,T} = Vec{N,T}(origin(c)) + Vec{N,T}(radius(c))
function Base.in(x::AbstractPoint, c::HyperSphere)
return norm(origin(c) - x) ≤ radius(c)
end
centered(S::Type{HyperSphere{N,T}}) where {N,T} = S(Vec{N,T}(0), T(0.5))
function centered(::Type{T}) where {T<:HyperSphere}
return centered(HyperSphere{ndims_or(T, 3),eltype_or(T, Float32)})
end
function coordinates(s::Circle, nvertices=64)
rad = radius(s)
inner(fi) = Point(rad * sin(fi + pi), rad * cos(fi + pi)) .+ origin(s)
return (inner(fi) for fi in LinRange(0, 2pi, nvertices))
end
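# Sketch (illustrative comment): the `+ pi` phase starts the polyline at
# origin .+ (0, -r), and LinRange(0, 2pi, n) repeats the seam point so the
# outline closes:
#   first(coordinates(Circle(Point2f(0), 1f0), 5))   # ≈ Point(0, -1)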
function texturecoordinates(s::Circle, nvertices=64)
return coordinates(Circle(Point2f(0.5), 0.5f0), nvertices)
end
function coordinates(s::Sphere, nvertices=24)
θ = LinRange(0, pi, nvertices)
φ = LinRange(0, 2pi, nvertices)
inner(θ, φ) = Point(cos(φ) * sin(θ), sin(φ) * sin(θ), cos(θ)) .* s.r .+ s.center
return ivec((inner(θ, φ) for θ in θ, φ in φ))
end
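# Sketch (illustrative comment): θ sweeps pole to pole and φ sweeps once
# around, so the grid duplicates the poles and the seam column; the first
# sample is the north pole (cf. the decompose tests later in this dump):
#   first(coordinates(Sphere(Point3f(0), 1f0), 3))   # ≈ Point(0, 0, 1)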
function texturecoordinates(s::Sphere, nvertices=24)
ux = LinRange(0, 1, nvertices)
return ivec(((φ, θ) for θ in reverse(ux), φ in ux))
end
function faces(sphere::Sphere, nvertices=24)
return faces(Rect(0, 0, 1, 1), (nvertices, nvertices))
end
function normals(s::Sphere{T}, nvertices=24) where {T}
return coordinates(Sphere(Point{3,T}(0), 1), nvertices)
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 948 | using Test
@testset "conversion" begin
@test convert(Point, (2, 3)) === Point(2, 3)
@test convert(Point, (2.0, 3)) === Point(2.0, 3.0)
end
@testset "broadcast" begin
@testset for T in (Vec, Point)
x = [T(2, 3), T(7, 3)]
@test [T(4, 9), T(14, 9)] == x .* T(2, 3)
@test [T(4, 6), T(9, 6)] == x .+ T(2, 3)
@test [T(0, 0), T(5, 0)] == x .- T(2, 3)
end
end
@testset "finite, nan, inf tests" begin
for T in (Vec, Point)
@testset "$T" begin
nan_point = T(Float64.((1.0, 2.0, 3.0, NaN)))
inf_point = T(Float64.((1.0, 2.0, Inf, 4.0)))
@test isinf(inf_point)
@test !isinf(nan_point)
@test isnan(nan_point)
@test !isnan(inf_point)
@test !isfinite(nan_point)
@test !isfinite(inf_point)
@test !isfinite(nan_point + inf_point)
@test isfinite(T(1, 2, 3))
end
end
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 6021 | @testset "Basic types" begin
point = Point(2, 3)
@test geomtrait(point) isa PointTrait
@test testgeometry(point)
@test ncoord(point) == 2
@test getcoord(point, 2) == 3
@test GeoInterface.coordinates(point) == [2, 3]
line = Line(Point(2, 3), Point(4, 5))
@test geomtrait(line) isa LineTrait
@test testgeometry(line)
@test ngeom(line) == 2
@test getgeom(line, 2) == Point(4, 5)
@test GeoInterface.coordinates(line) == [[2, 3], [4, 5]]
mp = MultiPoint([point, point])
@test geomtrait(mp) isa MultiPointTrait
@test testgeometry(mp)
@test ngeom(mp) == 2
@test getgeom(mp, 2) == point
@test GeoInterface.coordinates(mp) == [[2, 3], [2, 3]]
linestring = LineString(Point{2,Int}[(10, 10), (20, 20), (10, 40)])
@test geomtrait(linestring) isa LineStringTrait
@test testgeometry(linestring)
@test ngeom(linestring) == 3
@test ncoord(linestring) == 2
@test getgeom(linestring, 1) == Point(10, 10)
@test getgeom(linestring, 2) == Point(20, 20)
@test getgeom(linestring, 3) == Point(10, 40)
@test GeoInterface.coordinates(linestring) == [[10, 10], [20, 20], [10, 40]]
multilinestring = MultiLineString([linestring, linestring])
@test geomtrait(multilinestring) isa MultiLineStringTrait
@test testgeometry(multilinestring)
@test GeoInterface.coordinates(multilinestring) ==
[[[10, 10], [20, 20], [10, 40]], [[10, 10], [20, 20], [10, 40]]]
@test ncoord(multilinestring) == 2
poly = Polygon(rand(Point{2,Float32}, 5), [rand(Point{2,Float32}, 5)])
@test geomtrait(poly) isa PolygonTrait
@test testgeometry(poly)
@test length(GeoInterface.coordinates(poly)) == 2
@test length(GeoInterface.coordinates(poly)[1]) == 5
triangle = Triangle(point, point, point)
@test geomtrait(triangle) isa PolygonTrait # ?? should it be a Triangle trait
@test testgeometry(triangle)
@test length(GeoInterface.coordinates(triangle)) == 1
@test length(GeoInterface.coordinates(triangle)[1]) == 3
polys = MultiPolygon([poly, poly])
@test geomtrait(polys) isa MultiPolygonTrait
@test testgeometry(polys)
@test length(GeoInterface.coordinates(polys)) == 2
@test length(GeoInterface.coordinates(polys)[1]) == 2
@test length(GeoInterface.coordinates(polys)[1][1]) == 5
end
@testset "Mesh" begin
mesh = triangle_mesh(Sphere(Point3f(0), 1))
@test testgeometry(mesh)
end
@testset "Convert" begin
# convert GeoJSON geometry types to GeometryBasics via the GeoInterface
point_str = """{"type":"Point","coordinates":[30.1,10.1]}"""
point_3d_str = """{"type":"Point","coordinates":[30.1,10.1,5.1]}"""
linestring_str = """{"type":"LineString","coordinates":[[30.1,10.1],[10.1,30.1],[40.1,40.1]]}"""
polygon_str = """{"type":"Polygon","coordinates":[[[30.1,10.1],[40.1,40.1],[20.1,40.1],[10.1,20.1],[30.1,10.1]]]}"""
polygon_hole_str = """{"type":"Polygon","coordinates":[[[35.1,10.1],[45.1,45.1],[15.1,40.1],[10.1,20.1],[35.1,10.1]],[[20.1,30.1],[35.1,35.1],[30.1,20.1],[20.1,30.1]]]}"""
multipoint_str = """{"type":"MultiPoint","coordinates":[[10.1,40.1],[40.1,30.1],[20.1,20.1],[30.1,10.1]]}"""
multilinestring_str = """{"type":"MultiLineString","coordinates":[[[10.1,10.1],[20.1,20.1],[10.1,40.1]],[[40.1,40.1],[30.1,30.1],[40.1,20.1],[30.1,10.1]]]}"""
multipolygon_str = """{"type":"MultiPolygon","coordinates":[[[[30.1,20.1],[45.1,40.1],[10.1,40.1],[30.1,20.1]]],[[[15.1,5.1],[40.1,10.1],[10.1,20.1],[5.1,10.1],[15.1,5.1]]]]}"""
multipolygon_hole_str = """{"type":"MultiPolygon","coordinates":[[[[40.1,40.1],[20.1,45.1],[45.1,30.1],[40.1,40.1]]],[[[20.1,35.1],[10.1,30.1],[10.1,10.1],[30.1,5.1],[45.1,20.1],[20.1,35.1]],[[30.1,20.1],[20.1,15.1],[20.1,25.1],[30.1,20.1]]]]}"""
point_json = GeoJSON.read(point_str)
point_3d_json = GeoJSON.read(point_3d_str)
linestring_json = GeoJSON.read(linestring_str)
polygon_json = GeoJSON.read(polygon_str)
polygon_hole_json = GeoJSON.read(polygon_hole_str)
multipoint_json = GeoJSON.read(multipoint_str)
multilinestring_json = GeoJSON.read(multilinestring_str)
multipolygon_json = GeoJSON.read(multipolygon_str)
multipolygon_hole_json = GeoJSON.read(multipolygon_hole_str)
point_gb = GeoInterface.convert(GeometryBasics, point_json)
point_3d_gb = GeoInterface.convert(GeometryBasics, point_3d_json)
linestring_gb = GeoInterface.convert(GeometryBasics, linestring_json)
polygon_gb = GeoInterface.convert(GeometryBasics, polygon_json)
polygon_hole_gb = GeoInterface.convert(GeometryBasics, polygon_hole_json)
multipoint_gb = GeoInterface.convert(GeometryBasics, multipoint_json)
multilinestring_gb = GeoInterface.convert(GeometryBasics, multilinestring_json)
multipolygon_gb = GeoInterface.convert(GeometryBasics, multipolygon_json)
multipolygon_hole_gb = GeoInterface.convert(GeometryBasics, multipolygon_hole_json)
@test point_gb === Point{2,Float32}(30.1, 10.1)
@test point_3d_gb === Point{3,Float32}(30.1, 10.1, 5.1)
@test linestring_gb isa LineString
@test length(linestring_gb) == 2
@test eltype(linestring_gb) == Line{2,Float32}
@test polygon_gb isa Polygon
@test isempty(polygon_gb.interiors)
@test polygon_hole_gb isa Polygon
@test length(polygon_hole_gb.interiors) == 1
@test multipoint_gb isa MultiPoint
@test length(multipoint_gb) == 4
@test multipoint_gb[4] === Point{2,Float32}(30.1, 10.1)
@test multilinestring_gb isa MultiLineString
@test length(multilinestring_gb) == 2
@test multipolygon_gb isa MultiPolygon
@test length(multipolygon_gb) == 2
@test multipolygon_hole_gb isa MultiPolygon
@test length(multipolygon_hole_gb) == 2
@test length(multipolygon_hole_gb[1].interiors) == 0
@test length(multipolygon_hole_gb[2].interiors) == 1
end
@testset "Extent" begin
rect = Rect2f(Vec2f(0), Vec2f(1.0))
ext = extent(rect)
@test ext.X == (0.0f0, 1.0f0)
@test ext.Y == (0.0f0, 1.0f0)
end | GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 11951 | using Test, GeometryBasics
@testset "Cylinder" begin
@testset "constructors" begin
o, extr, r = Point2f(1, 2), Point2f(3, 4), 5.0f0
s = Cylinder(o, extr, r)
@test typeof(s) == Cylinder{2,Float32}
@test typeof(s) == Cylinder2{Float32}
@test origin(s) == o
@test extremity(s) == extr
@test radius(s) == r
#@test abs(height(s)- norm([1,2]-[3,4]))<1e-5
h = norm(o - extr)
@test isapprox(height(s), h)
#@test norm(direction(s) - Point{2,Float32}([2,2]./norm([1,2]-[3,4])))<1e-5
@test isapprox(direction(s), Point2f(2, 2) ./ h)
v1 = rand(Point{3,Float64})
v2 = rand(Point{3,Float64})
R = rand()
s = Cylinder(v1, v2, R)
@test typeof(s) == Cylinder{3,Float64}
@test typeof(s) == Cylinder3{Float64}
@test origin(s) == v1
@test extremity(s) == v2
@test radius(s) == R
@test height(s) == norm(v2 - v1)
#@test norm(direction(s) - Point{3,Float64}((v2-v1)./norm(v2-v1)))<1e-10
@test isapprox(direction(s), (v2 - v1) ./ norm(v2 .- v1))
end
@testset "decompose" begin
m = GeometryBasics.normal_mesh(Sphere(Point3f(0), 1f0))
@test decompose_uv(m) isa Vector{Vec2f}
o, extr, r = Point2f(1, 2), Point2f(3, 4), 5.0f0
s = Cylinder(o, extr, r)
positions = Point{3,Float32}[(-0.7677671, 3.767767, 0.0),
(2.767767, 0.23223293, 0.0),
(0.23223293, 4.767767, 0.0),
(3.767767, 1.2322329, 0.0), (1.2322329, 5.767767, 0.0),
(4.767767, 2.232233, 0.0)]
@test decompose(Point3f, Tesselation(s, (2, 3))) ≈ positions
FT = TriangleFace{Int}
faces = FT[(1, 2, 4), (1, 4, 3), (3, 4, 6), (3, 6, 5)]
@test faces == decompose(FT, Tesselation(s, (2, 3)))
v1 = Point{3,Float64}(1, 2, 3)
v2 = Point{3,Float64}(4, 5, 6)
R = 5.0
s = Cylinder(v1, v2, R)
positions = Point{3,Float64}[(4.535533905932738, -1.5355339059327373, 3.0),
(7.535533905932738, 1.4644660940672627, 6.0),
(3.0412414523193148, 4.041241452319315,
-1.0824829046386295),
(6.041241452319315, 7.041241452319315,
1.9175170953613705),
(-2.535533905932737, 5.535533905932738,
2.9999999999999996),
(0.46446609406726314, 8.535533905932738, 6.0),
(-1.0412414523193152, -0.04124145231931431,
7.0824829046386295),
(1.9587585476806848, 2.9587585476806857,
10.08248290463863), (1, 2, 3), (4, 5, 6)]
@test decompose(Point3{Float64}, Tesselation(s, 8)) ≈ positions
faces = TriangleFace{Int}[(3, 2, 1), (4, 2, 3), (5, 4, 3), (6, 4, 5), (7, 6, 5),
(8, 6, 7), (1, 8, 7), (2, 8, 1), (3, 1, 9), (2, 4, 10),
(5, 3, 9), (4, 6, 10), (7, 5, 9), (6, 8, 10), (1, 7, 9),
(8, 2, 10)]
@test faces == decompose(TriangleFace{Int}, Tesselation(s, 8))
m = triangle_mesh(Tesselation(s, 8))
@test m === triangle_mesh(m)
@test GeometryBasics.faces(m) == faces
@test GeometryBasics.coordinates(m) ≈ positions
m = normal_mesh(s) # just test that it works without explicit resolution parameter
@test m isa GLNormalMesh
muv = uv_mesh(s)
@test Rect(Point.(texturecoordinates(muv))) == Rect2f(Vec2f(0), Vec2f(1.0))
end
end
@testset "HyperRectangles" begin
a = Rect(Vec(0, 0), Vec(1, 1))
pt_expa = Point{2,Int}[(0, 0), (1, 0), (0, 1), (1, 1)]
@test decompose(Point{2,Int}, a) == pt_expa
mesh = normal_mesh(a)
@test decompose(Point2f, mesh) == pt_expa
b = Rect(Vec(1, 1, 1), Vec(1, 1, 1))
pt_expb = Point{3,Int64}[[1, 1, 1], [1, 1, 2], [1, 2, 2], [1, 2, 1], [1, 1, 1],
[2, 1, 1], [2, 1, 2], [1, 1, 2], [1, 1, 1], [1, 2, 1],
[2, 2, 1], [2, 1, 1], [2, 2, 2], [1, 2, 2], [1, 1, 2],
[2, 1, 2], [2, 2, 2], [2, 1, 2], [2, 1, 1], [2, 2, 1],
[2, 2, 2], [2, 2, 1], [1, 2, 1], [1, 2, 2]]
@test decompose(Point{3,Int}, b) == pt_expb
mesh = normal_mesh(b)
@test isempty(Rect{3,Float32}())
end
NFace = NgonFace
@testset "Faces" begin
@test convert_simplex(GLTriangleFace, QuadFace{Int}(1, 2, 3, 4)) ==
(GLTriangleFace(1, 2, 3), GLTriangleFace(1, 3, 4))
@test convert_simplex(NFace{3,ZeroIndex{Int}}, QuadFace{ZeroIndex{Int}}(1, 2, 3, 4)) ==
(NFace{3,ZeroIndex{Int}}(1, 2, 3), NFace{3,ZeroIndex{Int}}(1, 3, 4))
@test convert_simplex(NFace{3,OffsetInteger{3,Int}},
NFace{4,OffsetInteger{2,Int}}(1, 2, 3, 4)) ==
(NFace{3,OffsetInteger{3,Int}}(1, 2, 3), NFace{3,OffsetInteger{3,Int}}(1, 3, 4))
@test convert_simplex(LineFace{Int}, QuadFace{Int}(1, 2, 3, 4)) ==
(LineFace{Int}(1, 2), LineFace{Int}(2, 3), LineFace{Int}(3, 4),
LineFace{Int}(4, 1))
@testset "NgonFace ambiguity" begin
face = NgonFace((1, 2))
@test convert_simplex(NgonFace{2,UInt32}, face) === (NgonFace{2,UInt32}((1, 2)),)
@test convert_simplex(typeof(face), face) === (face,)
face = NgonFace((1,))
@test convert_simplex(NgonFace{1,UInt32}, face) === (NgonFace{1,UInt32}((1,)),)
@test convert_simplex(typeof(face), face) === (face,)
end
end
@testset "Normals" begin
n64 = Vec{3,Float64}[(0.0, 0.0, -1.0), (0.0, 0.0, -1.0), (0.0, 0.0, -1.0),
(0.0, 0.0, -1.0), (0.0, 0.0, 1.0), (0.0, 0.0, 1.0),
(0.0, 0.0, 1.0), (0.0, 0.0, 1.0), (-1.0, 0.0, 0.0),
(-1.0, 0.0, 0.0), (-1.0, 0.0, 0.0), (-1.0, 0.0, 0.0),
(1.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 0.0, 0.0), (1.0, 0.0, 0.0),
(0.0, 1.0, 0.0), (0.0, 1.0, 0.0), (0.0, 1.0, 0.0), (0.0, 1.0, 0.0),
(0.0, -1.0, 0.0), (0.0, -1.0, 0.0), (0.0, -1.0, 0.0),
(0.0, -1.0, 0.0),]
n32 = map(Vec{3,Float32}, n64)
r = triangle_mesh(centered(Rect3))
# @test normals(coordinates(r), GeometryBasics.faces(r)) == n32
# @test normals(coordinates(r), GeometryBasics.faces(r)) == n64
end
@testset "HyperSphere" begin
sphere = Sphere{Float32}(Point3f(0), 1.0f0)
points = decompose(Point, Tesselation(sphere, 3))
point_target = Point{3,Float32}[[0.0, 0.0, 1.0], [1.0, 0.0, 6.12323e-17],
[1.22465e-16, 0.0, -1.0], [-0.0, 0.0, 1.0],
[-1.0, 1.22465e-16, 6.12323e-17],
[-1.22465e-16, 1.49976e-32, -1.0], [0.0, -0.0, 1.0],
[1.0, -2.44929e-16, 6.12323e-17],
[1.22465e-16, -2.99952e-32, -1.0]]
@test points ≈ point_target
f = decompose(TriangleFace{Int}, Tesselation(sphere, 3))
face_target = TriangleFace{Int}[[1, 2, 5], [1, 5, 4], [2, 3, 6], [2, 6, 5], [4, 5, 8],
[4, 8, 7], [5, 6, 9], [5, 9, 8]]
@test f == face_target
circle = Circle(Point2f(0), 1.0f0)
points = decompose(Point2f, Tesselation(circle, 20))
@test length(points) == 20
tess_circle = Tesselation(circle, 32)
mesh = triangle_mesh(tess_circle)
@test decompose(Point2f, mesh) ≈ decompose(Point2f, tess_circle)
end
@testset "Rectangles" begin
rect = Rect2f(0, 7, 20, 3)
@test (rect + 4) == Rect2f(4, 11, 20, 3)
@test (rect + Vec(2, -2)) == Rect2f(2, 5, 20, 3)
@test (rect - 4) == Rect2f(-4, 3, 20, 3)
@test (rect - Vec(2, -2)) == Rect2f(-2, 9, 20, 3)
base = Vec3f(1, 2, 3)
wxyz = Vec3f(-2, 4, 2)
rect = Rect3f(base, wxyz)
@test (rect + 4) == Rect3f(base .+ 4, wxyz)
@test (rect + Vec(2, -2, 3)) == Rect3f(base .+ Vec(2, -2, 3), wxyz)
@test (rect - 4) == Rect3f(base .- 4, wxyz)
@test (rect - Vec(2, -2, 7)) == Rect3f(base .- Vec(2, -2, 7), wxyz)
rect = Rect2f(0, 7, 20, 3)
@test (rect * 4) == Rect2f(0, 7 * 4, 20 * 4, 3 * 4)
@test (rect * Vec(2, -2)) == Rect2f(0, -7 * 2, 20 * 2, -3 * 2)
base = Vec3f(1, 2, 3)
wxyz = Vec3f(-2, 4, 2)
rect = Rect3f(base, wxyz)
@test (rect * 4) == Rect3f(base .* 4, wxyz .* 4)
@test (rect * Vec(2, -2, 3)) == Rect3f(base .* Vec(2, -2, 3), wxyz .* Vec(2, -2, 3))
rect1 = Rect(Vec(0.0, 0.0), Vec(1.0, 2.0))
rect2 = Rect(0.0, 0.0, 1.0, 2.0)
@test rect1 isa GeometryBasics.HyperRectangle{2,Float64}
@test rect1 == rect2
split1, split2 = GeometryBasics.split(rect1, 2, 1)
@test widths(split1) == widths(split2)
@test origin(split1) == Vec(0, 0)
@test origin(split2) == Vec(0, 1)
@test in(split1, rect1)
@test !in(rect1, split1)
prim = Rect(0.0, 0.0, 1.0, 1.0)
@test length(prim) == 2
@test width(prim) == 1.0
@test height(prim) == 1.0
b1 = Rect2(0.0, 0.0, 2.0, 2.0)
b2 = Rect2(0, 0, 2, 2)
@test isequal(b1, b2)
pt = Point(1.0, 1.0)
b1 = Rect(0.0, 0.0, 1.0, 1.0)
@test in(pt, b1)
rect = Rect(0.0, 0.0, 1.0, 1.0)
@test GeometryBasics.positive_widths(rect) isa GeometryBasics.HyperRectangle{2,Float64}
h1 = Rect(0.0, 0.0, 1.0, 1.0)
h2 = Rect(1.0, 1.0, 2.0, 2.0)
@test union(h1, h2) isa GeometryBasics.HyperRectangle{2,Float64}
@test GeometryBasics.diff(h1, h2) == h1
@test GeometryBasics.intersect(h1, h2) isa GeometryBasics.HyperRectangle{2,Float64}
b = Rect(0.0, 0.0, 1.0, 1.0)
v = Vec(1, 2)
@test update(b, v) isa GeometryBasics.HyperRectangle{2,Float64}
v = Vec(1.0, 2.0)
@test update(b, v) isa GeometryBasics.HyperRectangle{2,Float64}
p = Vec(5.0, 4.0)
rect = Rect(0.0, 0.0, 1.0, 1.0)
@test min_dist_dim(rect, p, 1) == 4.0
@test min_dist_dim(rect, p, 2) == 3.0
@test max_dist_dim(rect, p, 1) == 5.0
@test max_dist_dim(rect, p, 2) == 4.0
rect1 = Rect(0.0, 0.0, 1.0, 1.0)
rect2 = Rect(3.0, 1.0, 4.0, 2.0)
@test min_dist_dim(rect1, rect2, 1) == 2.0
@test min_dist_dim(rect1, rect2, 2) == 0.0
@test max_dist_dim(rect1, rect2, 1) == 7.0
@test max_dist_dim(rect1, rect2, 2) == 3.0
@test !before(rect1, rect2)
rect1 = Rect(0.0, 0.0, 1.0, 1.0)
rect2 = Rect(3.0, 2.0, 4.0, 2.0)
@test before(rect1, rect2)
@test !meets(rect1, rect2)
rect2 = Rect(1.0, 1.0, 4.0, 2.0)
@test meets(rect1, rect2)
rect1 = Rect(1.0, 1.0, 2.0, 2.0)
rect2 = Rect(0.0, 0.0, 2.0, 1.0)
@test !overlaps(rect1, rect2)
rect1 = Rect(1.0, 1.0, 2.0, 2.0)
rect2 = Rect(1.5, 1.5, 2.0, 2.0)
@test overlaps(rect1, rect2)
rect1 = Rect(1.0, 1.0, 2.0, 2.0)
rect2 = Rect(0.0, 0.0, 2.0, 1.0)
@test !GeometryBasics.starts(rect1, rect2)
rect2 = Rect(1.0, 1.0, 1.5, 1.5)
@test !GeometryBasics.starts(rect1, rect2)
rect2 = Rect(1.0, 1.0, 3.0, 3.0)
@test GeometryBasics.starts(rect1, rect2)
rect1 = Rect(1.0, 1.0, 2.0, 2.0)
rect2 = Rect(0.0, 0.0, 4.0, 4.0)
@test during(rect1, rect2)
rect1 = Rect(0.0, 0.0, 2.0, 3.0)
rect2 = Rect(1.0, 1.0, 4.0, 2.0)
@test !during(rect1, rect2)
rect1 = Rect(1.0, 1.0, 2.0, 2.0)
rect2 = Rect(0.0, 0.0, 4.0, 4.0)
@test !finishes(rect1, rect2)
rect1 = Rect(1.0, 0.0, 1.0, 1.0)
rect2 = Rect(0.0, 0.0, 2.0, 1.0)
@test !finishes(rect1, rect2)
rect1 = Rect(1.0, 1.0, 1.0, 2.0)
rect2 = Rect(0.0, 0.0, 2.0, 3.0)
@test finishes(rect1, rect2)
end
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |
|
[
"MIT"
] | 0.4.11 | b62f2b2d76cee0d61a2ef2b3118cd2a3215d3134 | code | 29003 | using Test, Random, StructArrays, Tables, StaticArrays, OffsetArrays
using GeometryBasics
using LinearAlgebra
using GeometryBasics: attributes
using GeoInterface
using GeoJSON
using Extents
@testset "GeometryBasics" begin
@testset "algorithms" begin
cube = Rect(Vec3f(-0.5), Vec3f(1))
cube_faces = decompose(TriangleFace{Int}, faces(cube))
cube_vertices = decompose(Point{3,Float32}, cube)
@test area(cube_vertices, cube_faces) == 6
mesh = Mesh(cube_vertices, cube_faces)
@test GeometryBasics.volume(mesh) ≈ 1
@test GeometryBasics.volume(cube) ≈ 1
rect = Rect(1, 2, 7.5, 2.0)
@test GeometryBasics.area(rect) ≈ 15
points_cwise = Point2f[(0,0), (0,1), (1,1)]
points_ccwise = Point2f[(0,0), (1,0), (1,1)]
@test area(points_cwise) ≈ -0.5
@test area(points_ccwise) ≈ 0.5
@test area(OffsetArray(points_cwise, -2)) ≈ -0.5
points3d = Point3f[(0,0,0), (0,0,1), (0,1,1)]
@test area(OffsetArray(points3d, -2)) ≈ 0.5
pm2d = [PointMeta(0.0, 0.0, a=:d), PointMeta(0.0, 1.0, a=:e), PointMeta(1.0, 0.0, a=:f)]
@test area(pm2d) ≈ -0.5
pm3d = [PointMeta(0.0, 0.0, 0.0, a=:d), PointMeta(0.0, 1.0, 0.0, a=:e), PointMeta(1.0, 0.0, 0.0, a=:f)]
@test_broken area(pm3d) ≈ 0.5 # Currently broken as zero(PointMeta(0.0, 0.0, 0.0, a=:d)) fails
end
@testset "embedding metadata" begin
@testset "Meshes" begin
@testset "per vertex attributes" begin
points = rand(Point{3, Float64}, 8)
tfaces = TetrahedronFace{Int}[(1, 2, 3, 4), (5, 6, 7, 8)]
normals = rand(SVector{3, Float64}, 8)
stress = LinRange(0, 1, 8)
mesh = Mesh(meta(points, normals = normals, stress = stress), tfaces)
@test hasproperty(coordinates(mesh), :stress)
@test hasproperty(coordinates(mesh), :normals)
@test coordinates(mesh).stress === stress
@test coordinates(mesh).normals === normals
@test coordinates(mesh).normals === normals
@test GeometryBasics.faces(mesh) === tfaces
@test propertynames(coordinates(mesh)) == (:position, :normals, :stress)
end
@testset "per face attributes" begin
# Construct a cube out of Quads
points = Point{3, Float64}[
(0.0, 0.0, 0.0), (2.0, 0.0, 0.0),
(2.0, 2.0, 0.0), (0.0, 2.0, 0.0),
(0.0, 0.0, 12.0), (2.0, 0.0, 12.0),
(2.0, 2.0, 12.0), (0.0, 2.0, 12.0)
]
facets = QuadFace{Cint}[
1:4,
5:8,
[1,5,6,2],
[2,6,7,3],
[3, 7, 8, 4],
[4, 8, 5, 1]
]
markers = Cint[-1, -2, 0, 0, 0, 0]
# attach some additional information to our faces!
mesh = Mesh(points, meta(facets, markers = markers))
@test hasproperty(GeometryBasics.faces(mesh), :markers)
# test with === to assert we're not doing any copies
@test GeometryBasics.faces(mesh).markers === markers
@test coordinates(mesh) === points
@test metafree(GeometryBasics.faces(mesh)) === facets
end
end
@testset "polygon with metadata" begin
polys = [Polygon(rand(Point{2, Float32}, 20)) for i in 1:10]
pnames = [randstring(4) for i in 1:10]
numbers = LinRange(0.0, 1.0, 10)
bin = rand(Bool, 10)
# create a polygon
poly = PolygonMeta(polys[1], name = pnames[1], value = numbers[1], category = bin[1])
# create a MultiPolygon with the right type & meta information!
multipoly = MultiPolygonMeta(polys, name = pnames, value = numbers, category = bin)
@test multipoly isa AbstractVector
@test poly isa GeometryBasics.AbstractPolygon
@test GeometryBasics.getcolumn(poly, :name) == pnames[1]
@test GeometryBasics.MetaFree(PolygonMeta) == Polygon
@test GeometryBasics.getcolumn(multipoly, :name) == pnames
@test GeometryBasics.MetaFree(MultiPolygonMeta) == MultiPolygon
meta_p = meta(polys[1], boundingbox=Rect(0, 0, 2, 2))
@test meta_p.boundingbox === Rect(0, 0, 2, 2)
@test metafree(meta_p) === polys[1]
attributes(meta_p) == Dict{Symbol, Any}(:boundingbox => meta_p.boundingbox,
:polygon => polys[1])
end
@testset "point with metadata" begin
p = Point(1.1, 2.2)
@test p isa AbstractVector{Float64}
pm = PointMeta(1.1, 2.2; a=1, b=2)
p1 = Point(2.2, 3.6)
p2 = [p, p1]
@test coordinates(p2) == p2
@test meta(pm) === (a=1, b=2)
@test metafree(pm) === p
@test propertynames(pm) == (:position, :a, :b)
@test GeometryBasics.MetaFree(typeof(pm)) == Point{2,Float64}
@test_broken zero(pm) == [0, 0]
end
@testset "MultiPoint with metadata" begin
p = collect(Point{2, Float64}(x, x+1) for x in 1:5)
@test p isa AbstractVector
mpm = MultiPointMeta(p, a=1, b=2)
@test coordinates(mpm) == mpm
@test meta(mpm) === (a=1, b=2)
@test metafree(mpm) == p
@test propertynames(mpm) == (:points, :a, :b)
end
@testset "LineString with metadata" begin
linestring = LineStringMeta(Point{2, Int}[(10, 10), (20, 20), (10, 40)], a = 1, b = 2)
@test linestring isa AbstractVector
@test meta(linestring) === (a = 1, b = 2)
@test metafree(linestring) == linestring
@test propertynames(linestring) == (:lines, :a, :b)
end
@testset "MultiLineString with metadata" begin
linestring1 = LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)])
linestring2 = LineString(Point{2, Int}[(40, 40), (30, 30), (40, 20), (30, 10)])
multilinestring = MultiLineString([linestring1, linestring2])
multilinestringmeta = MultiLineStringMeta([linestring1, linestring2]; boundingbox = Rect(1.0, 1.0, 2.0, 2.0))
@test multilinestringmeta isa AbstractVector
@test meta(multilinestringmeta) === (boundingbox = Rect(1.0, 1.0, 2.0, 2.0),)
@test metafree(multilinestringmeta) == multilinestring
@test propertynames(multilinestringmeta) == (:linestrings, :boundingbox)
end
@testset "Mesh with metadata" begin
m = triangle_mesh(Sphere(Point3f(0), 1))
m_meta = MeshMeta(m; boundingbox=Rect(1.0, 1.0, 2.0, 2.0))
@test meta(m_meta) === (boundingbox = Rect(1.0, 1.0, 2.0, 2.0),)
@test metafree(m_meta) === m
@test propertynames(m_meta) == (:mesh, :boundingbox)
end
end
@testset "embedding MetaT" begin
@testset "MetaT{Polygon}" begin
polys = [Polygon(rand(Point{2, Float32}, 20)) for i in 1:10]
multipol = MultiPolygon(polys)
pnames = [randstring(4) for i in 1:10]
numbers = LinRange(0.0, 1.0, 10)
bin = rand(Bool, 10)
# create a polygon
poly = MetaT(polys[1], name = pnames[1], value = numbers[1], category = bin[1])
# create a MultiPolygon with the right type & meta information!
multipoly = MetaT(multipol, name = pnames, value = numbers, category = bin)
@test multipoly isa MetaT
@test poly isa MetaT
@test GeometryBasics.getcolumn(poly, :name) == pnames[1]
@test GeometryBasics.getcolumn(multipoly, :name) == pnames
meta_p = MetaT(polys[1], boundingbox=Rect(0, 0, 2, 2))
@test meta_p.boundingbox === Rect(0, 0, 2, 2)
@test GeometryBasics.metafree(meta_p) == polys[1]
@test GeometryBasics.metafree(poly) == polys[1]
@test GeometryBasics.metafree(multipoly) == multipol
@test GeometryBasics.meta(meta_p) == (boundingbox = GeometryBasics.HyperRectangle{2,Int64}([0, 0], [2, 2]),)
@test GeometryBasics.meta(poly) == (name = pnames[1], value = 0.0, category = bin[1])
@test GeometryBasics.meta(multipoly) == (name = pnames, value = numbers, category = bin)
end
@testset "MetaT{Point}" begin
p = Point(1.1, 2.2)
@test p isa AbstractVector{Float64}
pm = MetaT(Point(1.1, 2.2); a=1, b=2)
p1 = Point(2.2, 3.6)
p2 = [p, p1]
@test coordinates(p2) == p2
@test pm.meta === (a=1, b=2)
@test pm.main === p
@test propertynames(pm) == (:main, :a, :b)
@test GeometryBasics.metafree(pm) == p
@test GeometryBasics.meta(pm) == (a = 1, b = 2)
end
@testset "MetaT{MultiPoint}" begin
p = collect(Point{2, Float64}(x, x+1) for x in 1:5)
@test p isa AbstractVector
mpm = MetaT(MultiPoint(p); a=1, b=2)
@test coordinates(mpm.main) == Point{2, Float64}[(x, x+1) for x in 1:5]
@test mpm.meta === (a=1, b=2)
@test mpm.main == p
@test propertynames(mpm) == (:main, :a, :b)
@test GeometryBasics.metafree(mpm) == p
@test GeometryBasics.meta(mpm) == (a = 1, b = 2)
end
@testset "MetaT{LineString}" begin
linestring = MetaT(LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)]), a = 1, b = 2)
@test linestring isa MetaT
@test linestring.meta === (a = 1, b = 2)
@test propertynames(linestring) == (:main, :a, :b)
@test GeometryBasics.metafree(linestring) == LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)])
@test GeometryBasics.meta(linestring) == (a = 1, b = 2)
end
@testset "MetaT{MultiLineString}" begin
linestring1 = LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)])
linestring2 = LineString(Point{2, Int}[(40, 40), (30, 30), (40, 20), (30, 10)])
multilinestring = MultiLineString([linestring1, linestring2])
multilinestringmeta = MetaT(MultiLineString([linestring1, linestring2]); boundingbox = Rect(1.0, 1.0, 2.0, 2.0))
@test multilinestringmeta isa MetaT
@test multilinestringmeta.meta === (boundingbox = Rect(1.0, 1.0, 2.0, 2.0),)
@test multilinestringmeta.main == multilinestring
@test propertynames(multilinestringmeta) == (:main, :boundingbox)
@test GeometryBasics.metafree(multilinestringmeta) == multilinestring
@test GeometryBasics.meta(multilinestringmeta) == (boundingbox = GeometryBasics.HyperRectangle{2,Float64}([1.0, 1.0], [2.0, 2.0]),)
end
#=
Meshes work differently for MetaT,
since `MetaT{Point}` is not a subtype of `AbstractPoint`.
=#
@testset "MetaT{Mesh}" begin
@testset "per vertex attributes" begin
points = rand(Point{3, Float64}, 8)
tfaces = TetrahedronFace{Int}[(1, 2, 3, 4), (5, 6, 7, 8)]
normals = rand(SVector{3, Float64}, 8)
stress = LinRange(0, 1, 8)
mesh_nometa = Mesh(points, tfaces)
mesh = MetaT(mesh_nometa, normals = normals, stress = stress)
@test hasproperty(mesh, :stress)
@test hasproperty(mesh, :normals)
@test mesh.stress == stress
@test mesh.normals == normals
@test GeometryBasics.faces(mesh.main) == tfaces
@test propertynames(mesh) == (:main, :normals, :stress)
end
end
end
@testset "view" begin
@testset "TupleView" begin
x = [1, 2, 3, 4, 5, 6]
y = TupleView{2, 1}(x)
@test y == [(1, 2), (2, 3), (3, 4), (4, 5), (5, 6)]
y = TupleView{2}(x)
@test y == [(1, 2), (3, 4), (5, 6)]
y = TupleView{2, 3}(x)
@test y == [(1, 2), (4, 5)]
y = TupleView{3, 1}(x)
@test y == [(1, 2, 3), (2, 3, 4), (3, 4, 5), (4, 5, 6)]
y = TupleView{2, 1}(x, connect = true)
@test y == [(1, 2), (2, 3), (3, 4), (4, 5), (5, 6), (6, 1)]
end
@testset "connected views" begin
numbers = [1, 2, 3, 4, 5, 6]
x = connect(numbers, Point{2})
@test x == Point[(1, 2), (3, 4), (5, 6)]
line = connect(x, Line, 1)
@test line == [Line(Point(1, 2), Point(3, 4)), Line(Point(3, 4), Point(5, 6))]
triangles = connect(x, Triangle)
@test triangles == [Triangle(Point(1, 2), Point(3, 4), Point(5, 6))]
x = connect([1, 2, 3, 4, 5, 6, 7, 8], Point{2})
tetrahedra = connect(x, NSimplex{4})
@test tetrahedra == [Tetrahedron(x[1], x[2], x[3], x[4])]
@testset "matrix non-copy point views" begin
# point in row
points = [1 2; 1 4; 66 77]
comparison = [Point(1, 2), Point(1, 4), Point(66, 77)]
@test connect(points, Point{2}) == comparison
# point in column
points = [1 1 66; 2 4 77]
# huh, reinterpret array doesn't seem to like `==`
@test all(((a,b),)-> a==b, zip(connect(points, Point{2}), comparison))
end
end
@testset "face views" begin
numbers = [1, 2, 3, 4, 5, 6]
points = connect(numbers, Point{2})
faces = connect([1, 2, 3], TriangleFace)
triangles = connect(points, faces)
@test triangles == [Triangle(Point(1, 2), Point(3, 4), Point(5, 6))]
x = Point{3}(1.0)
triangles = connect([x], [TriangleFace(1, 1, 1)])
@test triangles == [Triangle(x, x, x)]
points = connect([1, 2, 3, 4, 5, 6, 7, 8], Point{2})
faces = connect([1, 2, 3, 4], SimplexFace{4})
triangles = connect(points, faces)
@test triangles == [Tetrahedron(points...)]
end
@testset "reinterpret" begin
numbers = collect(reshape(1:6, 2, 3))
points = reinterpret(Point{2, Int}, numbers)
@test points[1] === Point(1, 2)
@test points[2] === Point(3, 4)
numbers[4] = 0
@test points[2] === Point(3, 0)
end
end
@testset "constructors" begin
@testset "LineFace" begin
points = connect([1, 2, 3, 4, 5, 6], Point{2})
linestring = LineString(points)
@test linestring == [Line(points[1], points[2]), Line(points[2], points[3])]
points = rand(Point{2, Float64}, 4)
linestring = LineString(points, 2)
@test linestring == [Line(points[1], points[2]), Line(points[3], points[4])]
linestring = LineString([points[1] => points[2], points[2] => points[3]])
@test linestring == [Line(points[1], points[2]), Line(points[2], points[3])]
faces = [1, 2, 3]
linestring = LineString(points, faces)
@test linestring == LineString([points[1] => points[2], points[2] => points[3]])
a, b, c, d = Point(1, 2), Point(3, 4), Point(5, 6), Point(7, 8)
points = [a, b, c, d]; faces = [1, 2, 3, 4]
linestring = LineString(points, faces, 2)
@test linestring == LineString([a => b, c => d])
faces = [LineFace(1, 2)
, LineFace(3, 4)]
linestring = LineString(points, faces)
@test linestring == LineString([a => b, c => d])
end
@testset "Polygon" begin
points = connect([1, 2, 3, 4, 5, 6], Point{2})
polygon = Polygon(points)
@test polygon == Polygon(LineString(points))
points = rand(Point{2, Float64}, 4)
linestring = LineString(points, 2)
@test Polygon(points, 2) == Polygon(linestring)
faces = [1, 2, 3]
polygon = Polygon(points, faces)
@test polygon == Polygon(LineString(points, faces))
a, b, c, d = Point(1, 2), Point(3, 4), Point(5, 6), Point(7, 8)
points = [a, b, c, d]; faces = [1, 2, 3, 4]
polygon = Polygon(points, faces, 2)
@test polygon == Polygon(LineString(points, faces, 2))
faces = [LineFace(1, 2), LineFace(3, 4)]
polygon = Polygon(points, faces)
@test polygon == Polygon(LineString(points, faces))
@test ndims(polygon) === 2
end
@testset "Mesh" begin
numbers = [1, 2, 3, 4, 5, 6]
points = connect(numbers, Point{2})
mesh = Mesh(points, [1,2,3])
@test mesh == [Triangle(points...)]
x = Point{3}(1.0)
mesh = Mesh([x], [TriangleFace(1, 1, 1)])
@test mesh == [Triangle(x, x, x)]
points = connect([1, 2, 3, 4, 5, 6, 7, 8], Point{2})
faces = connect([1, 2, 3, 4], SimplexFace{4})
mesh = Mesh(points, faces)
@test mesh == [Tetrahedron(points...)]
points = rand(Point3f, 8)
tfaces = [GLTriangleFace(1, 2, 3), GLTriangleFace(5, 6, 7)]
normals = rand(Vec3f, 8)
uv = rand(Vec2f, 8)
mesh = Mesh(points, tfaces)
meshuv = Mesh(meta(points; uv=uv), tfaces)
meshuvnormal = Mesh(meta(points; normals=normals, uv=uv), tfaces)
@test mesh isa GLPlainMesh
@test meshuv isa GLUVMesh3D
@test meshuvnormal isa GLNormalUVMesh3D
t = Tesselation(Rect2f(0, 0, 2, 2), (30, 30))
m = GeometryBasics.mesh(t, pointtype=Point3f, facetype=QuadFace{Int})
m2 = GeometryBasics.mesh(m, facetype=QuadFace{GLIndex})
@test GeometryBasics.faces(m2) isa Vector{QuadFace{GLIndex}}
@test GeometryBasics.coordinates(m2) isa Vector{Point3f}
end
@testset "Multi geometries" begin
# coordinates from https://en.wikipedia.org/wiki/Well-known_text_representation_of_geometry#Geometric_objects
points = Point{2, Int}[(10, 40), (40, 30), (20, 20), (30, 10)]
multipoint = MultiPoint(points)
@test size(multipoint) === size(points)
@test multipoint[3] === points[3]
linestring1 = LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)])
linestring2 = LineString(Point{2, Int}[(40, 40), (30, 30), (40, 20), (30, 10)])
multilinestring = MultiLineString([linestring1, linestring2])
@test size(multilinestring) === (2,)
@test multilinestring[1] === linestring1
@test multilinestring[2] === linestring2
polygon11 = Polygon(Point{2, Int}[(30, 20), (45, 40), (10, 40), (30, 20)])
polygon12 = Polygon(Point{2, Int}[(15, 5), (40, 10), (10, 20), (5, 10), (15, 5)])
multipolygon1 = MultiPolygon([polygon11, polygon12])
@test size(multipolygon1) === (2,)
@test multipolygon1[1] === polygon11
@test multipolygon1[2] === polygon12
polygon21 = Polygon(Point{2, Int}[(40, 40), (20, 45), (45, 30), (40, 40)])
polygon22 = Polygon(LineString(Point{2, Int}[(20, 35), (10, 30), (10, 10), (30, 5), (45, 20), (20, 35)]),
[LineString(Point{2, Int}[(30, 20), (20, 15), (20, 25), (30, 20)])])
multipolygon2 = MultiPolygon([polygon21, polygon22])
@test size(multipolygon2) === (2,)
@test multipolygon2[1] === polygon21
@test multipolygon2[2] === polygon22
end
end
@testset "decompose/triangulation" begin
@test isempty(decompose(Vec3f, []))
@test decompose(Vec3f, []) isa Vector{Vec3f}
primitive = Sphere(Point3f(0), 1)
@test ndims(primitive) === 3
mesh = triangle_mesh(primitive)
@test decompose(Point, mesh) isa Vector{Point3f}
@test decompose(Point, primitive) isa Vector{Point3f}
primitive = Rect2(0, 0, 1, 1)
mesh = triangle_mesh(primitive)
@test decompose(Point, mesh) isa Vector{Point2f}
@test decompose(Point, primitive) isa Vector{Point2{Int}}
primitive = Rect3(0, 0, 0, 1, 1, 1)
triangle_mesh(primitive)
primitive = Sphere(Point3f(0), 1)
m_normal = normal_mesh(primitive)
@test normals(m_normal) isa Vector{Vec3f}
primitive = Rect2(0, 0, 1, 1)
m_normal = normal_mesh(primitive)
@test normals(m_normal) isa Vector{Vec3f}
primitive = Rect3(0, 0, 0, 1, 1, 1)
m_normal = normal_mesh(primitive)
@test normals(m_normal) isa Vector{Vec3f}
points = decompose(Point2f, Circle(Point2f(0), 1))
tmesh = triangle_mesh(points)
@test normals(tmesh) == nothing
m = GeometryBasics.mesh(Sphere(Point3f(0), 1))
@test normals(m) == nothing
m_normals = pointmeta(m, Normal())
@test normals(m_normals) isa Vector{Vec3f}
@test texturecoordinates(m) == nothing
r2 = Rect2(0.0, 0.0, 1.0, 1.0)
@test collect(texturecoordinates(r2)) == [(0.0, 1.0), (1.0, 1.0), (0.0, 0.0), (1.0, 0.0)]
r3 = Rect3(0.0, 0.0, 1.0, 1.0, 2.0, 2.0)
@test first(texturecoordinates(r3)) == Vec3(0, 0, 0)
uv = decompose_uv(m)
@test Rect(Point.(uv)) == Rect(0, 0, 1, 1)
points = decompose(Point2f, Circle(Point2f(0), 1))
m = GeometryBasics.mesh(points)
@test coordinates(m) === points
linestring = LineString(Point{2, Int}[(10, 10), (20, 20), (10, 40)])
pts = Point{2, Int}[(10, 10), (20, 20), (10, 40)]
linestring = LineString(pts)
pts_decomp = decompose(Point{2, Int}, linestring)
@test pts === pts_decomp
pts_ext = Point{2, Int}[(5, 1), (3, 3), (4, 8), (1, 2), (5, 1)]
ls_ext = LineString(pts_ext)
pts_int1 = Point{2, Int}[(2, 2), (3, 8),(5, 6), (3, 4), (2, 2)]
ls_int1 = LineString(pts_int1)
pts_int2 = Point{2, Int}[(3, 2), (4, 5),(6, 1), (1, 4), (3, 2)]
ls_int2 = LineString(pts_int2)
poly_ext = Polygon(ls_ext)
poly_ext_int = Polygon(ls_ext, [ls_int1, ls_int2])
@test decompose(Point{2, Int}, poly_ext) == pts_ext
@test decompose(Point{2, Int}, poly_ext_int) == [pts_ext..., pts_int1..., pts_int2...]
end
@testset "mesh" begin
primitive = Triangle(Point2f(0), Point2f(1), Point2f(1,0))
m = GeometryBasics.mesh(primitive)
@test length(faces(m)) == 1
end
@testset "convert mesh + meta" begin
m = uv_normal_mesh(Circle(Point2f(0), 1f0))
# for 2D primitives we don't actually calculate normals
@test !hasproperty(m, :normals)
end
@testset "convert mesh + meta" begin
m = uv_normal_mesh(Rect3f(Vec3f(-1), Vec3f(1, 2, 3)))
m_normal = normal_mesh(m)
# make sure we don't lose the uv
@test hasproperty(m_normal, :uv)
@test m == m_normal
# Make sure we don't create any copies
@test m.position === m_normal.position
@test m.normals === m_normal.normals
@test m.uv === m_normal.uv
m = GeometryBasics.mesh(Rect3f(Vec3f(-1), Vec3f(1, 2, 3));
uv=Vec2{Float64}, normaltype=Vec3{Float64}, pointtype=Point3{Float64})
m_normal = normal_mesh(m)
@test hasproperty(m_normal, :uv)
@test m.position !== m_normal.position
@test m.normals !== m_normal.normals
# uv stays untouched, since we don't specify the element type in normalmesh
@test m.uv === m_normal.uv
end
@testset "modifying meta" begin
xx = rand(10)
points = rand(Point3f, 10)
m = GeometryBasics.Mesh(meta(points, xx=xx), GLTriangleFace[(1,2,3), (3,4,5)])
color = rand(10)
m = pointmeta(m; color=color)
@test hasproperty(m, :xx)
@test hasproperty(m, :color)
@test_throws ErrorException GeometryBasics.MetaType(Simplex)
@test_throws ErrorException GeometryBasics.MetaFree(Simplex)
@test m.xx === xx
@test m.color === color
m, colpopt = GeometryBasics.pop_pointmeta(m, :color)
m, xxpopt = GeometryBasics.pop_pointmeta(m, :xx)
@test propertynames(m) == (:position,)
@test colpopt === color
@test xxpopt === xx
@testset "creating meta" begin
x = Point3f[(1,3,4)]
# no meta gets added, so should stay the same
@test meta(x) === x
@test meta(x, value=[1]).position === x
end
pos = Point2f[(10, 2)]
m = Mesh(meta(pos, uv=[Vec2f(1, 1)]), [GLTriangleFace(1, 1, 1)])
@test m.position === pos
end
@testset "mesh conversion" begin
s = Sphere(Point3(0.0), 1.0)
m = GeometryBasics.mesh(s)
@test m isa Mesh{3, Float64}
@test coordinates(m) isa Vector{Point{3, Float64}}
@test GeometryBasics.faces(m) isa Vector{GLTriangleFace}
# Check, that decompose isn't making a copy for matching eltype
@test coordinates(m) === decompose(Point{3, Float64}, m)
tmesh = triangle_mesh(m)
@test tmesh isa GLPlainMesh
@test coordinates(tmesh) === decompose(Point3f, tmesh)
nmesh = normal_mesh(m)
@test nmesh isa GLNormalMesh
@test metafree(coordinates(nmesh)) === decompose(Point3f, nmesh)
@test normals(nmesh) === decompose_normals(nmesh)
m = GeometryBasics.mesh(s, pointtype=Point3f)
@test m isa Mesh{3, Float32}
@test coordinates(m) isa Vector{Point3f}
@test GeometryBasics.faces(m) isa Vector{GLTriangleFace}
end
@testset "lines intersects" begin
a = Line(Point(0.0, 0.0), Point(4.0, 1.0))
b = Line(Point(0.0, 0.25), Point(3.0, 0.25))
c = Line(Point(0.0, 0.25), Point(0.5, 0.25))
d = Line(Point(0.0, 0.0), Point(0.0, 4.0))
e = Line(Point(1.0, 0.0), Point(0.0, 4.0))
f = Line(Point(5.0, 0.0), Point(6.0, 0.0))
@test intersects(a, b) === (true, Point(1.0, 0.25))
@test intersects(a, c) === (false, Point(0.0, 0.0))
@test intersects(d, d) === (false, Point(0.0, 0.0))
found, point = intersects(d, e)
@test found && point ≈ Point(0.0, 4.0)
@test intersects(a, f) === (false, Point(0.0, 0.0))
# issue #168
# If these tests fail then you can increase the tolerance on the checks so
# long as you know what you're doing :)
line_helper(a, b, c, d) = Line(Point(a, b), Point(c, d))
b, loc = intersects(line_helper(-3.1, 15.588457268119894, 3.1, 15.588457268119894),
line_helper(2.0866025403784354, 17.37050807568877, -4.0866025403784505, 13.806406460551015))
@test b
@test loc ≈ Point(-1.0000000000000058, 15.588457268119894)
b, loc = intersects(line_helper(5743.933982822018, 150.0, 5885.355339059327, -50.0),
line_helper(5760.0, 100.0, 5760.0, 140.0))
@test b
@test loc ≈ Point(5760.0, 127.27922061357884)
end
@testset "Offsetintegers" begin
x = 1
@test GeometryBasics.raw(x) isa Int64
@test GeometryBasics.value(x) == x
x = ZeroIndex(1)
@test eltype(x) == Int64
x = OffsetInteger{0}(1)
@test typeof(x) == OffsetInteger{0,Int64}
x1 = OffsetInteger{0}(2)
@test GeometryBasics.pure_max(x, x1) == x1
@test promote_rule(typeof(x), typeof(x1)) == OffsetInteger{0,Int64}
x2 = 1
@test promote_rule(typeof(x2), typeof(x1)) == Int64
@test Base.to_index(x1) == 2
@test -(x1) == OffsetInteger{0,Int64}(-2)
@test abs(x1) == OffsetInteger{0,Int64}(2)
@test +(x, x1) == OffsetInteger{0,Int64}(3)
@test *(x, x1) == OffsetInteger{0,Int64}(2)
@test -(x, x1) == OffsetInteger{0,Int64}(-1)
# test for integer division (div)
@test div(x, x1) == OffsetInteger{0,Int64}(0)
@test !==(x, x1)
@test !>=(x, x1)
@test <=(x, x1)
@test !>(x, x1)
@test <(x, x1)
end
@testset "MetaT and heterogeneous data" begin
ls = [LineString([Point(i, (i+1)^2/6), Point(i*0.86,i+5), Point(i/3, i/7)]) for i in 1:10]
mls = MultiLineString([LineString([Point(i+1, (i)^2/6), Point(i*0.75,i+8), Point(i/2.5, i/6.79)]) for i in 5:10])
poly = Polygon(Point{2, Int}[(40, 40), (20, 45), (45, 30), (40, 40)])
geom = [ls..., mls, poly]
prop = Any[(country_states = "India$(i)", rainfall = (i*9)/2) for i in 1:11]
push!(prop, (country_states = 12, rainfall = 1000)) # a pinch of heterogeneity
feat = [MetaT(i, j) for (i,j) = zip(geom, prop)]
sa = meta_table(feat)
@test nameof(eltype(feat)) == :MetaT
@test eltype(sa) === MetaT{Any,(:country_states, :rainfall),Tuple{Any,Float64}}
@test propertynames(sa) === (:main, :country_states, :rainfall)
@test getproperty(sa, :country_states) isa Array{Any}
@test getproperty(sa, :main) == geom
maintype, metanames, metatype = GeometryBasics.getnamestypes(typeof(feat[1]))
@test (metanames, metatype) == ((:country_states, :rainfall), Tuple{String,Float64})
@test StructArrays.createinstance(typeof(feat[1]), LineString([Point(1, (2)^2/6), Point(1*0.86,6), Point(1/3, 1/7)]), "Mumbai", 100) isa typeof(feat[1])
@test Base.getindex(feat[1], 1) isa Line
@test Base.size(feat[1]) == (2,)
end
@testset "StructArrays integration" begin
pt = meta(Point(0.0, 0.0), color="red", alpha=0.1)
@test StructArrays.component(pt, :position) == Point(0.0, 0.0)
@test StructArrays.component(pt, :color) == "red"
@test StructArrays.component(pt, :alpha) == 0.1
@test StructArrays.staticschema(typeof(pt)) ==
NamedTuple{(:position, :color, :alpha), Tuple{Point2{Float64}, String, Float64}}
@test StructArrays.createinstance(typeof(pt), Point(0.0, 0.0), "red", 0.1) == pt
s = StructArray([pt, pt])
@test StructArrays.components(s) == (
position = [Point(0.0, 0.0), Point(0.0, 0.0)],
color = ["red", "red"],
alpha = [0.1, 0.1]
)
s[2] = meta(Point(0.1, 0.1), color="blue", alpha=0.3)
@test StructArrays.components(s) == (
position = [Point(0.0, 0.0), Point(0.1, 0.1)],
color = ["red", "blue"],
alpha = [0.1, 0.3]
)
end
@testset "Tests from GeometryTypes" begin
include("geometrytypes.jl")
end
@testset "Point & Vec type" begin
include("fixed_arrays.jl")
end
@testset "GeoInterface" begin
include("geointerface.jl")
end
using Aqua
# Aqua tests
# Intervals brings a bunch of ambiguities unfortunately
Aqua.test_all(GeometryBasics; ambiguities=false)
end # testset "GeometryBasics"
| GeometryBasics | https://github.com/JuliaGeometry/GeometryBasics.jl.git |