licenses (sequence, lengths 1-3) | version (string, 677 classes) | tree_hash (string, length 40) | path (string, 1 class) | type (string, 2 classes) | size (string, lengths 2-8) | text (string, lengths 25-67.1M) | package_name (string, lengths 2-41) | repo (string, lengths 33-86) |
---|---|---|---|---|---|---|---|---|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 2069 | module MetapopulationDynamics
using Base: @kwdef
using Distances
using Distributions
using StatsBase
using Term
using CodeTracking: @code_string
using UnicodePlots
abstract type AbstractSpace end
export AbstractSpace
include(joinpath("space", "patches.jl"))
include(joinpath("space", "raster.jl"))
include(joinpath("space", "spatialgraph.jl"))
export Raster, SpatialGraph, Patches, numsites, distancematrix
include(joinpath("models.jl"))
export Model,
AbstractLocalDynamics,
AbstractOccupancyDynamics,
AbstractAbundanceDynamics,
AbstractDispersalModel
include(joinpath("modelset.jl"))
export modelset, ModelSet
include(joinpath("dispersal", "kernels.jl"))
export DispersalKernel,
ExponentialDispersalKernel, GaussianDispersalKernel, DispersalPotential, kernelmatrix
include(joinpath("dispersal", "jump.jl"))
export StochasticJumpDispersalModel, DeterministicJumpDispersalModel
include(joinpath("localdynamics", "occupancy", "levins1967.jl"))
include(joinpath("localdynamics", "occupancy", "incidencefunction.jl"))
export Hanski1994, Levins1967
include(joinpath("localdynamics", "abundance", "ricker.jl"))
export RickerModel
include(joinpath("localdynamics", "abundance", "stochasticlogistic.jl"))
export StochasticLogistic
include(joinpath("simulate.jl"))
export simulate, simulate!
include(joinpath("outputs.jl"))
export AbstractOutput, AbundanceOutput, OccupancyOutput
include(joinpath("summarizers", "synchrony.jl"))
export computepcc
include(joinpath("environment", "layer.jl"))
export EnvironmentLayer
include(joinpath("environment", "model.jl"))
export OccupancyEnvironmentModel, AbundanceEnvironmentModel
include(joinpath("environment", "layerset.jl"))
export EnvironmentLayerSet, numlayers
include(joinpath("environment", "timeseries.jl"))
export EnvironmentTimeseries
# Load integrations
using Requires
function __init__()
@info "Loading NeutralLandscapes.jl support..."
@require NeutralLandscapes="71847384-8354-4223-ac08-659a5128069f" include(joinpath("integrations", "neutrallandscapes.jl"))
end
end # module
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 419 | """
Metapopulation capacity à la Hanski & Ovaskainen (2003):
if the leading eigenvalue of the matrix M is less than e/c,
the system is not stable.
The matrix M is defined differently in discrete and continuous time.
In continuous time, M_ij = A_i * A_j * exp(-α * d_ij),
where exp(-α * d_ij) is the dispersal kernel.
"""
function capacity(sg::SpatialGraph, kern::DispersalKernel)
end
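# The sketch below is illustrative and not part of the original source: it assumes patch
# areas `A` are supplied by the caller (a SpatialGraph only stores coordinates), uses the
# continuous-time definition above with a zero diagonal, and takes the metapopulation
# capacity to be the leading eigenvalue of M.
using LinearAlgebra: eigvals
function capacity(sg::SpatialGraph, kern::DispersalKernel, A::Vector{<:Real})
d = distancematrix(sg)
n = numsites(sg)
M = [i == j ? 0.0 : A[i] * A[j] * kern(d[i, j]) for i in 1:n, j in 1:n]
maximum(real, eigvals(M))
end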
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 2284 | abstract type Model end
abstract type AbstractLocalDynamics <: Model end
abstract type AbstractOccupancyDynamics <: AbstractLocalDynamics end
abstract type AbstractAbundanceDynamics <: AbstractLocalDynamics end
abstract type AbstractDispersalModel <: Model end
abstract type AbstractEnvironmentModel <: Model end
@kwdef struct ModelSet{
L<:Union{AbstractLocalDynamics,Missing},
D<:Union{AbstractDispersalModel,Missing},
E<:Union{AbstractEnvironmentModel,Missing},
}
localdynamics::L
dispersal::D
environment::E
end
modelset(
l::L,
d::D,
e::E,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(
l::L,
e::E,
d::D,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(
d::D,
l::L,
e::E,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(
d::D,
e::E,
l::L,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(
e::E,
l::L,
d::D,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(
e::E,
d::D,
l::L,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(l, d, e)
modelset(e::E, d::D) where {D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(missing, d, e)
modelset(d::D, e::E) where {D<:AbstractDispersalModel,E<:AbstractEnvironmentModel} =
ModelSet(missing, d, e)
modelset(
d::D,
l::L,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel} =
ModelSet(l, d, missing)
modelset(
l::L,
d::D,
) where {L<:AbstractLocalDynamics,D<:AbstractDispersalModel} =
ModelSet(l, d, missing)
modelset(
e::E,
l::L,
) where {L<:AbstractLocalDynamics,E<:AbstractEnvironmentModel} =
ModelSet(l, missing, e)
modelset(
l::L,
e::E,
) where {L<:AbstractLocalDynamics,E<:AbstractEnvironmentModel} =
ModelSet(l, missing, e)
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1509 | abstract type AbstractOutput end
"""
AbundanceOutput
"""
struct AbundanceOutput <: AbstractOutput
timeseries
end
Base.string(output::AbundanceOutput) = """
[bold]$(typeof(output)) <: $(supertype(typeof(output)))[/bold]
An ::AbundanceOutput with $(size(output.timeseries,2)) time points.
"""
Base.show(io::IO, ::MIME"text/plain", output::AbundanceOutput) = print(
io,
string(
Panel(_abundanceplot(output))
),
)
function _abundanceplot(output::AbundanceOutput)
grid = output.timeseries
nt = size(grid, 2)
np = size(grid, 1)
totalabundance = zeros(Float64, nt)
for t in 1:nt
totalabundance[t] = sum(grid[:,t])
end
string(lineplot(1:nt, totalabundance, xlabel="Time", ylabel="N"))
end
"""
OccupancyOutput
"""
struct OccupancyOutput <: AbstractOutput
timeseries
end
OccupancyOutput(t::Vector{T}) where {T<:Matrix} = OccupancyOutput(t)
Base.string(output::OccupancyOutput) = """
[bold]$(typeof(output)) <: $(supertype(typeof(output)))[/bold]
An ::OccupancyOutput with $(size(output.timeseries,2)) time points.
"""
Base.show(io::IO, ::MIME"text/plain", output::OccupancyOutput) = print(
io,
string(
Panel(_occupancyplot(output))
),
)
function _occupancyplot(output::OccupancyOutput)
grid = output.timeseries
nt = size(grid, 2)
np = size(grid, 1)
prop = zeros(Float64, nt)
for t in 1:nt
prop[t] = sum(grid[:,t])/np
end
string(lineplot(1:nt, prop, ylim=(0,1), xlabel="Time", ylabel="p"))
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 2024 | function simulate(
modelset::M,
space::S;
numtimesteps = 100,
) where {M<:ModelSet,S<:AbstractSpace}
output = zeros(Float32, numsites(space), numtimesteps)
simulate!(modelset, space, output)
AbundanceOutput(output)
end
function simulate(model::M, space::S; numtimesteps = 100) where {M,S<:AbstractSpace}
output = zeros(Float32, numsites(space), numtimesteps)
simulate!(model, space, output)
end
_initcondition(::AbstractAbundanceDynamics, space; λ = 100) =
rand(Poisson(λ), numsites(space))
_initcondition(::AbstractOccupancyDynamics, space; p = 0.3) =
rand(Bernoulli(p), numsites(space))
_sim!(::Missing, space, oldarray, newarray) = oldarray
function simulate!(modelset::M, space::S, output) where {M<:ModelSet,S<:AbstractSpace}
nt = size(output, 2)
init = _initcondition(modelset.localdynamics, space)
output[:, begin] .= init
for t = 2:nt
# need to think about order and memory here
output[:, t] .= _sim!(modelset.environment, space, output[:, t-1], output[:, t])
output[:, t] .= _sim!(modelset.localdynamics, space, output[:, t], output[:, t])
output[:, t] .= _sim!(modelset.dispersal, space, output[:, t], output[:, t])
end
end
function simulate!(
model::M,
space::S,
output;
init = rand(Bernoulli(0.5), numsites(space)),
) where {M<:AbstractOccupancyDynamics,S<:AbstractSpace}
nt = size(output, 2)
output[:, begin] .= init
for t = 2:nt
output[:, t] .= _sim!(model, space, output[:, t-1], output[:, t])
end
OccupancyOutput(output)
end
function simulate!(
model::M,
space::S,
output;
init = rand(Poisson(100), numsites(space)),
) where {M<:AbstractAbundanceDynamics,S<:AbstractSpace}
nt = size(output, 2)
output[:, begin] .= init
for t = 2:nt
output[:, t] .= _sim!(model, space, output[:, t-1], output[:, t])
end
# compute params
# params = (...)
# AbundanceOutput(output, model, space)
AbundanceOutput(output)
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 103 |
"""
AdjacentDispersalModel <: AbstractDispersalModel
Only for gridded (raster) models; dispersal occurs between adjacent cells.
"""
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1900 |
@kwdef struct StochasticJumpDispersalModel{T} <: AbstractDispersalModel
prob::T = 0.1 # probability an individual disperses in its lifetime before reproducing
potential::DispersalPotential =
DispersalPotential(ExponentialDispersalKernel(), SpatialGraph())
end
params(model::StochasticJumpDispersalModel) = model.prob, model.potential
function _sim!(
model::StochasticJumpDispersalModel,
sg::SpatialGraph,
prevstate::Vector{T},
newstate::Vector{T},
) where {T<:Number}
delta = zeros(size(newstate))
p, ϕ = params(model)
for (i, istate) in enumerate(prevstate)
if istate > 0 && sum(ϕ.matrix[i, :]) > 0
num_leaving_i = min(rand(Binomial(floor(Int, istate), p)), floor(Int, istate))
realizedtargs = rand(Categorical(ϕ.matrix[i, :]), num_leaving_i)
#@info num_leaving_i, realizedtargs
for j in realizedtargs
delta[j] += 1
end
delta[i] -= num_leaving_i
end
end
newstate = prevstate .+ delta
newstate
end
@kwdef struct DeterministicJumpDispersalModel{T} <: AbstractDispersalModel
prob::T = 0.1 # probability an individual disperses in its lifetime before reproducing
potential::DispersalPotential =
DispersalPotential(ExponentialDispersalKernel(), SpatialGraph())
end
params(model::DeterministicJumpDispersalModel) = model.prob, model.potential
function _sim!(
model::DeterministicJumpDispersalModel,
sg::SpatialGraph,
prevstate::Vector{T},
newstate::Vector{T},
) where {T<:Number}
np = length(newstate)
p, ϕ = params(model)
diffusionmatrix = zeros(np, np)
for (i, istate) in enumerate(prevstate)
for (j, jstate) in enumerate(prevstate)
diffusionmatrix[i, j] = i == j ? 1 - p : ϕ[i, j] * p
end
end
newstate = diffusionmatrix * prevstate
newstate
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1962 | abstract type AbstractDispersalKernel end
@kwdef struct DispersalKernel
func::Function = (x, decay) -> exp(-x * decay)# a function mapping (x, decay) to a value in [0,1]
decay = 1.0 # a positive real number
threshold = 0.01 # cutoff threshold for a value of func to be considered 0
end
Base.string(kern::DispersalKernel) = """
[bold]Kernel: [/bold][green]$(kern.func)[/green]
[bold]Decay: [/bold][yellow]$(kern.decay)[/yellow]
[bold]Threshold: [/bold][yellow]$(kern.threshold)[/yellow]
"""
Base.show(io::IO, ::MIME"text/plain", kern::DispersalKernel) = print(
io,
string(
Panel(
string(kern);
title = string(typeof(kern)),
style = "#a686eb dim",
title_style = "default #a686eb bold",
width = 25,
padding = (2, 2, 1, 1),
),
),
)
function (dk::DispersalKernel)(x)
f = dk.func(x, dk.decay)
f > dk.threshold ? f : 0
end
ExponentialDispersalKernel(;
func = (x, decay) -> exp(-x * decay),
decay = 1.0,
threshold = 0.01,
) = DispersalKernel(; func = func, decay = decay, threshold = threshold)
GaussianDispersalKernel(;
func = (x, decay) -> exp(-(x * decay)^2),
decay = 1.0,
threshold = 0.01,
) = DispersalKernel(; func = func, decay = decay, threshold = threshold)
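# Illustrative usage (not part of the original source): a kernel instance is callable, so
# evaluating it at a distance returns the thresholded decay value, e.g.
#   k = ExponentialDispersalKernel(decay = 3.0)
#   k(0.5)   # exp(-0.5 * 3.0) ≈ 0.223; values below `threshold` are truncated to 0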
function kernelmatrix(space, kernel)
distmat = distancematrix(space)
broadcast(x -> x == 0 ? 0 : kernel(x), distmat)
end
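# A DispersalPotential stores the row-normalized kernel matrix: entry [i, j] is the
# probability that an individual dispersing away from site i arrives at site j.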
struct DispersalPotential
matrix::Any
end
function DispersalPotential(kernel::DispersalKernel, space::T) where {T<:AbstractSpace}
ns = numsites(space)
kernmat = kernelmatrix(space, kernel)
mat = zeros(Float32, size(kernmat))
for i = 1:ns, j = 1:ns
if (sum(kernmat[i, :]) > 0)
mat[i, j] = kernmat[i, j] / sum(kernmat[i, :])
end
end
DispersalPotential(mat)
end
Base.getindex(potential::DispersalPotential, i::T, j::T) where {T<:Integer} =
potential.matrix[i, j]
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1146 | struct EnvironmentLayer{S,T}
space::S
values::T
end
function EnvironmentLayer(sg::SpatialGraph; dist = MidpointDisplacement(0.5))
ns = numsites(sg)
layer = rand(dist, (100,100))
values = []
EnvironmentLayer(sg, values)
end
EnvironmentLayer(raster::Raster) = EnvironmentLayer(raster, raster.matrix)
EnvironmentLayer(mat::Matrix) = EnvironmentLayer(Raster(similar(mat)), mat)
Base.string(el::EnvironmentLayer{ST,ET}) where {ST,ET} = """
An [bold]environmental layer[/bold] with [bold][yellow]$(length(el.values))[/yellow][/bold] locations
based on a $(ST).
"""
function layer_plot(layer)
plt = heatmap(layer.values, xlabel="x", ylabel="y")
io = IOBuffer()
print(IOContext(io, :color => true), plt)
return String(take!(io))
end
Base.show(io::IO, ::MIME"text/plain", el::EnvironmentLayer) = print(
io,
string(
"\n",
Panel(
string(el),
layer_plot(el),
title = string(typeof(el)),
style = "green dim",
title_style = "default green bold",
padding = (2, 2, 1, 1),
width = 60,
),
),
)
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 174 | struct EnvironmentLayerSet
layers::Vector{EnvironmentLayer}
end
Base.size(els::EnvironmentLayerSet) = length(els.layers)
numlayers(els::EnvironmentLayerSet) = size(els)
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 857 |
"""
OccupancyEnvironmentModel
`layer`: an `EnvironmentLayer`.
`niche`: a function defined over (min(layer), max(layer)) that integrates to 1 over that
interval; for a value x in that domain, niche(x) returns the probability that an observed
occurrence falls at a location with environment value x.
"""
struct OccupancyEnvironmentModel{T<:AbstractSpace} <: AbstractEnvironmentModel
layer::EnvironmentLayer
niche::Function
end
"""
AbundanceEnvironmentModel
`layer`: an `EnvironmentLayer`.
`niche`: a function defined over (min(layer), max(layer)) that gives a multiplicative
constant on [0, ∞); i.e. the realized growth at a location with environmental value x is
niche(x) * intrinsic growth.
"""
struct AbundanceEnvironmentModel{T<:AbstractSpace} <: AbstractEnvironmentModel
layer::EnvironmentLayer
niche::Function
end
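# Illustrative sketch (not part of the original source): a Gaussian response curve used as
# the `niche` argument, with an arbitrary optimum of 0.5 and breadth of 0.1:
#   niche(x) = exp(-(x - 0.5)^2 / (2 * 0.1^2))
#   model = AbundanceEnvironmentModel{SpatialGraph}(layer, niche)  # `layer`: an existing EnvironmentLayer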
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 902 | struct EnvironmentTimeseries{T<:Union{EnvironmentLayer,EnvironmentLayerSet}}
timeseries::Vector{T}
end
Base.length(lt::EnvironmentTimeseries) = length(lt.timeseries)
Base.getindex(lt::EnvironmentTimeseries, i) = lt.timeseries[i]
Base.iterate(et::EnvironmentTimeseries, i=1) = i <= length(et) ? (et[i], i+1) : nothing
Base.string(et::EnvironmentTimeseries) = """
[bold]$(typeof(et))[/bold]
A [bold]timeseries[/bold] of environmental layers with [bold][yellow]$(length(et.timeseries))[/yellow][/bold] timesteps.
"""
Base.show(io::IO, ::MIME"text/plain", et::EnvironmentTimeseries) = begin
print(
io,
string(
Panel(
string(et),
title = "EnvironmentTimeseries",
style = "blue dim",
title_style = "default blue bold",
padding = (2, 2, 1, 1),
width = 70,
),
),
)
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 905 | # WARNING this file is only loaded if NeutralLandscapes.jl is also active
# This all happens thanks to the Requires.jl package
using NeutralLandscapes
const NLMaker = NeutralLandscapes.NeutralLandscapeMaker
const NLUpdater = NeutralLandscapes.NeutralLandscapeUpdater
EnvironmentLayer(maker::NLMaker; dims=(50,50)) = EnvironmentLayer(Raster(rand(maker, dims)))
NeutralLandscapes.update(up::T, layer::EnvironmentLayer) where T<:NLUpdater = begin
EnvironmentLayer(NeutralLandscapes.update(up, layer.values))
end
NeutralLandscapes.update(up::T, layer::EnvironmentLayer, n::I) where {T<:NLUpdater,I<:Integer} = begin
EnvironmentTimeseries(EnvironmentLayer.(NeutralLandscapes.update(up, layer.values, n)))
end
NeutralLandscapes.normalize(layers::Vector{L}) where {L<:EnvironmentLayer} = begin
EnvironmentTimeseries(EnvironmentLayer.(NeutralLandscapes.normalize([l.values for l in layers])))
end | MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 569 |
abstract type RickerModelType end
struct DemographicStochasticity <: RickerModelType end
struct DemographicHeterogeneity <: RickerModelType end
@kwdef struct RickerModel <: AbstractAbundanceDynamics
λ = 1.5
α = 0.003
end
params(rm::RickerModel) = rm.λ, rm.α
function _sim!(
model::RickerModel,
space::S,
prevstate::Vector{T},
newstate::Vector{T},
) where {S<:AbstractSpace,T<:Real}
λ, α = params(model)
for (i, st) in enumerate(prevstate)
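# Expected abundance under the Ricker map, N * λ * exp(-α * N); the realized abundance is
# a Poisson draw around this mean (demographic stochasticity).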
mn = st * λ * exp(-α * st)
tmp = mn > 0 ? rand(Poisson(mn)) : 0
newstate[i] = tmp
end
newstate
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 642 |
@kwdef struct StochasticLogistic <: AbstractAbundanceDynamics
λ = 2.5
K = 100.0
σ = 0.1
dt = 0.01
end
params(sl::StochasticLogistic) = sl.λ, sl.K, sl.σ, sl.dt
function _sim!(
model::StochasticLogistic,
space::S,
prevstate::Vector{T},
newstate::Vector{T},
) where {S<:AbstractSpace,T<:Real}
λ, K, σ, dt = params(model)
for (i, N) in enumerate(prevstate)
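# Euler step of size dt: deterministic logistic growth plus Gaussian noise with standard
# deviation proportional to N, with abundances floored at zero.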
logisticgrowth = N > 0 ? λ * N * (1 - (N / K)) : 0
drift = N > 0 ? rand(Normal(0, N * σ)) : 0
delta = dt * (drift + logisticgrowth)
tmp = delta + N
newstate[i] = tmp > 0 ? tmp : 0
end
newstate
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 54 | """
Arbitrary C_i function supplied by user
"""
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 2746 | @kwdef struct Hanski1994{T<:Real} <: AbstractOccupancyDynamics
c::T = 0.1
e::T = 0.05
α::T = 10.0
x::T = 1.0
numlocations = 20
areadistribution=Exponential(1.5)
A::Vector{T} = rand(areadistribution, numlocations)
kernel = ExponentialDispersalKernel(; decay = α, threshold = 0.01)
end
Base.string(ifm::Hanski1994) = """
[bold]$(typeof(ifm)) <: $(supertype(typeof(ifm)))[/bold]
[bold] $(supertype(supertype(typeof(ifm)))) <: $(supertype(supertype(supertype(typeof(ifm)))))[/bold]
An [bold]incidence function[/bold] model with parameters:
[bold]c: [/bold][yellow]$(ifm.c)[/yellow]
[bold]e: [/bold][yellow]$(ifm.e)[/yellow]
[bold]α: [/bold][yellow]$(ifm.α)[/yellow]
[bold]x (area-extinction dependence):[/bold] [yellow]$(ifm.x)[/yellow]
[bold]A (areas vector):[/bold] Vector of [yellow]::$(typeof(ifm.A[begin]))[/yellow] of length [red]$(length(ifm.A))[/red]
with [bold]mean[/bold] [green]$(round(mean(ifm.A),digits=3))[/green] and [bold]variance[/bold] [green]$(round(var(ifm.A), digits=3))[/green]
[bold]Kernel:[/bold]
"""
Base.show(io::IO, ::MIME"text/plain", ifm::Hanski1994) = print(
io,
string(
Panel(
string(ifm),
Panel(
string(ifm.kernel);
title = string(typeof(ifm.kernel)),
style = "yellow dim",
title_style = "default yellow bold",
width = 24,
);
title = string(typeof(ifm)),
style = "blue dim",
title_style = "default bright_blue bold",
padding = (2, 2, 1, 1),
width = 60,
),
),
)
params(model::M) where {M<:Hanski1994} =
model.c, model.e, model.α, model.x, model.A, model.kernel
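# Extinction probability of an occupied patch i: E_i = min(1, e / A_i^x) (Hanski 1994).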
function _hanski1994_extinction(extprob, A_i, x)
A_i < extprob^(1 / x) ? 1 : extprob / (A_i^x)
end
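# Colonization probability of an empty patch i: C_i = S_i^2 / (S_i^2 + 1/c), where the
# connectivity S_i sums A_j * K(d_ij) over occupied patches j != i (Hanski 1994).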
function _hanski1994_colonization(oldstate, i, c, A, kernmat)
nsites = length(oldstate)
S_i = 0.0
for j = 1:nsites
if i != j && oldstate[j] == 1
S_i += A[j] * kernmat[j, i]
end
end
C_i = S_i^2 / ((S_i)^2 + (1 / c))
return C_i
end
function _sim!(
model::M,
space::S,
prevstate::Vector{T},
newstate::Vector{T},
) where {M<:Hanski1994,S<:AbstractSpace,T<:Real}
@assert numsites(space) == length(model.A)
c, e, α, x, A, kern = params(model)
kernmat = kernelmatrix(space, kern)
for (i, st) in enumerate(prevstate)
if st == 1
extprob = _hanski1994_extinction(e, A[i], x)
newstate[i] = rand() < extprob ? 0 : 1
elseif st == 0
colprob = _hanski1994_colonization(prevstate, i, c, A, kernmat)
newstate[i] = rand() < colprob
end
end
newstate
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1123 | struct Levins1967{T<:Real} <: AbstractOccupancyDynamics
c::T
e::T
end
Base.string(lv::Levins1967) = """
[bold]$(typeof(lv)) <: $(supertype(typeof(lv)))[/bold]
[bold] <: $(supertype(supertype(typeof(lv)))) <: $(supertype(supertype(supertype(typeof(lv)))))[/bold]
A [bold]Levins metapopulation[/bold] model with parameters:
[bold]c: [/bold][yellow]$(lv.c)[/yellow]
[bold]e: [/bold][yellow]$(lv.e)[/yellow]
"""
Base.show(io::IO, ::MIME"text/plain", lv::Levins1967) = print(
io,
string(
Panel(
string(lv),
title = string(typeof(lv)),
style = "blue dim",
title_style = "default bright_blue bold",
padding = (2, 2, 1, 1),
width = 60,
),
),
)
params(model::M) where {M<:Levins1967} = model.c, model.e
function _sim!(
model::M,
space::S,
prevstate::Vector{T},
newstate::Vector{T},
) where {M<:Levins1967,S<:AbstractSpace,T<:Real}
c, e = params(model)
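# Each occupied site goes extinct with probability e; each empty site is colonized with
# probability c.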
for (i, st) in enumerate(prevstate)
ns = st == 1 ? !(rand() <= e) : rand() <= c
newstate[i] = ns
end
newstate
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 393 | """
Should patches be a type wrapper that converts
to a raster with explicit grain?
Should there be generators for patches with
area distribution and distance to patch dist?
I think develop raster and spatial graph first and
then implement this and decide whether it should
be its own package.
"""
struct Patches <: AbstractSpace end
numsites(::Patches) = nothing
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1045 | struct Raster <: AbstractSpace
matrix::Matrix
end
Base.size(r::Raster) = size(r.matrix)
numsites(r::Raster) = prod(size(r.matrix))
coordinates(r::Raster) = CartesianIndices(size(r))
function distancematrix(r::Raster; distance = Euclidean())
distmat = zeros(numsites(r), numsites(r))
ci = coordinates(r)
for i = 1:numsites(r), j = 1:numsites(r)
x,y = ci[i], ci[j]
distmat[i, j] = evaluate(distance, (x[1], x[2]), (y[1], y[2]))
end
distmat
end
Base.string(r::Raster) = """
[bold]$(typeof(r)) <: $(supertype(typeof(r)))[/bold]
A [bold]raster[/bold] with [bold][yellow]$(numsites(r))[/yellow][/bold] locations.
"""
Base.show(io::IO, ::MIME"text/plain", r::Raster) = print(
io,
string(
Panel(
#string(r),
title = string(typeof(r)),
style = "green dim",
title_style = "default green bold",
padding = (2, 2, 1, 1),
width = 60,
#Panel(r.matrix, style = "green", width = 60),
),
),
)
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 1498 | @kwdef struct SpatialGraph{T} <: AbstractSpace
coordinates::Vector{Tuple{T,T}} = [(rand(), rand()) for _ = 1:20]
end
SpatialGraph(n::Integer) = SpatialGraph([(rand(), rand()) for _ = 1:n])
function _spatialgraph_to_text(sg)
str = string(
scatterplot(
[x[1] for x in sg.coordinates],
[x[2] for x in sg.coordinates],
xlim = (0, 1),
ylim = (0, 1),
),
)
replace(str, "." => "[green].[/green]")
end
Base.string(sg::SpatialGraph) = """
[bold]$(typeof(sg)) <: $(supertype(typeof(sg)))[/bold]
A [bold]spatial graph[/bold] with [bold][yellow]$(length(sg.coordinates))[/yellow][/bold] locations.
"""
Base.show(io::IO, ::MIME"text/plain", sg::SpatialGraph) = print(
io,
string(
Panel(
string(sg),
title = string(typeof(sg)),
style = "green dim",
title_style = "default green bold",
padding = (2, 2, 1, 1),
width = 60,
Panel(_spatialgraph_to_text(sg), style = "green", width = 60),
),
),
)
coordinates(sg::SpatialGraph) = sg.coordinates
numsites(sg::SpatialGraph) = length(coordinates(sg))
function distancematrix(sg::SpatialGraph; distance = Euclidean())
distmat = zeros(numsites(sg), numsites(sg))
for i = 1:numsites(sg), j = 1:numsites(sg)
x, y = sg.coordinates[i], sg.coordinates[j]
distmat[i, j] = evaluate(distance, (x[1], x[2]), (y[1], y[2]))
end
distmat
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 409 | function computepcc(timeseries)
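# Mean pairwise cross-correlation at lag zero across all pairs of populations (rows of
# `timeseries`), used as a simple measure of spatial synchrony.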
n_pops = length(timeseries[:, 1])
mean_cc::Float64 = 0.0
s::Float64 = 0.0
ct::Int64 = 0
for p1 = 1:n_pops
for p2 = (p1+1):n_pops
v1 = timeseries[p1, :]
v2 = timeseries[p2, :]
cc = crosscor((v1), (v2), [0])
s += cc[1]
ct += 1
end
end
mean_cc = s / ct
return mean_cc
end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 99 | using MetapopulationDynamics
using Test
r = Raster(zeros(50,50))
@test typeof(r) <: AbstractSpace | MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 720 | using Test, SafeTestsets
@time @safetestset "spatial graphs" begin include("spatialgraphs.jl") end
@time @safetestset "rasters" begin include("rasters.jl") end
@time @safetestset "Levins 1967" begin include("levins1967.jl") end
@time @safetestset "Hanski 1994" begin include("hanski1994.jl") end
@time @safetestset "Occupancy environmental model" begin include("environment_occupancy.jl") end
@time @safetestset "Abundance environmental model" begin include("environment_abundance.jl") end
@time @safetestset "Ricker model" begin include("ricker.jl") end
@time @safetestset "Stochastic logistic model" begin include("stochasticlogistic.jl") end
@time @safetestset "Jump dispersal" begin include("jumpdispersal.jl") end
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 95 | using MetapopulationDynamics
using Test
sg = SpatialGraph()
@test typeof(sg) <: AbstractSpace | MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | code | 40 | using MetapopulationDynamics
using Test
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | docs | 284 | # MetapopulationDynamics.jl
WIP package for simulating metapopulation dynamics (both
occupancy and abundance) across different geometries
(rasters, patches, spatial graphs).
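A minimal sketch of the intended workflow (illustrative only; the parameter values are
arbitrary and the API may change while the package is WIP):

```julia
using MetapopulationDynamics

space = SpatialGraph(25)                   # 25 random sites in the unit square
model = RickerModel(λ = 1.2, α = 0.005)    # density-dependent local growth
output = simulate(model, space; numtimesteps = 200)   # returns an AbundanceOutput
```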
---
Future features:
- Interface with DiffEq.jl to plug ODE/SDEs at local sites connected via dispersal
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | docs | 880 | # MetapopulationDynamics.jl
Population dynamics across space, in Julia.
Includes high-performance implementations of classic and modern metapopulation models,
raster simulations of populations and occurrence, selection on environmental variables,
stochasticity of demography and environment, and more.
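A small illustrative example (parameter values are arbitrary) simulating a Levins-type
occupancy model on a random spatial graph:

```julia
using MetapopulationDynamics

space = SpatialGraph(30)          # 30 random sites in the unit square
model = Levins1967(0.3, 0.05)     # colonization and extinction probabilities
output = simulate(model, space)   # returns an OccupancyOutput
```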
```@docs
AbstractSpace
Raster
SpatialGraph
Patches
numsites
distancematrix
Model
AbstractLocalDynamics
AbstractOccupancyDynamics
AbstractAbundanceDynamics
AbstractDispersalModel
modelset
ModelSet
DispersalKernel
ExponentialDispersalKernel
GaussianDispersalKernel
DispersalPotential
StochasticJumpDispersalModel
DeterministicJumpDispersalModel
Hanski1994
Levins1967
RickerModel
StochasticLogistic
simulate
simulate!
AbstractOutput
AbundanceOutput
OccupancyOutput
computepcc
EnvironmentLayer
OccupancyEnvironmentModel
AbundanceEnvironmentModel
numlayers
EnvironmentTimeseries
```
| MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.0.1 | e2da55f31e0a03f2fc04109ea5534108163d4eee | docs | 377 | ***Occupancy models***:
- Levins metapopulation (Levins 1967)
- Incidence function (Hanski 1994)
- Spatially explicit occupancy models (SPOMs; Hanski & Ovaskainen 2003)
***Abundance models***:
- Ricker, different forms:
- Demographic
- Environmental
- Demographic heterogeneity
- Sex stochasticity
- Logistic
- Deterministic logistic
- Stochastic logistic | MetapopulationDynamics | https://github.com/EcoJulia/MetapopulationDynamics.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | code | 487 | using PixelArt
using Documenter
makedocs(;
modules=[PixelArt],
authors="Yuchi Yamaguchi",
repo="https://github.com/abap34/PixelArt.jl/blob/{commit}{path}#L{line}",
sitename="PixelArt.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://abap34.github.io/PixelArt.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
)
deploydocs(;
repo="github.com/abap34/PixelArt.jl",
)
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | code | 108 | using Images
using PixelArt
img = load("img.jpg")
img_pixel = pixel(img)
save("img_pixel.jpg", img_pixel)
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | code | 62 | module PixelArt
include("functions.jl")
export pixel
end
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | code | 696 | using Images
using Clustering
function img_to_arr(img)
return convert(Array{Float64}, channelview(img))
end
"""
pixel(img; n_color=5, w=64, h=64) -> Array{RGB{Float64}}
# Examples
```jldoctest
julia> using PixelArt
julia> using Images
julia> img = load("img.jpg");
julia> img_pixel = pixel(img);
julia> save("img_pixel.jpg", img_pixel)
```
"""
function pixel(img::AbstractArray; n_color=5, w=64, h=64)
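# Downscale to w × h, cluster the flattened RGB channel values into n_color groups with
# k-means, and recolor every pixel with its cluster center to get the reduced-palette look.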
img = imresize(img, (w, h))
img = reshape(img_to_arr(img), (3, :))
color_class = kmeans(img, n_color)
img = hcat((x -> color_class.centers[:, x]).(color_class.assignments)...)
return convert(Array{RGB{Float64}}, colorview(RGB, reshape(img, (3, w, h))))
end
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | code | 243 | using PixelArt
using Test
using TestImages
@testset "PixelArt.jl" begin
img = testimage("l")
@test typeof(pixel(img)) <: AbstractArray
# A wrong path often results in `nothing` being passed.
@test_throws MethodError pixel(nothing)
end
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | docs | 677 | # PixelArt.jl
[](https://abap34.github.io/PixelArt.jl/stable)
[](https://abap34.github.io/PixelArt.jl/dev)
[](https://travis-ci.com/abap34/PixelArt.jl)
🔥 **Automatically generate retro and stylish pixel art.**
<img src="example/img.jpg" width="200">
⏬⏬⏬⏬⏬⏬⏬⏬⏬⏬⏬⏬
<img src="example/img_pixel.jpg" width="200">
# Usage
```julia
julia> using Images
julia> using PixelArt
julia> img = load("img.jpg");
julia> img_pixel = pixel(img);
julia> save("img_pixel.jpg", img_pixel)
```
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.0 | 9d97185495ed9fb981b9c820aa70357ead4e58c3 | docs | 104 | ```@meta
CurrentModule = PixelArt
```
# PixelArt
```@index
```
```@autodocs
Modules = [PixelArt]
```
| PixelArt | https://github.com/abap34/PixelArt.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 2282 | using Documenter
# Get Smesh.jl root directory
smesh_root_dir = dirname(@__DIR__)
# Fix for https://github.com/trixi-framework/Trixi.jl/issues/668
if (get(ENV, "CI", nothing) != "true") && (get(ENV, "SMESH_DOC_DEFAULT_ENVIRONMENT", nothing) != "true")
push!(LOAD_PATH, smesh_root_dir)
end
using Smesh
# Define module-wide setups such that the respective modules are available in doctests
DocMeta.setdocmeta!(Smesh, :DocTestSetup, :(using Smesh); recursive=true)
# Copy some files from the top level directory to the docs and modify them
# as necessary
open(joinpath(@__DIR__, "src", "index.md"), "w") do io
# Point to source file
println(io, """
```@meta
EditURL = "https://github.com/trixi-framework/Smesh.jl/blob/main/README.md"
```
""")
# Write the modified contents
for line in eachline(joinpath(smesh_root_dir, "README.md"))
line = replace(line, "[LICENSE.md](LICENSE.md)" => "[License](@ref)")
println(io, line)
end
end
open(joinpath(@__DIR__, "src", "license.md"), "w") do io
# Point to source file
println(io, """
```@meta
EditURL = "https://github.com/trixi-framework/Smesh/blob/main/LICENSE.md"
```
""")
# Write the modified contents
println(io, "# License")
println(io, "")
for line in eachline(joinpath(smesh_root_dir, "LICENSE.md"))
println(io, "> ", line)
end
end
# Make documentation
makedocs(
# Specify modules for which docstrings should be shown
modules = [Smesh],
# Set sitename to Smesh.jl
sitename="Smesh.jl",
# Provide additional formatting options
format = Documenter.HTML(
# Disable pretty URLs during manual testing
prettyurls = get(ENV, "CI", nothing) == "true",
# Set canonical URL to GitHub pages URL
canonical = "https://trixi-framework.github.io/Smesh.jl/stable"
),
# Explicitly specify documentation structure
pages = [
"Home" => "index.md",
"Advanced topics & developers" => [
"Release management" => "release-management.md",
],
"API reference" => "reference.md",
"License" => "license.md"
],
)
deploydocs(;
repo = "github.com/trixi-framework/Smesh.jl",
devbranch = "main",
push_preview = true
)
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 892 | using Smesh
# Create data points
coordinates_min = [0.0, 0.0]
coordinates_max = [1.0, 1.0]
n_elements_x = 5
n_elements_y = 5
data_points = mesh_bisected_rectangle(coordinates_min, coordinates_max, n_elements_x, n_elements_y,
symmetric_shift = true)
# Create triangulation
vertices = build_delaunay_triangulation(data_points; verbose = false, shuffle = false)
neighbors = delaunay_compute_neighbors(data_points, vertices)
mesh_type = :centroids
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval = build_polygon_mesh(data_points, vertices, mesh_type=mesh_type)
voronoi_neighbors = voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates,
voronoi_vertices, voronoi_vertices_interval,
neighbors, periodicity = (true, true))
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 387 | using Smesh
# Create data points
coordinates_min = [0.0, 0.0]
coordinates_max = [1.0, 1.0]
n_points_x = 4
n_points_y = 5
data_points = mesh_basic(coordinates_min, coordinates_max, n_points_x, n_points_y)
# Create triangulation
vertices = build_delaunay_triangulation(data_points; verbose = true)
neighbors = delaunay_compute_neighbors(data_points, vertices, periodicity=(true, true))
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 1262 | using Smesh
# Create data points
coordinates_min = [0.0, 0.0]
coordinates_max = [1.0, 1.0]
n_points_x = 4
n_points_y = 5
data_points = mesh_basic(coordinates_min, coordinates_max, n_points_x, n_points_y)
# Create triangulation
vertices = build_delaunay_triangulation(data_points; verbose = false)
neighbors = delaunay_compute_neighbors(data_points, vertices)
# different options for the mesh type
# :standard_voronoi => standard Voronoi, but use centroid if the circumcenter lies outside the triangle
# :centroids => not an actual Voronoi, always use centroids and not circumcenters as vertices for the mesh
# :incenters => not an actual Voronoi, always use incenters and not circumcenters as vertices for the mesh
# :pure_voronoi => pure Voronoi mesh (just for experiments, should not be used for computation)
mesh_type = :centroids
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval = build_polygon_mesh(data_points, vertices, mesh_type=mesh_type)
voronoi_neighbors = voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates,
voronoi_vertices, voronoi_vertices_interval,
neighbors, periodicity = (true, true))
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 19007 | module Smesh
using Preferences: @load_preference
using smesh_jll: smesh_jll
using LinearAlgebra: normalize
export build_delaunay_triangulation, delaunay_compute_neighbors
export build_polygon_mesh, voronoi_compute_neighbors
export mesh_basic, mesh_bisected_rectangle
const libsmesh = @load_preference("libsmesh", smesh_jll.libsmesh)
"""
build_delaunay_triangulation(data_points; shuffle = false, verbose = false)
Computes the Delaunay triangulation of the points in `data_points`, an array of size
2 × (number of points) with `[coordinate, point]`. Returns the triangulation as an array
of size 3 × (number of triangles) with `[point_index, triangle_index]`.
"""
function build_delaunay_triangulation(data_points; shuffle = false, verbose = false)
# Pre-allocate output array
npoints = size(data_points, 2)
ve_max = @ccall libsmesh.delaunay_triangulation_temparray_size_c(npoints::Cint)::Cint
ve_out = Matrix{Cint}(undef, 3, ve_max)
# Perform triangulation
ntriangles = @ccall libsmesh.build_delaunay_triangulation_c(ve_out::Ref{Cint},
data_points::Ref{Float64},
npoints::Cint,
ve_max::Cint,
shuffle::Cint,
verbose::Cint)::Cint
# Resize array to appropriate size
ve_out = ve_out[:, 1:ntriangles]
return ve_out
end
"""
delaunay_compute_neighbors(data_points, vertices; periodicity = (false, false))
Calculates the neighbor connectivity for a delaunay triangulation created with
`build_delaunay_triangulation`.
- `data_points` is an array of size 2 × (number of points) with `[coordinate, point]`.
- `vertices` of size 3 × (number of triangles) describes the triangulation with the
structure `[point_index, triangle_index]`
- `periodicity` indicates whether the mesh is periodic in x or y direction.
Note: The feature of periodic meshes is experimental. Right now, it only supports straight
boundaries which are parallel to the specific axis.
"""
function delaunay_compute_neighbors(data_points, vertices; periodicity = (false, false))
n_nodes = size(data_points, 2)
n_elements = size(vertices, 2)
neighbors = Matrix{Cint}(undef, 3, n_elements)
@ccall libsmesh.delaunay_compute_neighbors_c(neighbors::Ref{Cint}, vertices::Ref{Cint},
n_elements::Cint, n_nodes::Cint)::Cvoid
# Periodic neighbors
delaunay_compute_periodic_neighbors!(neighbors, periodicity, data_points, vertices)
return neighbors
end
function delaunay_compute_periodic_neighbors!(neighbors, periodicity, data_points, vertices)
# Add neighboring elements if there are periodic boundaries
if !any(periodicity)
return nothing
end
standard_normal_vector_left = [[-1.0, 0.0], [0.0, -1.0]]
standard_normal_vector_right = [[1.0, 0.0], [0.0, 1.0]]
for dim in 1:2
if periodicity[dim]
# Initialize lists for boundary elements
boundary_elements_left = Int[]
boundary_faces_left = Int[]
boundary_elements_right = Int[]
boundary_faces_right = Int[]
for element in axes(vertices, 2)
for face_index in 1:3
if neighbors[face_index, element] == 0 # Boundary face
node1 = vertices[face_index % 3 + 1, element]
node2 = vertices[(face_index + 1) % 3 + 1, element]
# Get face vector
x_node1 = @views data_points[:, node1]
x_node2 = @views data_points[:, node2]
face = normalize(x_node2 - x_node1)
# Normal vector is face vector rotated clockwise by pi/2
normal = [face[2], -face[1]]
# Add element and face to list if normal vector is valid.
if all(isapprox.(normal, standard_normal_vector_left[dim]))
push!(boundary_elements_left, element)
push!(boundary_faces_left, face_index)
elseif all(isapprox.(normal, standard_normal_vector_right[dim]))
push!(boundary_elements_right, element)
push!(boundary_faces_right, face_index)
end
end
end
end
# Check whether there are the same number of elements on both sides
@assert length(boundary_elements_left) == length(boundary_elements_right) "Different number of elements at boundaries in $dim-th direction!"
@assert length(boundary_elements_left) != 0 "No detected boundary edge in $dim-th direction!"
# Get coordinates for sorting
# Note: In vertices the points are ordered counterclockwise:
# To get the lowest point on the left/bottom, we use the point with index `face_index + 2`.
# To get the lowest point on the right/top, we use the point with index `face_index + 1`.
coord_elements_left = [data_points[dim % 2 + 1, vertices[(boundary_faces_left[i] + 1) % 3 + 1, boundary_elements_left[i]]]
for i in eachindex(boundary_elements_left)]
coord_elements_right = [data_points[dim % 2 + 1, vertices[boundary_faces_right[i] % 3 + 1, boundary_elements_right[i]]]
for i in eachindex(boundary_elements_right)]
p_left = sortperm(coord_elements_left)
p_right = sortperm(coord_elements_right)
boundary_elements_left = boundary_elements_left[p_left]
boundary_elements_right = boundary_elements_right[p_right]
boundary_faces_left = boundary_faces_left[p_left]
boundary_faces_right = boundary_faces_right[p_right]
# Check whether boundary faces have the same length
coord_elements_left = coord_elements_left[p_left]
coord_elements_right = coord_elements_right[p_right]
for i in 1:(length(boundary_elements_left) - 1)
face_length_left = abs(coord_elements_left[i] - coord_elements_left[i + 1])
face_length_right = abs(coord_elements_right[i] - coord_elements_right[i + 1])
@assert isapprox(face_length_left, face_length_right, atol=eps()) "Length of boundary faces in $dim-th direction do not match!"
end
# Check length of last boundary face
face_length_left = abs(coord_elements_left[end] - data_points[dim % 2 + 1, vertices[boundary_faces_left[end] % 3 + 1, boundary_elements_left[end]]])
face_length_right = abs(coord_elements_right[end] - data_points[dim % 2 + 1, vertices[(boundary_faces_right[end] + 1) % 3 + 1, boundary_elements_right[end]]])
@assert isapprox(face_length_left, face_length_right, atol=eps()) "Length of boundary faces in $dim-th direction do not match!"
# Add neighboring elements to neighbor data structure
for i in eachindex(boundary_elements_left)
element_left = boundary_elements_left[i]
element_right = boundary_elements_right[i]
face_left = boundary_faces_left[i]
face_right = boundary_faces_right[i]
@assert neighbors[face_left, element_left] == 0
@assert neighbors[face_right, element_right] == 0
neighbors[face_left, element_left] = element_right
neighbors[face_right, element_right] = element_left
end
end
end
return nothing
end
"""
build_polygon_mesh(data_points, triangulation_vertices; mesh_type=:centroids, orthogonal_boundary_edges=true)
There are four different mesh types:
- `:standard_voronoi` => standard voronoi, but use centroid if the circumcenter lies outside the triangle
- `:centroids` => not an actual voronoi, always use centroids and not circumcenters as vertices for the mesh
- `:incenters` => not an actual voronoi, always use incenters and not circumcenters as vertices for the mesh
- `:pure_voronoi` => pure Voronoi mesh (just for experiments, should not be used for computation)
"""
function build_polygon_mesh(data_points, triangulation_vertices; mesh_type=:centroids, orthogonal_boundary_edges=true)
mesh_type_dict = Dict(:pure_voronoi => Cint(-1), :standard_voronoi => Cint(0), :centroids => Cint(1), :incenters => Cint(2))
array_sizes = Vector{Cint}(undef, 3) # npt_voronoi, nve_voronoi, nelem_voronoi==nnode
npt_delaunay = size(data_points, 2)
nelem_delaunay = size(triangulation_vertices, 2)
nnode = npt_delaunay
orthogonal_boundary_edges_bool = orthogonal_boundary_edges ? 1 : 0
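# First query the library for the required output array sizes, then allocate the arrays in
# Julia and let the second ccall fill them.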
@ccall libsmesh.polygon_mesh_temparray_size_c(array_sizes::Ref{Cint},
triangulation_vertices::Ref{Cint},
data_points::Ref{Float64},
mesh_type_dict[mesh_type]::Cint,
orthogonal_boundary_edges_bool::Cint,
npt_delaunay::Cint,
nelem_delaunay::Cint,
nnode::Cint)::Cvoid
npt_voronoi, nve_voronoi, nelem_voronoi = array_sizes
voronoi_vertices_coordinates = Matrix{Cdouble}(undef, 2, npt_voronoi)
voronoi_vertices = Array{Cint}(undef, nve_voronoi)
voronoi_vertices_interval = Matrix{Cint}(undef, 2, nelem_voronoi)
@ccall libsmesh.build_polygon_mesh_c(voronoi_vertices_coordinates::Ref{Float64},
voronoi_vertices::Ref{Cint},
voronoi_vertices_interval::Ref{Cint},
triangulation_vertices::Ref{Cint},
data_points::Ref{Float64},
mesh_type_dict[mesh_type]::Cint,
orthogonal_boundary_edges_bool::Cint,
npt_delaunay::Cint,
nelem_delaunay::Cint,
npt_voronoi::Cint,
nve_voronoi::Cint,
nelem_voronoi::Cint)::Cvoid
return voronoi_vertices_coordinates, voronoi_vertices, voronoi_vertices_interval
end
"""
voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval, delaunay_neighbors;
periodicity = (false, false))
Calculates the neighbor connectivity for a polygon mesh created with `build_polygon_mesh`.
- `vertices` defines the structure of the triangulation. An array of size 3 × (number of triangles) with `[point_index, triangle_index]`.
- `voronoi_vertices_coordinates` contains the coordinates of Voronoi vertices in `voronoi_vertices`.
- `voronoi_vertices`: All points within the polygon mesh are sorted counterclockwise for each element.
- `voronoi_vertices_interval` is an array of size 2 × (number of elements) and contains the
starting and ending point index for every element in `voronoi_vertices`.
- `delaunay_neighbors` is the connectivity data structure created by `delaunay_compute_neighbors`.
- `periodicity` indicates whether the mesh is periodic in x or y direction.
Note: The feature of periodic meshes is experimental. Right now, it only supports straight
boundaries which are parallel to the specific axis.
"""
function voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval, delaunay_neighbors;
periodicity = (false, false))
n_vertices_voronoi = length(voronoi_vertices)
n_elements_voronoi = size(voronoi_vertices_interval, 2)
n_element_delaunay = size(delaunay_neighbors, 2)
voronoi_neighbors = Vector{Cint}(undef, n_vertices_voronoi)
@ccall libsmesh.voronoi_compute_neighbors_c(voronoi_neighbors::Ref{Cint},
vertices::Ref{Cint},
voronoi_vertices::Ref{Cint},
voronoi_vertices_interval::Ref{Cint},
n_element_delaunay::Cint,
n_vertices_voronoi::Cint,
n_elements_voronoi::Cint)::Cvoid
# Periodic neighbors
voronoi_compute_periodic_neighbors!(voronoi_neighbors, periodicity,
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval)
return voronoi_neighbors
end
"""
voronoi_compute_periodic_neighbors!(voronoi_neighbors, periodicity,
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval)
Adds periodic neighbor connectivity to `voronoi_neighbors` in place by matching boundary
faces on opposite sides of the domain in each periodic direction.
"""
function voronoi_compute_periodic_neighbors!(voronoi_neighbors, periodicity,
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval)
# Add neighboring elements if there are periodic boundaries
if !any(periodicity)
return nothing
end
standard_normal_vector_left = [[-1.0, 0.0], [0.0, -1.0]]
standard_normal_vector_right = [[1.0, 0.0], [0.0, 1.0]]
for dim in 1:2
if periodicity[dim]
# Initialize lists for boundary elements
boundary_elements_left = Int[]
boundary_faces_left = Int[]
boundary_elements_right = Int[]
boundary_faces_right = Int[]
for element in axes(voronoi_vertices_interval, 2)
face_index_start = voronoi_vertices_interval[1, element]
face_index_end = voronoi_vertices_interval[2, element]
for face_index in face_index_start:face_index_end
if voronoi_neighbors[face_index] == 0 # Boundary face
node1 = voronoi_vertices[face_index]
if face_index < face_index_end
node2 = voronoi_vertices[face_index + 1]
else
node2 = voronoi_vertices[face_index_start]
end
# Get face vector
x_node1 = @views voronoi_vertices_coordinates[:, node1]
x_node2 = @views voronoi_vertices_coordinates[:, node2]
face = normalize(x_node2 - x_node1)
# Normal vector is face vector rotated clockwise by pi/2
normal = [face[2], -face[1]]
# Add element and face to list if normal vector is valid.
if all(isapprox.(normal, standard_normal_vector_left[dim]))
push!(boundary_elements_left, element)
push!(boundary_faces_left, face_index)
elseif all(isapprox.(normal, standard_normal_vector_right[dim]))
push!(boundary_elements_right, element)
push!(boundary_faces_right, face_index)
end
end
end
end
# Check whether there are the same number of elements on both sides
@assert length(boundary_elements_left) == length(boundary_elements_right) "Different number of elements at boundaries in $dim-th direction!"
@assert length(boundary_elements_left) != 0 "No detected boundary edge in $dim-th direction!"
# Get coordinates for sorting
# Note: In voronoi_vertices the points are ordered counterclockwise:
# To get the lowest point on the left/bottom, we use the end point of the face.
# To get the lowest point on the right/top, we use the start point of the face.
coord_elements_left = [voronoi_vertices_coordinates[dim % 2 + 1, voronoi_vertices[boundary_faces_left[i] + 1]]
for i in eachindex(boundary_elements_left)]
coord_elements_right = [voronoi_vertices_coordinates[dim % 2 + 1, voronoi_vertices[boundary_faces_right[i]]]
for i in eachindex(boundary_elements_right)]
# Get sorting permutation
p_left = sortperm(coord_elements_left)
p_right = sortperm(coord_elements_right)
# Permute lists
boundary_elements_left = boundary_elements_left[p_left]
boundary_elements_right = boundary_elements_right[p_right]
boundary_faces_left = boundary_faces_left[p_left]
boundary_faces_right = boundary_faces_right[p_right]
# Check whether boundary faces have the same length
coord_elements_left = coord_elements_left[p_left]
coord_elements_right = coord_elements_right[p_right]
for i in 1:(length(boundary_elements_left) - 1)
face_length_left = abs(coord_elements_left[i] - coord_elements_left[i + 1])
face_length_right = abs(coord_elements_right[i] - coord_elements_right[i + 1])
@assert isapprox(face_length_left, face_length_right, atol=eps()) "Length of boundary faces in $dim-th direction do not match!"
end
# Check length of last boundary face.
face_length_left = abs(coord_elements_left[end] - voronoi_vertices_coordinates[dim % 2 + 1, voronoi_vertices[boundary_faces_left[end]]])
face_length_right = abs(coord_elements_right[end] - voronoi_vertices_coordinates[dim % 2 + 1, voronoi_vertices[boundary_faces_right[end] + 1]])
@assert isapprox(face_length_left, face_length_right, atol=eps()) "Length of boundary faces in $dim-th direction do not match!"
# Add neighboring elements to neighbor data structure
for i in eachindex(boundary_elements_left)
element_left = boundary_elements_left[i]
element_right = boundary_elements_right[i]
face_left = boundary_faces_left[i]
face_right = boundary_faces_right[i]
@assert voronoi_neighbors[face_left] == 0 && voronoi_neighbors[face_right] == 0
voronoi_neighbors[face_left] = element_right
voronoi_neighbors[face_right] = element_left
end
end
end
return nothing
end
include("standard_meshes.jl")
end # module Smesh
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 5970 | """
mesh_basic(coordinates_min, coordinates_max, n_points_x, n_points_y)
Creates points for a regular grid, shifting every second row of points to avoid a simple
mesh of bisected rectangles. This results in a unique triangulation.
"""
function mesh_basic(coordinates_min, coordinates_max, n_points_x, n_points_y)
@assert n_points_x > 1 "n_points_x has to be at least 2."
@assert n_points_y > 1 "n_points_y has to be at least 2."
dx = (coordinates_max[1] - coordinates_min[1]) / (n_points_x - 1)
dy = (coordinates_max[2] - coordinates_min[2]) / (n_points_y - 1)
# Number of points:
# Tensorproduct: n_points_x * n_points_y
# Add for half the rows (rounded off) one point each
n_points = n_points_x * n_points_y + div(n_points_y - n_points_y % 2, 2)
points = Matrix{eltype(coordinates_min)}(undef, 2, n_points)
count = 1
for j in 1:n_points_y
for i in 1:n_points_x
points[1, count] = coordinates_min[1] + (i - 1) * dx
points[2, count] = coordinates_min[2] + (j - 1) * dy
if j % 2 == 0 && i != 1
points[1, count] -= 0.5dx
end
count += 1
if j % 2 == 0 && i == n_points_x
points[1, count] = points[1, count - 1] + 0.5dx
points[2, count] = points[2, count - 1]
count += 1
end
end
end
return points
end
"""
mesh_bisected_rectangle(coordinates_min, coordinates_max, n_elements_x, n_elements_y;
symmetric_shift = false)
Creates points in a regular manner. The resulting non-unique triangulation consists of bisected
rectangles. To allow periodic boundaries for the resulting polygon mesh, it is possible to enable
a symmetric shift.
"""
function mesh_bisected_rectangle(coordinates_min, coordinates_max, n_elements_x, n_elements_y;
symmetric_shift = false)
@assert n_elements_x > 0 "n_elements_x has to be at least 1."
@assert n_elements_y > 0 "n_elements_y has to be at least 1."
dx = (coordinates_max[1] - coordinates_min[1]) / n_elements_x
dy = (coordinates_max[2] - coordinates_min[2]) / n_elements_y
n_points = (n_elements_x + 1) * (n_elements_y + 1)
points = Matrix{eltype(coordinates_min)}(undef, 2, n_points)
for j in 0:n_elements_y
for i = 0:n_elements_x
k = j * (n_elements_x + 1) + i + 1
points[:, k] = [coordinates_min[1] + i * dx, coordinates_min[2] + j * dy]
end
end
# Symmetric shift to get unique triangulation and therefore possible periodic boundaries in
# the polygon mesh
if symmetric_shift
domain_center = 0.5 * [coordinates_min[1] + coordinates_max[1],
coordinates_min[2] + coordinates_max[2]]
s = [dx, dy]
for i in axes(points, 2)
# Do not move boundary points with boundary_distance <= 10^-6
boundary_distance = min(abs(coordinates_min[1] - points[1, i]),
abs(coordinates_max[1] - points[1, i]),
abs(coordinates_min[2] - points[2, i]),
abs(coordinates_max[2] - points[2, i]))
if boundary_distance > 1.0e-8 # inner point
d = sqrt(sum((domain_center .- points[:,i]).^2))
points[:, i] .+= 1.0e-6 * d * s .* (domain_center .- points[:, i])
end
end
if isodd(n_elements_x)
for i in axes(points, 2)
# Do not move boundary points with boundary_distance <= 10^-6
boundary_distance = min(abs(coordinates_min[1] - points[1, i]),
abs(coordinates_max[1] - points[1, i]),
abs(coordinates_min[2] - points[2, i]),
abs(coordinates_max[2] - points[2, i]))
if boundary_distance > 1.0e-8 # inner point
# Only move the two most inner points columns
distance_center_x = abs(domain_center[1] - points[1, i])
if distance_center_x <= dx
points[1, i] += 1.0e-6 * dx
end
end
end
end
if isodd(n_elements_y)
for i in axes(points, 2)
# Do not move boundary points with boundary_distance <= 10^-6
boundary_distance = min(abs(coordinates_min[1] - points[1, i]),
abs(coordinates_max[1] - points[1, i]),
abs(coordinates_min[2] - points[2, i]),
abs(coordinates_max[2] - points[2, i]))
if boundary_distance > 1.0e-8 # inner point
# Only move the two most inner points rows
distance_center_y = abs(domain_center[2] - points[2, i])
if distance_center_y <= dy
points[2, i] += 1.0e-6 * dy
end
end
end
end
end
# This directly creates the connectivity of a triangulation. Every rectangle is bisected
# in the same direction.
# n_triangles = 2 * n_elements_x * n_elements_y
# vertices = Matrix{Cint}(undef, 3, n_triangles)
# k = 0
# for j in 1:n_elements_y
# for i in 1:n_elements_x
# k = k + 1
# vertices[:, k] .= [(j - 1) * (n_elements_x + 1) + i,
# (j - 1) * (n_elements_x + 1) + i + 1,
# j * (n_elements_x + 1) + i]
# k = k + 1
# vertices[:, k] .= [(j - 1) * (n_elements_x + 1) + i + 1,
# j * (n_elements_x + 1) + i + 1,
# j * (n_elements_x + 1) + i]
# end
# end
return points
end | Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 144 | using Test
@time @testset verbose=true showtiming=true "Smesh.jl tests" begin
include("test_unit.jl")
include("test_examples.jl")
end
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 622 | module TestExamples
using Test
using Smesh
@testset verbose=true showtiming=true "test_examples.jl" begin
@testset verbose=true showtiming=true "examples/build_delaunay_triangulation.jl" begin
@test_nowarn include("../examples/build_delaunay_triangulation.jl")
end
@testset verbose=true showtiming=true "examples/build_polygon_mesh.jl" begin
@test_nowarn include("../examples/build_polygon_mesh.jl")
end
@testset verbose=true showtiming=true "examples/build_bisected_rectangle.jl" begin
@test_nowarn include("../examples/build_bisected_rectangle.jl")
end
end # @testset "test_examples.jl"
end # module
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | code | 4874 | module TestUnit
using Test
using Smesh
@testset verbose=true showtiming=true "test_unit.jl" begin
@testset verbose=true showtiming=true "meshes" begin
coordinates_min = [0.0, 0.0]
coordinates_max = [1.0, 1.0]
@testset verbose=true showtiming=true "mesh_basic" begin
points = mesh_basic(coordinates_min, coordinates_max, 2, 2)
@test points == [0.0 1.0 0.0 0.5 1.0; 0.0 0.0 1.0 1.0 1.0]
end
@testset verbose=true showtiming=true "mesh_bisected_rectangle" begin
points = mesh_bisected_rectangle(coordinates_min, coordinates_max, 1, 1)
@test points == [0.0 1.0 0.0 1.0; 0.0 0.0 1.0 1.0]
end
end
@testset verbose=true showtiming=true "build_delaunay_triangulation" begin
data_points = collect([0.0 0.0
1.0 0.0
1.0 1.0
0.0 1.0]')
@test build_delaunay_triangulation(data_points) == [3 1; 1 3; 2 4]
end
@testset verbose=true showtiming=true "delaunay_compute_neighbors" begin
data_points = mesh_basic([0.0, 0.0], [1.0, 1.0], 2, 3)
vertices = Cint[5 1 3 4 3 6; 7 2 1 2 4 4; 4 4 4 5 6 7]
@testset "non-periodic" begin
neighbors = delaunay_compute_neighbors(data_points, vertices)
@test neighbors == [6 4 2 0 6 1;
4 3 5 1 0 0;
0 0 0 2 3 5]
end
@testset "periodic" begin
neighbors = delaunay_compute_neighbors(data_points, vertices, periodicity = (true, false))
@test neighbors == [6 4 2 3 6 1;
4 3 5 1 1 0;
5 0 4 2 3 5]
neighbors = delaunay_compute_neighbors(data_points, vertices, periodicity = (false, true))
@test neighbors == [6 4 2 0 6 1;
4 3 5 1 0 2;
0 6 0 2 3 5]
neighbors = delaunay_compute_neighbors(data_points, vertices, periodicity = (true, true))
@test neighbors == [6 4 2 3 6 1;
4 3 5 1 1 2;
5 6 4 2 3 5]
end
end
@testset verbose=true showtiming=true "build_polygon_mesh" begin
data_points = collect([0.0 0.0
1.0 0.0
1.0 1.0
0.0 1.0]')
vertices = Cint[3 1; 1 3; 2 4]
neighbors = Cint[0 0; 0 0; 2 1]
voronoi_vertices_coordinates, voronoi_vertices, voronoi_vertices_interval = build_polygon_mesh(data_points, vertices)
@test voronoi_vertices_interval == [1 7 12 18; 5 10 16 21]
end
@testset verbose=true showtiming=true "voronoi_compute_neighbors" begin
data_points = collect([0.0 0.0
1.0 0.0
1.0 1.0
0.0 1.0]')
vertices = Cint[3 1; 1 3; 2 4]
neighbors = Cint[0 0; 0 0; 2 1]
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval = build_polygon_mesh(data_points, vertices)
@testset "non-periodic" begin
voronoi_neighbor = voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates,
voronoi_vertices, voronoi_vertices_interval,
neighbors)
@test voronoi_neighbor == [3, 4, 0, 0, 2, 0, 1, 0, 0, 3, 0, 1, 2, 0, 0, 4, 0, 3, 0, 0, 1, 0]
end
@testset "periodic - no valid mesh" begin
@test_throws AssertionError begin
voronoi_neighbor = voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates,
voronoi_vertices, voronoi_vertices_interval,
neighbors, periodicity = (true, true))
end
end
@testset "periodic" begin
data_points = mesh_basic([0.0, 0.0], [1.0, 1.0], 2, 3)
vertices = Cint[5 1 3 4 3 6; 7 2 1 2 4 4; 4 4 4 5 6 7]
neighbors = Cint[6 4 2 0 6 1; 4 3 5 1 0 0; 0 0 0 2 3 5]
voronoi_vertices_coordinates, voronoi_vertices,
voronoi_vertices_interval = build_polygon_mesh(data_points, vertices, mesh_type = :centroids)
voronoi_neighbor = voronoi_compute_neighbors(vertices, voronoi_vertices_coordinates,
voronoi_vertices, voronoi_vertices_interval,
neighbors, periodicity = (true, true))
@test voronoi_neighbor == [4, 3, 2, 6, 2, 0, 4, 1, 7, 1, 5, 0, 4, 6, 5, 5, 1, 0, 6, 3, 1,
2, 5, 7, 4, 2, 3, 3, 7, 0, 4, 7, 1, 7, 3, 0, 4, 5, 6, 2, 6, 0]
end
end
end # @testset "test_unit.jl"
end # module
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | docs | 4143 | # Smesh.jl
[](https://trixi-framework.github.io/Smesh.jl/stable)
[](https://trixi-framework.github.io/Smesh.jl/dev)
[](https://github.com/trixi-framework/Smesh.jl/actions?query=workflow%3ACI)
[](https://coveralls.io/github/trixi-framework/Smesh.jl)
[](https://codecov.io/gh/trixi-framework/Smesh.jl)
[](https://opensource.org/license/mit/)
[](https://doi.org/10.5281/zenodo.10581816)
Smesh.jl is a Julia wrapper package for [smesh](https://github.com/trixi-framework/smesh),
a simple Fortran package for generating and handling unstructured triangular and polygonal
meshes.
## Getting started
### Prerequisites
If you have not yet installed Julia, please [follow the instructions for your
operating system](https://julialang.org/downloads/platform/).
[Smesh.jl](https://github.com/trixi-framework/Smesh.jl) works with Julia v1.8
and later on Linux, macOS and Windows platforms.
*Note: On pre-Apple Silicon systems with macOS, Julia v1.10 or later is required.*
### Installation
Since Smesh.jl is a registered Julia package, you can install it by executing
the following command in the Julia REPL:
```julia
julia> import Pkg; Pkg.add("Smesh")
```
By default, Smesh.jl uses pre-compiled binaries of the smesh package that will get
automatically installed when obtaining Smesh.jl. However, you can also make use of a local
smesh build. For this, create a `LocalPreferences.toml` file next to your `Project.toml`
for the project in which you use Smesh.jl. It should have the following content:
* On Linux:
```toml
[Smesh]
libsmesh = "<smesh-install-prefix>/lib/libsmesh.so"
```
* On macOS:
```toml
[Smesh]
libsmesh = "<smesh-install-prefix>/lib/libsmesh.dylib"
```
* On Windows:
```toml
[Smesh]
libsmesh = "<smesh-install-prefix>/bin/libsmesh.dll"
```
Where `<smesh-install-prefix>` is where you have installed the local smesh build.
### Usage
The easiest way to get started is to run one of the examples from the
[`examples`](https://github.com/trixi-framework/Smesh.jl/tree/main/examples) directory by
`include`ing them in Julia, e.g.,
```
julia> using Smesh
julia> include(joinpath(pkgdir(Smesh), "examples", "build_delaunay_triangulation.jl"))
Computing Delaunay triangulation.
Triangulation elements: 2
Total flipped edges: 0
Average search time: 1.25
Flips/triangle: 0.00
Flips/node: 0.00
3×2 Matrix{Int64}:
3 1
1 3
2 4
```
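The mesh creation and triangulation routines can also be called directly. The following
sketch mirrors the calls exercised in the package's unit tests; see the scripts in
[`examples`](https://github.com/trixi-framework/Smesh.jl/tree/main/examples) for complete,
runnable versions:
```julia
using Smesh
# a small point cloud on the unit square
data_points = mesh_basic([0.0, 0.0], [1.0, 1.0], 2, 3)
# Delaunay triangulation (triangle connectivity) and its neighbor information
vertices = build_delaunay_triangulation(data_points)
neighbors = delaunay_compute_neighbors(data_points, vertices)
# polygon mesh built on top of the triangulation
voronoi_vertices_coordinates, voronoi_vertices, voronoi_vertices_interval =
    build_polygon_mesh(data_points, vertices)
```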
## Referencing
If you use Smesh.jl in your own research, please cite this repository as follows:
```bibtex
@misc{chiocchetti2024smesh_jl,
title={Smesh.jl: {A} {J}ulia wrapper for the Fortran package smesh to generate and handle unstructured meshes},
author={Chiocchetti, Simone and Bolm, Benjamin and Schlottke-Lakemper, Michael},
year={2024},
howpublished={\url{https://github.com/trixi-framework/Smesh.jl}},
doi={10.5281/zenodo.10581816}
}
```
Please also consider citing the upstream package
[smesh](https://github.com/trixi-framework/smesh) (`doi:10.5281/zenodo.10579422`) itself.
## Authors
Smesh.jl was initiated by
[Simone Chiocchetti](https://www.mi.uni-koeln.de/NumSim/dr-simone-chiocchetti/)
(University of Cologne, Germany),
[Benjamin Bolm](https://www.mi.uni-koeln.de/NumSim/benjamin-bolm/)
(University of Cologne, Germany), and
[Michael Schlottke-Lakemper](https://lakemper.eu) (RWTH Aachen University/High-Performance
Computing Center Stuttgart (HLRS), Germany)
who are also its principal maintainers.
## License and contributing
Smesh.jl and smesh itself are available under the MIT license (see [LICENSE.md](LICENSE.md)).
Contributions by the community are very welcome!
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | docs | 88 | # API reference
```@meta
CurrentModule = Smesh
```
```@autodocs
Modules = [Smesh]
```
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.3 | 468f06564310613dda3a584c4508d709fa1de025 | docs | 2931 | # Release management
To create a new release for Smesh.jl, perform the following steps:
1) Make sure that all PRs and changes that you want to go into the release are merged to
`main` and that the latest commit on `main` has passed all CI tests.
2) Determine the currently released version of Smesh.jl, e.g., on the
[release page](https://github.com/trixi-framework/Smesh.jl/releases). For this manual,
we will assume that the latest release was `v0.2.3`.
3) Decide on the next version number. We follow [semantic versioning](https://semver.org/),
thus each version is of the form `vX.Y.Z` where `X` is the major version, `Y` the minor
version, and `Z` the patch version. In this manual, we assume that the major version is
always `0`, thus the decision process on the new version is as follows:
* If the new release contains *breaking changes* (i.e., user code might not work as
before without modifications), increase the *minor* version by one and set the
*patch* version to zero. In our example, the new version should thus be `v0.3.0`.
* If the new release only contains minor modifications and/or bug fixes, the *minor*
version is kept as-is and the *patch* version is increased by one. In our example, the
new version should thus be `v0.2.4`.
4) Edit the `version` string in the
[`Project.toml`](https://github.com/trixi-framework/Smesh.jl/blob/main/Project.toml)
and set it to the new version. Push/merge this change to `main`.
5) Go to GitHub and add a comment to the commit that you would like to become the new
release (typically this will be the commit where you just updated the version). You can
comment on a commit by going to the
[commit overview](https://github.com/trixi-framework/Smesh.jl/commits/main/) and clicking
on the title of the commit. The comment should contain the following text:
```
@JuliaRegistrator register
```
6) Wait for the magic to happen! Specifically, JuliaRegistrator will create a new PR to the
Julia registry with the new release information. After a grace period of ~15 minutes,
this PR will be merged automatically. A short while after,
[TagBot](https://github.com/trixi-framework/Smesh.jl/blob/main/.github/workflows/TagBot.yml)
will create a new release of Smesh.jl in our GitHub repository.
7) Once the new release has been created, the new version can be obtained through the Julia
package manager as usual.
8) To make sure people do not mistake the latest state of `main` as the latest release, we
set the version in the `Project.toml` to a *development* version. The development version
should be the latest released version, with the patch version incremented by one, and the
`-dev` suffix added. For example, if you just released `v0.3.0`, the new development
version should be `v0.3.1-dev`. If you just released `v0.2.4`, the new development
version should be `v0.2.5-dev`.
| Smesh | https://github.com/trixi-framework/Smesh.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 201 | # Weave readme
using Pkg
cd("c:/git/CuCountMap")
Pkg.activate("c:/git/CuCountMap/readme-env")
using Weave
weave("README.jmd", out_path=:pwd, doctype="github")
if false
tangle("README.jmd")
end
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 2524 | using Pkg
Pkg.activate("c:/git/CuCountmap")
using CuArrays
CuArrays.allowscalar(false)
using CUDAnative
using CUDAnative: atomic_add!
using StatsBase
import StatsBase: countmap
function cucountmap!(buffer, v)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x
stride = gridDim().x * blockDim().x
for j = i:stride:length(v)
b = Int(v[j]) + Int(32769)
@atomic buffer[b] = buffer[b] + UInt32(1)
end
return
end
function countmap(v::CuArray{T, N, NN}; threads = 256, blocks = 1024) where {T, N, NN}
buffer = CuArrays.zeros(UInt32, 2^16)
CuArrays.@sync @cuda threads = threads blocks = blocks cucountmap!(buffer, v)
buffer
end
function cucountmap2!(buffer, v)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x
stride = gridDim().x * blockDim().x
for j = i:stride:length(v)
b = Int(v[j]) + Int(32769)
@atomic buffer[b, blockIdx().x] = buffer[b, blockIdx().x] + UInt32(1)
end
return
end
function countmap2(v::CuArray{T, N, NN}; threads = 256, blocks = 1024) where {T, N, NN}
buffer = CuArrays.zeros(UInt32, 2^16, blocks)
CuArrays.@sync @cuda threads = threads blocks = blocks cucountmap2!(buffer, v)
for i in 2:blocks
buffer[:, 1] .+= buffer[:, i]
end
buffer[:, 1]
end
v = CuArray(rand(Int16, 100_000_000))
################################################################
# basic check v2
################################################################
buffer = CuArrays.zeros(UInt32, 2^16, 1024)
CuArrays.@sync @cuda threads = 512 blocks = 1024 cucountmap2!(buffer, v)
@time countmap2(v)
@time countmap(v)
vc = collect(v)
################################################################
# basic check
################################################################
buffer = CuArrays.zeros(UInt32, 2^16)
CuArrays.@sync @cuda threads = 512 blocks = 1024 cucountmap!(buffer, v)
countmap(v)
vc = collect(v)
################################################################
# detail testing
################################################################
res = @time countmap(collect(v))
buffer_check = zeros(Int, 2^16)
for (k, i) in collect(res)
buffer_check[k+32769] = i
end
collect(buffer) == buffer_check
#@device_code_warntype cucountmap!(buffer, v)
################################################################
# benchmark
################################################################
using BenchmarkTools
vc = collect(v)
@benchmark countmap($vc)
@benchmark countmap($v)
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 1272 | using CUDA
using CuCountMap
using StatsBase: countmap
v = rand(Int16, 100_000_000);
using BenchmarkTools
@benchmark gpu_countmap = cucountmap($v)
# BenchmarkTools.Trial:
# memory estimate: 4.17 MiB
# allocs estimate: 190
# --------------
# minimum time: 41.275 ms (0.00% GC)
# median time: 44.494 ms (0.00% GC)
# mean time: 52.756 ms (0.48% GC)
# maximum time: 297.796 ms (3.98% GC)
# --------------
# samples: 95
# evals/sample: 1
@benchmark cpu_countmap = countmap($v)
# BenchmarkTools.Trial:
# memory estimate: 4.17 MiB
# allocs estimate: 37
# --------------
# minimum time: 132.618 ms (0.00% GC)
# median time: 134.176 ms (0.00% GC)
# mean time: 134.874 ms (0.19% GC)
# maximum time: 145.168 ms (6.76% GC)
# --------------
# samples: 38
# evals/sample: 1
cuv = CUDA.cu(v)
@benchmark gpu_countmap2 = countmap(cuv)
# BenchmarkTools.Trial:
# memory estimate: 4.17 MiB
# allocs estimate: 97
# --------------
# minimum time: 5.472 ms (0.00% GC)
# median time: 5.768 ms (0.00% GC)
# mean time: 6.125 ms (3.91% GC)
# maximum time: 201.707 ms (96.90% GC)
# --------------
# samples: 816
# evals/sample: 1 | CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 874 | # using Pkg
# Pkg.activate("c:/git/CuCountmap")
using SortingAlgorithms
using CuCountMap
using BenchmarkTools # required for the @benchmark calls below
v = rand(UInt64, 100_000_000);
@benchmark sorted_v = gpuradixsort($v)
# BenchmarkTools.Trial:
# memory estimate: 1.49 GiB
# allocs estimate: 2550
# --------------
# minimum time: 3.351 s (0.10% GC)
# median time: 3.404 s (1.82% GC)
# mean time: 3.404 s (1.82% GC)
# maximum time: 3.457 s (3.48% GC)
# --------------
# samples: 2
# evals/sample: 1
@benchmark sorted_v_cpu = sort($v, alg=RadixSort)
# BenchmarkTools.Trial:
# memory estimate: 1.49 GiB
# allocs estimate: 18
# --------------
# minimum time: 3.648 s (0.08% GC)
# median time: 3.708 s (1.75% GC)
# mean time: 3.708 s (1.75% GC)
# maximum time: 3.767 s (3.36% GC)
# --------------
# samples: 2
# evals/sample: 1 | CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 127 | module CuCountMap
export countmap, cucountmap, cucountmap!
include("countmap.jl")
# include("gpuradixsort.jl")
end # module
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 1704 | """Attempting to implement bitonic sort"""
using CUDA
# sort kernel
bisort!(shared) = begin
k = UInt16(2)
NUM = length(shared)
while (k <= NUM)
j = div(k, 2)
while j >= 1
tid = UInt(((blockIdx().x - 1) * blockDim().x + threadIdx().x)-1)
ixj = tid⊻UInt(j)
if ixj > tid
if (tid & k) == 0
if shared[tid+1] > shared[ixj+1]
tmp = shared[ixj+1]
shared[ixj+1] = shared[tid+1]
shared[tid+1] = tmp
end
else
if shared[tid+1] < shared[ixj+1]
tmp = shared[ixj+1]
shared[ixj+1] = shared[tid+1]
shared[tid+1] = tmp
end
end
end
sync_threads() # make this stage's swaps visible to the rest of the block
j = div(j, 2)
end
k *= 2
end
return
end
bitonicsort!(cushared, NUM) = begin
nblocks = ceil(Int, NUM/256)
@cuda threads = 256 blocks = nblocks bisort!(cushared)
end
using SortingAlgorithms, BenchmarkTools
shared = rand(Float32, 2^26)
cpusort = @belapsed sort!($shared, alg=RadixSort) #0.788
shared = rand(Float32, 2^26)
measure_gpu_sort(shared) = begin
res = Float64[]
for i = 1:3
cushared = cu(shared)
# sorted_shared = sort(shared, alg=RadixSort)
# println("exp false;")
# println("got $(collect(cushared) |> issorted)")
t = Base.@elapsed begin
bitonicsort!(cushared, length(shared))
CUDA.synchronize()
end
# xx = collect(cushared)
# println("exp true;")
# println("got $(xx |> issorted); max error: $(1_000_000_000maximum(xx .- sorted_shared))")
# push!(res, t)
end
res
end
@time measure_gpu_sort(shared)
# 6.457073086
# 2.774147852
# 2.771599214
# 2.770980271
# 2.778133025
# 2.769555927
# 2.799603755
# 2.774497496
# 2.790657341
# 2.790034242
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 903 | export countmap, cucountmap, cucountmap!
using CUDA
using StatsBase
import StatsBase: countmap
cucountmap(v) = cu(v) |> countmap
# The fastest variant from the countmap benchmarks
function cucountmap!(buffer, v::CuDeviceArray{T, 1, NN2}) where {T, NN2} #(buffer::CuArray{UInt32, 1, NN1}, v::CuArray{T, 1, NN2}) where {NN1, T, NN2}
# grid-stride loop: each thread handles every `stride`-th element of v
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x
stride = gridDim().x * blockDim().x
addnum = 1 - Int(typemin(T)) # shift so that typemin(T) lands in bucket 1
for j = i:stride:length(v)
b = Int(v[j]) + addnum
CUDA.@atomic buffer[b] = buffer[b] + 1 # atomic: several threads may hit the same bucket
end
return
end
function countmap(v::CuArray{T}; threads = 256, blocks = 1024) where {T}
st = sizeof(T)
@assert st <= 2
buffer = CUDA.zeros(Int, 2^(8st))
CUDA.@sync @cuda threads = threads blocks = blocks cucountmap!(buffer, v)
values = typemin(T) : typemax(T)
Dict(zip(values, collect(buffer)))
end
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 2072 | export gpuradixsort, gpuradixsort!
using CUDA
# CUDA.allowscalar(false)
const RADIX_SIZE = 11
const RADIX_MASK = UInt16(2^RADIX_SIZE-1)
# Digit histograms for radix sort: accumulate locally, then merge into `buffer` with atomic adds
function radixhist!(buffer, v, number_of_radix)
i = (blockIdx().x - 1) * blockDim().x + threadIdx().x
stride = gridDim().x * blockDim().x
local_buffer = CUDA.zeros(2^RADIX_SIZE, number_of_radix)
for k in 1:number_of_radix
for j = i:stride:length(v)
bits_to_shift = (k-1)*RADIX_SIZE
vj_shifted = v[j] >> bits_to_shift
b = Int(vj_shifted & RADIX_MASK) + 1
local_buffer[b, k] += 1
end
end
for k in 1:number_of_radix
for i in 1:2^RADIX_SIZE
CUDA.@atomic buffer[i, k] = buffer[i, k] + local_buffer[i, k]
end
end
return
end
# count
function radixhist(v::CuArray{T}; threads = 256, blocks = 1024) where {T}
number_of_radix = ceil(Int, 8sizeof(T) / RADIX_SIZE)
buffer = CUDA.zeros(Int, 2^RADIX_SIZE, number_of_radix)
CUDA.@sync @cuda threads = threads blocks = blocks radixhist!(buffer, v, number_of_radix)
for i in 1:number_of_radix
buffer[:, i] = cumsum(buffer[:, i])
end
buffer
end
gpuradixsort(v) = gpuradixsort!(copy(v))
function gpuradixsort!(vs::AbstractVector{T}) where T
bin = collect(radixhist(cu(vs)))
ts = similar(vs)
# use the histogram to sort the data
hi = length(vs)
lo = 1
len = hi-lo+1
iters = ceil(Int, 8sizeof(T) / RADIX_SIZE)
for j = 1:iters
# Unroll first data iteration, check for degenerate case
v = vs[hi]
idx = Int((v >> ((j-1)*RADIX_SIZE)) & RADIX_MASK) + 1
cbin = bin[:,j]
ci = cbin[idx]
ts[ci] = vs[hi]
cbin[idx] -= 1
# Finish the loop...
@inbounds for i in hi-1:-1:lo
v = vs[i]
idx = Int((v >> ((j-1)*RADIX_SIZE)) & RADIX_MASK) + 1
ci = cbin[idx]
ts[ci] = vs[i]
cbin[idx] -= 1
end
vs,ts = ts,vs
end
vs
end
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | code | 465 | # using Pkg
# Pkg.activate("c:/git/CuCountmap")
using CuCountMap
using SortingAlgorithms
using Test
@testset "CuCountMap.jl - countmap" begin
v = rand(UInt16, 1_000_000);
@time a = cucountmap(v);
@time aa = countmap(v);
@test a == aa
end
# @testset "CuCountMap.jl - sort" begin
# v = rand(UInt64, 1_000_000);
# @time sorted_v = gpuradixsort(v);
# @time sorted_v_cpu = sort(v, alg=RadixSort);
# @test sorted_v == sorted_v_cpu
# end
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | docs | 1272 | ## CuCountmap
`cucountmap` is a faster `countmap` equivalent utilizing CUDA.jl for `Vector{T}` where `isbits(T)` and `sizeof(T) <= 2`.
### Usage
```julia
using CuCountMap
v = rand(Int16, 1_000_000)
cucountmap(v) # converts v to cu(v) and then runs countmap
using CUDA: cu
cuv = cu(v)
countmap(cuv) # StatsBase.countmap is overloaded for CuArrays
```
### Example & Benchmarks
```julia
using CUDA
using CuCountMap
using StatsBase: countmap
v = rand(Int16, 10_000_000);
using BenchmarkTools
cpu_to_gpu_benchmark = @benchmark gpu_countmap = cucountmap($v)
```
```julia
cpu_to_cpu_benchmark = @benchmark cpu_countmap = countmap($v)
```
```julia
cuv = CUDA.cu(v)
gpu_to_gpu_benchmark = @benchmark gpu_countmap2 = countmap(cuv)
```
#### Benchmark Plot
```julia
using Plots
using Statistics: mean
cpu_to_gpu = mean(cpu_to_gpu_benchmark.times)/1000/1000
gpu_to_gpu = mean(gpu_to_gpu_benchmark.times)/1000/1000
cpu_to_cpu = mean(cpu_to_cpu_benchmark.times)/1000/1000
plot(
["CPU Array on CPU \n countmap(v)", "convert CPU Array to GPU array on GPU \n cucountmap(cu(v))", "GPU array on GPU \n cucountmap(cuv)"],
[cpu_to_cpu, cpu_to_gpu, gpu_to_gpu],
seriestypes = :bar, title="CuCountMap.cucountmap vs StatsBase.countmap", label="ms",
legendtitle="Mean time")
```
| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.1.2 | cb769e3bbecbc06c4acc615dcb37410d0aa64715 | docs | 3318 | ## CuCountmap
`cucountmap` is a faster `countmap` equivalent utilizing CUDA.jl for `Vector{T}` where `isbits(T)` and `sizeof(T) <= 2`.
### Usage
```julia
using CuCountMap
v = rand(Int16, 1_000_000)
cucountmap(v) # converts v to cu(v) and then run countmap
using CUDA: cu
cuv = cu(v)
countmap(cuv) # StatsBase.countmap is overloaded for CuArrays
```
```
Dict{Int16, Int64} with 65536 entries:
-23731 => 18
29965 => 13
30270 => 14
1703 => 17
7685 => 16
-7029 => 16
3406 => 12
-30706 => 18
28804 => 10
27640 => 14
-17985 => 18
-28261 => 12
-2851 => 19
2015 => 12
-25023 => 15
31375 => 16
-13631 => 15
-8219 => 19
28165 => 14
⋮ => ⋮
```
### Example & Benchmarks
```julia
using CUDA
using CuCountMap
using StatsBase: countmap
v = rand(Int16, 10_000_000);
using BenchmarkTools
cpu_to_gpu_benchmark = @benchmark gpu_countmap = cucountmap($v)
```
```
BenchmarkTools.Trial: 954 samples with 1 evaluation.
Range (min … max): 4.374 ms … 13.528 ms ┊ GC (min … max): 0.00% … 52.22%
Time (median): 4.814 ms ┊ GC (median): 0.00%
Time (mean ± σ): 5.151 ms ± 1.145 ms ┊ GC (mean ± σ): 3.24% ± 8.90%
██
▅██▇▇▇▆▅▄▄▄▄▃▃▃▃▃▂▂▂▂▁▂▂▂▁▁▁▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▂▂▂▂▂▂ ▃
4.37 ms Histogram: frequency by time 11.3 ms <
Memory estimate: 2.38 MiB, allocs estimate: 103.
```
```julia
cpu_to_cpu_benchmark = @benchmark cpu_countmap = countmap($v)
```
```
BenchmarkTools.Trial: 291 samples with 1 evaluation.
Range (min … max): 15.864 ms … 26.632 ms ┊ GC (min … max): 0.00% … 18.12
%
Time (median): 16.843 ms ┊ GC (median): 0.00%
Time (mean ± σ): 17.195 ms ± 1.444 ms ┊ GC (mean ± σ): 1.72% ± 5.17
%
▃ ▅██▆▄▁
▃█▇▆████████▇▇▄▃▃▂▂▂▂▁▁▁▁▂▁▂▁▂▂▁▁▁▁▂▂▁▂▁▄▁▁▁▂▁▁▂▁▂▂▁▁▁▄▁▃▁▂ ▃
15.9 ms Histogram: frequency by time 22.6 ms <
Memory estimate: 4.17 MiB, allocs estimate: 37.
```
```julia
cuv = CUDA.cu(v)
gpu_to_gpu_benchmark = @benchmark gpu_countmap2 = countmap(cuv)
```
```
BenchmarkTools.Trial: 2242 samples with 1 evaluation.
Range (min … max): 1.799 ms … 9.377 ms ┊ GC (min … max): 0.00% … 73.02
%
Time (median): 1.995 ms ┊ GC (median): 0.00%
Time (mean ± σ): 2.150 ms ± 643.115 μs ┊ GC (mean ± σ): 3.50% ± 8.43
%
█▃
▄███▄▅▅▄▄▃▃▂▂▂▂▂▂▂▁▁▂▂▁▁▁▂▁▁▁▁▁▁▂▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▂▂▂▂▂ ▂
1.8 ms Histogram: frequency by time 5.82 ms <
Memory estimate: 2.38 MiB, allocs estimate: 134.
```
#### Benchmark Plot
```julia
using Plots
using Statistics: mean
cpu_to_gpu = mean(cpu_to_gpu_benchmark.times)/1000/1000
gpu_to_gpu = mean(gpu_to_gpu_benchmark.times)/1000/1000
cpu_to_cpu = mean(cpu_to_cpu_benchmark.times)/1000/1000
plot(
["CPU Array on CPU \n countmap(v)", "convert CPU Array to GPU array on GPU \n cucountmap(cu(v))", "GPU array on GPU \n cucountmap(cuv)"],
[cpu_to_cpu, cpu_to_gpu, gpu_to_gpu],
seriestypes = :bar, title="CuCountMap.cucountmap vs StatsBase.countmap", label="ms",
legendtitle="Mean time")
```
```
[ Info: Precompiling Plots [91a5bcdd-55d7-5caf-9e0b-520d859cae80]
```

| CuCountMap | https://github.com/xiaodaigh/CuCountMap.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 900 | using AbstractPermutations
using Documenter
DocMeta.setdocmeta!(
AbstractPermutations,
:DocTestSetup,
:(using AbstractPermutations);
recursive = true,
)
makedocs(;
modules = [AbstractPermutations],
authors = "Marek Kaluba <[email protected]>",
repo = Documenter.Remotes.GitHub("kalmarek", "AbstractPermutations.jl"),
sitename = "AbstractPermutations.jl",
format = Documenter.HTML(;
prettyurls = get(ENV, "CI", "false") == "true",
canonical = "https://kalmarek.github.io/AbstractPermutations.jl",
edit_link = "main",
assets = String[],
),
pages = [
"Home" => "index.md",
"`AbstractPermutation` interface" => "abstract_api.md",
"Other functions" => "misc.md",
],
warnonly = [:missing_docs],
)
deploydocs(;
repo = "github.com/kalmarek/AbstractPermutations.jl",
devbranch = "main",
)
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 329 | module AbstractPermutations
import GroupsCore
import GroupsCore: GroupElement, order, InterfaceNotImplemented # only these two are extended
include("abstract_perm.jl")
include("cycle_decomposition.jl")
include("io.jl")
include("arithmetic.jl")
include("perm_functionality.jl")
include("ordering.jl")
include("parsing.jl")
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 6225 | """
AbstractPermutation
Abstract type representing bijections of positive integers `ℕ = {1,2,…}`
finitely supported. That is, we treat permutations as functions `ℕ → ℕ` such
that for every permutation `σ` there are only finitely many `k` different from
their image under `σ`.
# Mandatory interface
Subtypes `APerm <: AbstractPermutation` must implement the following functions:
* `APerm(images::AbstractVector{<:Integer}[; check::Bool=true])` - a
constructor of a `APerm` from a vector of images. Optionally the keyword
argument `check` may be set to `false` when the caller knows that `images`
constitute a honest permutation.
* [`Base.:^(i::Integer, σ::APerm)`](@ref ^(::Integer, ::AbstractPermutation))
the customary notation for the image of `i` under `σ`.
* [`degree(σ::APerm)`](@ref degree) the minimal `d ≥ 0` such that `σ` fixes all
  `k > d`.
!!! note
There is no formal requirement that the `APerm(images)` constructor actually
returns a `APerm`. Any `AbstractPermutation` object would do. This may be
useful if constructing permutation from images is not technically feasible.
!!! note
If `APerm` is not constructable from type one needs to implement `one(::APerm)`.
!!! warn
Even though `AbstractPermutation <: GroupsCore.GroupElement` they don't
necessarily implement the whole of `GroupElement` interface, e.g. it is
possible to implement `parent`-less permutations.
# Optional interface
* [`perm(σ::APerm)`](@ref perm) by default returns `σ` - the "simplest"
(implementation-wise) permutation underlying `σ`.
* [`inttype(::Type{<:APerm})`](@ref inttype) by default returns `UInt32`.
* [`__unsafe_image(i::Integer, σ::APerm)`](@ref __unsafe_image) defaults to `i^σ`.
"""
abstract type AbstractPermutation <: GroupsCore.GroupElement end
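# For illustration only (cf. `ExamplePerms.Perm` and `APerm` in the test suite), a minimal
# implementation of the mandatory interface could look like this hypothetical type:
#
#   struct VecPerm <: AbstractPermutation
#       images::Vector{Int}
#       VecPerm(images::AbstractVector{<:Integer}; check::Bool = true) =
#           (check && !isperm(images)) ? throw(ArgumentError("not a permutation")) : new(images)
#   end
#   degree(σ::VecPerm) = something(findlast(i -> σ.images[i] ≠ i, eachindex(σ.images)), 0)
#   Base.:^(i::Integer, σ::VecPerm) = 1 ≤ i ≤ degree(σ) ? oftype(i, σ.images[i]) : i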
"""
degree(σ::AbstractPermutation)
Return a minimal number `n ≥ 0` such that `k^σ == k` for all `k > n`.
Such number `n` can be understood as a _degree_ of a permutation, since we can
regard `σ` as an element of `Sym(n)` (and not of `Sym(n-1)`).
!!! note
By this convention `degree` of the identity permutation is equal to `0`
and it is the only permutation with this property.
Also by this convention there is no permutation with `degree` equal to `1`.
"""
function degree(σ::AbstractPermutation)
throw(
GroupsCore.InterfaceNotImplemented(
:AbstractPermutation,
"AbstractPermutations.degree(::$(typeof(σ)))",
),
)
end
"""
^(i::Integer, σ::AbstractPermutation)
Return the image of `i` under `σ` preserving the type of `i`.
We consider `σ` as a permutation of `ℕ` (the positive integers), with finite
support, so `k^σ = k` for all `k > degree(σ)`.
!!! warn
The behaviour of `i^σ` for `i ≤ 0` is undefined and can not be relied upon.
"""
function Base.:^(::Integer, σ::AbstractPermutation)
throw(
GroupsCore.InterfaceNotImplemented(
:AbstractPermutation,
"Base.:^(::Integer, ::$(typeof(σ)))",
),
)
end
"""
__unsafe_image(i::Integer, σ::AbstractPermutation)
The same as `i^σ`, but assuming that `i ∈ Base.OneTo(degree(σ))`.
!!! warn
The caller is responsible for checking the assumption.
Failure to do so may (and probably will) lead to segfaults in the best
case scenario and to silent data corruption in the worst!.
"""
__unsafe_image(i::Integer, σ::AbstractPermutation) = i^σ
"""
perm(p::AbstractPermutation)
Return the "bare-metal" permutation (unwrap). Return `σ` by default.
!!! warn
**For internal use only.**
Provide access to wrapped permutation object. For "bare-metal" permutations this
method needs to return the identical (i.e. `===`) object.
The intention of this method is to provide an un-wrapped permutations to
computationally intensive algorithms, so that the external wrappers (if present)
do not hinder the performance.
"""
perm(p::AbstractPermutation) = p
"""
inttype(σ::Type{<:AbstractPermutation})
Return the underlying "storage" integer type.
!!! warn
**For internal use only.**
The intention is to provide optimal storage type when the `images` vector
constructor is used (to save allocations and memory copy).
For example, a hypothetical permutation type `Perm8` acting on at most `255` points
may alter the default to `UInt8`.
The default is `UInt32`.
"""
inttype(::Type{P}) where {P<:AbstractPermutation} = UInt32
function inttype(σ::AbstractPermutation)
τ = perm(σ)
return τ === σ ? inttype(typeof(σ)) : inttype(τ)
end
# utilities for Abstract Permutations
function __images_vector(p::AbstractPermutation)
img = let ^ = __unsafe_image
inttype(p)[i^p for i in Base.OneTo(degree(p))]
end
return img
end
function Base.convert(
::Type{P},
p::AbstractPermutation,
) where {P<:AbstractPermutation}
return P(__images_vector(p); check = false)
end
Base.convert(::Type{P}, p::P) where {P<:AbstractPermutation} = p
function Base.one(::Type{P}) where {P<:AbstractPermutation}
return P(inttype(P)[]; check = false)
end
Base.one(σ::AbstractPermutation) = one(typeof(σ))
Base.isone(σ::AbstractPermutation) = degree(σ) == 0
function _copy_by_images(p::AbstractPermutation)
return typeof(p)(__images_vector(p); check = false)
end
Base.copy(p::AbstractPermutation) = _copy_by_images(p)
function Base.:(==)(σ::AbstractPermutation, τ::AbstractPermutation)
degree(σ) ≠ degree(τ) && return false
deg = degree(σ)
deg < 2 && return true
let ^ = __unsafe_image
ans = true
k = ifelse(ispow2(deg), deg, prevpow(2, deg))
for i in Base.OneTo(k)
ans &= i^σ == i^τ
end
ans || return false
@simd for i in (k+1):degree(σ)
i^σ != i^τ && return false
end
end
return true
end
function Base.hash(σ::AbstractPermutation, h::UInt)
h = hash(AbstractPermutation, h)
h = let ^ = __unsafe_image
foldl((h, i) -> hash(i^σ, h), Base.OneTo(degree(σ)); init = h)
end
return h
end
Base.broadcastable(p::AbstractPermutation) = Ref(p)
"""
cycles(g::AbstractPermutation)
Return an iterator over cycles in the disjoint cycle decomposition of `g`.
"""
cycles(σ::AbstractPermutation) = CycleDecomposition(σ)
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 4949 | function Base.inv(σ::AbstractPermutation)
img = Vector{inttype(σ)}(undef, degree(σ))
let ^ = __unsafe_image
for i in Base.OneTo(degree(σ))
k = i^σ
@inbounds img[k] = i
end
end
return typeof(σ)(img; check = false)
end
function Base.:(*)(σ::AbstractPermutation, τ::AbstractPermutation)
img = Vector{inttype(σ)}(undef, max(degree(σ), degree(τ)))
let ^ = __unsafe_image
if degree(σ) ≤ degree(τ)
for i in Base.OneTo(degree(σ))
k = (i^σ)^τ
@inbounds img[i] = k
end
for i in degree(σ)+1:degree(τ)
k = i^τ
@inbounds img[i] = k
end
else # degree(σ) > degree(τ)
for i in Base.OneTo(degree(σ))
k = i^σ
if k ≤ degree(τ)
k = k^τ
end
@inbounds img[i] = k
end
end
end
return typeof(σ)(img; check = false)
end
function Base.:(*)(
σ::AbstractPermutation,
τ::AbstractPermutation,
ρ::AbstractPermutation,
)
degσ, degτ, degρ = degree(σ), degree(τ), degree(ρ)
deg = max(degσ, degτ, degρ)
img = Vector{inttype(σ)}(undef, deg)
let ^ = __unsafe_image
if degσ ≤ degτ ≤ degρ
for i in Base.OneTo(degσ)
k = ((i^σ)^τ)^ρ
@inbounds img[i] = k
end
for i in degσ+1:degτ
k = (i^τ)^ρ
@inbounds img[i] = k
end
for i in degτ+1:degρ
k = i^ρ
@inbounds img[i] = k
end
elseif degσ ≤ degτ # either degσ ≤ degρ < degτ OR degρ < degσ ≤ dτ
for i in Base.OneTo(degσ)
k = (i^σ)^τ
if k ≤ degρ
k = k^ρ
end
@inbounds img[i] = k
end
for i in (degσ+1):degτ
k = i^τ
if k ≤ degρ
k = k^ρ
end
@inbounds img[i] = k
end
elseif degτ < degσ ≤ degρ
for i in Base.OneTo(degσ)
k = i^σ
if k ≤ degτ
k = k^τ
end
k = k^ρ
@inbounds img[i] = k
end
for i in degσ+1:degρ
k = i^ρ
@inbounds img[i] = k
end
elseif degτ < degσ # either degτ ≤ degρ < degσ OR degρ < degτ < degσ
for i in Base.OneTo(degσ)
k = i^σ
if k ≤ degτ
k = k^τ
end
if k ≤ degρ
k = k^ρ
end
@inbounds img[i] = k
end
end
end
return typeof(σ)(img; check = false)
end
function Base.:(*)(σ::AbstractPermutation, τs::AbstractPermutation...)
isempty(τs) && return σ
deg = max(degree(σ), maximum(degree, τs))
img = Vector{inttype(σ)}(undef, deg)
for i in Base.OneTo(deg)
j = (i^σ)
for τ in τs
j = j^τ
end
@inbounds img[i] = j
end
return typeof(σ)(img; check = false)
end
function Base.:^(σ::AbstractPermutation, τ::AbstractPermutation)
deg = max(degree(σ), degree(τ))
img = Vector{inttype(σ)}(undef, deg)
for i in Base.OneTo(deg)
img[i^τ] = (i^σ)^τ
end
P = typeof(σ)
return P(img; check = false)
end
function Base.:^(σ::AbstractPermutation, n::Integer)
if n == 0 || isone(σ)
return one(σ)
elseif n == -1
return inv(σ)
elseif n == 1
return copy(σ)
elseif n < 0
return inv(σ)^-n
elseif n == 2
return σ * σ
elseif n == 3
return σ * σ * σ
elseif n == 4
σ² = σ * σ
return σ² * σ²
elseif n == 5
σ² = σ * σ
return σ² * σ² * σ
elseif n == 6
σ³ = σ * σ * σ
return σ³ * σ³
elseif n == 7
σ³ = σ * σ * σ
return σ³ * σ³ * σ
elseif n == 8
σ² = σ * σ
σ⁴ = σ² * σ²
return σ⁴ * σ⁴
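# heuristic: the cycle-based power visits every point once, while repeated squaring
# performs one full multiplication per squaring/set bit of n; prefer cycles for small
# degrees or for exponents with many set bits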
elseif degree(σ) ≤ 64 || 2count_ones(n) > log2(degree(σ))
power_by_cycles(σ, n)
else
Base.power_by_squaring(σ, n)
end
end
function power_by_cycles(σ::AbstractPermutation, n::Integer)
if n == 0 || isone(σ)
return one(σ)
elseif n == -1
return inv(σ)
elseif n == 1
return copy(σ)
elseif n < 0
return power_by_cycles(inv(σ), -n)
else
img = Vector{inttype(σ)}(undef, degree(σ))
@inbounds for cycle in cycles(σ)
l = length(cycle)
k = n % l
for (idx, j) in enumerate(cycle)
idx += k
idx = ifelse(idx > l, idx - l, idx)
img[j] = cycle[idx]
end
end
return typeof(σ)(img; check = false)
end
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 1866 | struct CycleDecomposition{T<:Integer}
cycles::Vector{T} # cycles, concatenated
cycles_ptrs::Vector{T} # pointers to the starts of the cycles
end
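# For example, for the permutation (1,2,3)(4,5) the constructor below produces
#   cycles      = [1, 2, 3, 4, 5]
#   cycles_ptrs = [1, 4, 6]
# i.e. the k-th cycle is cycles[cycles_ptrs[k]:cycles_ptrs[k+1]-1].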
Base.length(cd::CycleDecomposition) = length(cd.cycles_ptrs) - 1
function Base.eltype(::Type{CycleDecomposition{T}}) where {T}
return SubArray{T,1,Vector{T},Tuple{UnitRange{Int64}},true}
end
function Base.iterate(cd::CycleDecomposition, state = 1)
state == length(cd.cycles_ptrs) && return nothing
from = cd.cycles_ptrs[state]
to = cd.cycles_ptrs[state+1] - 1
return @inbounds @view(cd.cycles[from:to]), state + 1
end
function Base.show(io::IO, cd::CycleDecomposition)
print(io, "Cycle Decomposition: ")
for c in cd
print(io, '(')
join(io, c, ',')
print(io, ')')
end
end
function CycleDecomposition(σ::AbstractPermutation)
T = inttype(σ)
deg = degree(σ)
# allocate vectors of the expected size
cycles = Vector{T}(undef, deg)
visited = falses(deg)
# the upper bound for the number of cycles
cyclesptr = zeros(T, deg + 1)
cptr_idx = 1
cidx = 0
cyclesptr[cptr_idx] = cidx + 1
let ^ = __unsafe_image
for idx in Base.OneTo(deg)
@inbounds visited[idx] && continue
first_pt = idx
cidx += 1
@inbounds cycles[cidx] = first_pt
@inbounds visited[first_pt] = true
next_pt = first_pt^σ
while next_pt ≠ first_pt
cidx += 1
@inbounds cycles[cidx] = next_pt
@inbounds visited[next_pt] = true
next_pt = next_pt^σ
end
cptr_idx += 1 # we finished the cycle
@inbounds cyclesptr[cptr_idx] = cidx + 1
end
end
resize!(cycles, cidx)
resize!(cyclesptr, cptr_idx)
return CycleDecomposition{T}(cycles, cyclesptr)
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 1622 | # IO
function Base.show(io::IO, ::MIME"text/plain", g::AbstractPermutation)
r, c = displaysize(io)
ioc = IOContext(
io,
:available_width =>
get(io, :limit, false) ? (r ÷ 2) * (c - 5) : typemax(Int),
)
k = __print_perm(ioc, g)
if !iszero(k)
print(ioc, '\n', lpad("[output truncated]", c - 5))
end
end
function Base.show(io::IO, g::AbstractPermutation)
_, c = displaysize(io)
ioc = IOContext(
io,
:available_width => get(io, :limit, false) ? c - 5 : typemax(Int),
)
return __print_perm(ioc, g)
end
function __print_perm(io::IOContext, p::AbstractPermutation;)
available_width = get(io, :available_width, typemax(Int))
limit = get(io, :limit, false)
compact = get(io, :compact, false)
if !(get(io, :typeinfo, Nothing) <: AbstractPermutation) &&
!(limit || compact)
str = sprint(show, typeof(p))
print(io, str, " ")
available_width -= length(str) + 1
end
if isone(p)
print(io, "()")
else
for (i, c) in enumerate(cycles(p))
trunc, available_width = __print_cycle(io, c, available_width)
trunc && return i
end
end
return 0
end
function __print_cycle(io::IO, cycle, available_width)
length(cycle) == 1 && return false, available_width
str = join(cycle, ',')
truncated = length(str) + 2 > available_width
if truncated
print(io, '(', SubString(str, 1, available_width - 5), " … )")
else
print(io, '(', str, ')')
end
return truncated, available_width - (length(str) + 2)
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 1655 | function __unsafe_lex_compare(
p::AbstractPermutation,
q::AbstractPermutation,
deg,
)
let ^ = __unsafe_image
for i in Base.OneTo(deg)
ip = i^p
iq = i^q
if ip < iq
return true
elseif ip > iq
return false
end
end
end
end
"""
Lex <: Base.Order.Ordering
Lexicographical ordering of permutations.
The comparison of permutations `σ` and `τ` in Lexicographical ordering returns
`true` when there exists `k ≥ 1` such that
* `i^σ == i^τ` for all `i < k` and
* `k^σ < k^τ`
and `false` otherwise.
The method `isless(σ::AbstractPermutation, τ::AbstractPermutation)` defaults to
the lexicographical order, i.e. calling `Base.lt(Lex(), σ, τ)`.
See also [`DegLex`](@ref).
"""
struct Lex <: Base.Order.Ordering end
"""
DegLex <: Base.Order.Ordering
Degree-then-lexicographical ordering of permutations.
The comparison of `σ` and `τ` is made by comparing [`degree`s](@ref degree)
first, and by the [lexicographical ordering](@ref Lex) among permutations
of the same `degree`.
See also [`Lex`](@ref).
"""
struct DegLex <: Base.Order.Ordering end
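# Example (cf. the ordering tests): for a = (1,2) (degree 2) and c = (4,5) (degree 5)
# Base.lt(Lex(), c, a) == true, since 1^c == 1 < 2 == 1^a,
# while Base.lt(DegLex(), a, c) == true, since degree(a) < degree(c).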
function Base.isless(p::AbstractPermutation, q::AbstractPermutation)
return Base.lt(Lex(), p, q)
end
function Base.lt(::Lex, p::AbstractPermutation, q::AbstractPermutation)
res = __unsafe_lex_compare(p, q, min(degree(p), degree(q)))
return something(res, degree(p) < degree(q))
end
function Base.lt(::DegLex, p::AbstractPermutation, q::AbstractPermutation)
degree(p) < degree(q) && return true
degree(p) > degree(q) && return false
return something(__unsafe_lex_compare(p, q, degree(p)), false)
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 1964 | function _parse_cycles(str::AbstractString)
cycles = Vector{Vector{Int}}()
if occursin(r"\d\s+\d", str)
throw(ArgumentError("parse string as cycles: spaces between digits"))
end
str = replace(str, r"\s+" => "")
str = replace(str, "()" => "")
cycle_regex = r"\(\d+(,\d+)*\)?"
parsed_size = 0
for m in eachmatch(cycle_regex, str)
cycle_str = m.match
parsed_size += sizeof(cycle_str)
cycle = [parse(Int, a) for a in split(cycle_str[2:end-1], ",")]
push!(cycles, cycle)
end
if parsed_size != sizeof(str)
throw(
ArgumentError(
"parse string as cycles: parsed size differs from string",
),
)
end
return cycles
end
function Base.parse(
::Type{P},
str::AbstractString,
) where {P<:AbstractPermutation}
cycles = _parse_cycles(str)
deg = mapreduce(
c -> length(c) > 1 ? maximum(c) : convert(eltype(c), (1)),
max,
cycles;
init = 1,
)
images = Vector{inttype(P)}(undef, deg)
for idx in Base.OneTo(deg)
k = idx
for cycle in cycles
length(cycle) == 1 && continue
i = findfirst(==(k), cycle)
k = isnothing(i) ? k : cycle[mod1(i + 1, length(cycle))]
end
images[idx] = k
end
return P(images; check = true)
end
"""
@perm P cycles_string
Macro to parse cycles decomposition as a string into a permutation of type `P`.
Strings from the output of e.g. GAP can be copied directly into `@perm`, as long as
they are not elided. Cycles of length `1` are not necessary, but can be included.
# Examples:
Using the exemplary implementation from `test/perms_by_images.jl`
```julia
julia> p = @perm Perm{UInt16} "(1,3)(2,4)"
(1,3)(2,4)
julia> typeof(p)
Perm{UInt16}
julia> q = @perm Perm "(1,3)(2,4)(3,5)(8)"
(1,5,3)(2,4)
```
"""
macro perm(type, str)
return :(Base.parse($(esc(type)), $str))
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 2961 | """
isodd(g::AbstractPermutation) -> Bool
Return `true` if g is an odd permutation and `false` otherwise.
An odd permutation decomposes into an odd number of transpositions.
"""
Base.isodd(σ::AbstractPermutation) = __isodd(σ)
Base.isodd(cd::CycleDecomposition) = isodd(count(iseven ∘ length, cd))
"""
    iseven(g::AbstractPermutation) -> Bool
Return `true` if g is an even permutation and `false` otherwise.
An even permutation decomposes into an even number of transpositions.
"""
Base.iseven(σ::AbstractPermutation) = !isodd(σ)
Base.iseven(cd::CycleDecomposition) = !isodd(cd)
function __isodd(σ::AbstractPermutation)
to_visit = trues(degree(σ))
parity = false
k = 1
@inbounds while any(to_visit)
k = findnext(to_visit, k)
to_visit[k] = false
next = k^σ
while next != k
parity = !parity
to_visit[next] = false
next = next^σ
end
end
return parity
end
"""
sign(g::AbstractPermutation)
Return the sign of a permutation as an integer `±1`.
`sign` represents the homomorphism from the permutation group to the unit group
of `ℤ` whose kernel is the alternating group.
"""
Base.sign(σ::AbstractPermutation) = ifelse(isodd(σ), -1, 1)
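# e.g. a transposition such as (1,2) has sign -1, a 3-cycle such as (1,2,3) has sign +1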
"""
permtype(g::AbstractPermutation)
Return the group-theoretic type of permutation `g`, i.e. the vector of lengths
of cycles in the (disjoint) cycle decomposition of `g`.
The lengths are sorted in decreasing order and cycles of length `1` are
omitted. `permtype(g)` fully determines the conjugacy class of `g` in the full
symmetric group.
"""
function permtype(σ::AbstractPermutation)
return sort!([length(c) for c in cycles(σ) if length(c) > 1]; rev = true)
end
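# e.g. permtype of (1,2,3)(4,5) is [3, 2], of a transposition it is [2],
# and of the identity it is Int[] (cf. the interface tests)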
"""
firstmoved(g::AbstractPermutation, range)
Return the first point from `range` that is moved by `g`, or `nothing`
if `g` fixes `range` point-wise.
"""
function firstmoved(σ::AbstractPermutation, range)
all(>(degree(σ)), range) && return nothing
for i in range
if i^σ ≠ i
return i
end
end
return nothing
end
"""
fixedpoints(g::AbstractPermutation, range)
Return the vector of points in `range` fixed by `g`.
"""
function fixedpoints(σ::AbstractPermutation, range)
all(>(degree(σ)), range) && return collect(range)
return [i for i in range if i^σ == i]
end
"""
nfixedpoints(g::AbstractPermutation, range)
Return the number of points in `range` fixed by `g`.
"""
function nfixedpoints(σ::AbstractPermutation, range)
all(>(degree(σ)), range) && return length(range)
return count(i -> i^σ == i, range; init = 0)
end
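# Examples (cf. the interface tests), with σ = (4,5):
#   firstmoved(σ, 1:3) === nothing,    firstmoved(σ, 1:5) == 4,
#   fixedpoints(σ, 1:5) == [1, 2, 3],  nfixedpoints(σ, 1:5) == 3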
function GroupsCore.order(::Type{T}, σ::AbstractPermutation) where {T}
isone(σ) && return one(T)
return GroupsCore.order(T, cycles(σ))
end
GroupsCore.order(cd::CycleDecomposition) = GroupsCore.order(BigInt, cd)
function GroupsCore.order(::Type{T}, cd::CycleDecomposition) where {T}
return convert(T, mapreduce(length, lcm, cd; init = 1))
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 6160 | using Test
import AbstractPermutations as AP
function abstract_perm_interface_test(P::Type{<:AP.AbstractPermutation})
@testset "AbstractPermutation API test: $P" begin
@test P([1]) isa AP.AbstractPermutation
try
P([2])
@warn "$P doesn't perform image vector validation, use it with care!"
catch e
if !(e isa ArgumentError)
rethrow(e)
end
end
try
P([1, 2, 3, 1])
@warn "$P doesn't perform image vector validation, use it with care!"
catch e
if !(e isa ArgumentError)
rethrow(e)
end
end
@testset "the identity permutation" begin
id = P([1, 2, 3])
@test isone(id)
@test one(id) isa AP.AbstractPermutation
@test id == one(id)
@test isone(one(id))
@test AP.degree(id) == 0
@test collect(AP.cycles(id)) == Vector{Int}[]
@test all(i -> i^id == i, 1:5)
end
@testset "same permutations" begin
vec = [3, 1, 2, 4]
a = P(vec)
a_ = P(vec[1:3])
@test !isone(a)
@test !isone(a_)
@test AP.degree(a) == 3
@test AP.degree(a_) == 3
@test a == a_
@test a_ == P(vec)
@test hash(a) == hash(a_)
@test length(unique([a, a_])) == 1
@test inv(a) isa AP.AbstractPermutation
@test isone(inv(a) * a)
@test isone(a * inv(a))
@test isone(inv(a_) * a)
@test isone(inv(a) * P(vec))
end
@testset "group arithmetic" begin
a = P([2, 1, 3]) # (1,2)
b = P([2, 3, 1]) # (1,2,3)
c = P([1, 2, 3, 5, 4]) # (4,5)
@test a * b == P([3, 2, 1]) # (1,2)*(1,2,3) == (1,3)
@test b * a == P([1, 3, 2]) # (1,2,3)*(1,2) == (2,3)
@test isone(a * a)
@test isone(b * b * b)
@test a^b == P([1, 3, 2]) # (1,2)^(1,2,3) == (2,3)
@test b^a == P([3, 1, 2]) # (1,2,3)^(1,2) == (1,3,2)
@test *(b) == b
@test b * b == b^2
@test b * b * a * a * b == one(b)
@test b * b * b * b == b^4
@test Set((b * c)^i for i in -12:12) ==
Set([(b * c)^i for i in 0:5])
end
@testset "actions on 1:n" begin
id = P([1]) # ()
a = P([2, 1, 3]) # (1,2)
b = P([2, 3, 1]) # (1,2,3)
c = P([1, 2, 3, 5, 4]) # (4,5)
# correctness of action
@test 1^a == 2
@test 2^a == 1
@test (3:7) .^ a == 3:7
@test (1:5) .^ b == [2, 3, 1, 4, 5]
@test (1:10) .^ id == 1:10
# action preserves type
@test UInt128(1)^a isa UInt128
@test UInt32(100)^a isa UInt32
@test UInt8(100)^id isa UInt8
@test AP.firstmoved(a, 1:AP.degree(a)) == 1
@test AP.firstmoved(b, 2:5) == 2
@test AP.firstmoved(c, 1:3) === nothing
@test AP.firstmoved(c, 1:5) == 4
@test AP.firstmoved(id, 5:10) === nothing
@test AP.nfixedpoints(id, 1:AP.degree(id)) == 0
@test AP.nfixedpoints(b, 1:AP.degree(b)) == 0
@test AP.nfixedpoints(b, 2:5) == 2
@test AP.nfixedpoints(c, 1:AP.degree(c)) == 3
@test AP.nfixedpoints(c, 4:5) == 0
@test AP.fixedpoints(b, 1:AP.degree(b)) == Int[]
@test AP.fixedpoints(b, 2:5) == [4, 5]
@test AP.fixedpoints(c, 1:3) == [1, 2, 3]
@test AP.fixedpoints(c, 2:4) == [2, 3]
@test AP.fixedpoints(id, 5:7) == 5:7
end
@testset "permutation functions" begin
id = P([1]) # ()
a = P([2, 1, 3]) # (1,2)
b = P([2, 3, 1]) # (1,2,3)
c = P([1, 2, 3, 5, 4]) # (4,5)
@test AP.permtype(id) == Int[]
@test AP.permtype(a) == [2]
@test AP.permtype(b) == [3]
@test AP.permtype(b * c) == [3, 2]
@test sign(id) == 1
@test sign(a) == -1
@test sign(b) == 1
@test sign(c) == -1
@test sign(a * b) == -1
@test sign(a * b * c) == 1
@test isodd(id) == false == !iseven(id)
@test isodd(a) == true == !iseven(a)
@test isodd(b) == false == !iseven(b)
@test isodd(a * b) == true == !iseven(a * b)
@test isodd(a * b * c) == false == !iseven(a * b * c)
@test iseven(AP.cycles(id))
@test isodd(AP.cycles(a))
@test iseven(AP.cycles(b))
@test isodd(AP.cycles(a * b))
@test iseven(AP.cycles(a * b * c))
@test AP.order(id) == 1
@test AP.order(a) == 2
@test AP.order(b) == 3
@test AP.order(c) == 2
@test AP.order(b * c) == 6
@test AP.order(a * b) == 2
@test AP.order(a * b * c) == 2
@test collect(AP.cycles(a)) == [[1, 2]]
@test collect(AP.cycles(b)) == [[1, 2, 3]]
@test collect(AP.cycles(a * b)) == [[1, 3], [2]]
@test collect(AP.cycles(b * c)) == [[1, 2, 3], [4, 5]]
end
@testset "io/show, parsing and deepcopy" begin
p = P([1]) # ()
a = P([2, 1, 3]) # (1,2)
b = P([2, 3, 1]) # (1,2,3)
c = P([1, 2, 3, 5, 4]) # (4,5)
@test sprint(show, AP.cycles(b)) == "Cycle Decomposition: (1,2,3)"
@test sprint(show, AP.cycles(b * c)) ==
"Cycle Decomposition: (1,2,3)(4,5)"
@test parse(P, "(1,3)(2,4,6)(3,5)") isa AP.AbstractPermutation
@test parse(P, "(1,3)(2,4,6)(3,5)") == P([5, 4, 1, 6, 3, 2])
x = [c, c]
@test x[1] === x[2]
y = deepcopy(x)
@test x == y
@test x !== y
@test y[1] === y[2]
if !isbitstype(P)
@test y[1] !== x[1]
end
end
end
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 1034 | module APerms
using Test
import AbstractPermutations as AP
export APerm
struct APerm <: AP.AbstractPermutation
images::Vector{Int}
APerm(images; check::Bool = true) = new(images) # no checks :)
end
@testset "Implementing AbstractPermutation interface" begin
@test one(APerm) isa AP.AbstractPermutation
@test_throws AP.InterfaceNotImplemented AP.degree(one(APerm))
function AP.degree(p::APerm)
return something(findlast(i -> p.images[i] ≠ i, eachindex(p.images)), 0)
end
@test AP.degree(one(APerm)) == 0
@test_throws AP.InterfaceNotImplemented 5^one(APerm)
function Base.:^(i::Integer, p::APerm)
return 1 ≤ i ≤ AP.degree(p) ? oftype(i, p.images[i]) : i
end
@test 5^one(APerm) == 5
@test AP.inttype(one(APerm)) == UInt32
# but actually it'd be better to have it as Int64
a = APerm([1, 2, 3])
b = APerm([2, 3, 1])
a * b
k1 = @allocated a * b
AP.inttype(::Type{APerm}) = Int
a * b
k2 = @allocated a * b
@test k2 < k1
end
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 4299 | @testset "ExamplePerms" begin
abstract_perm_interface_test(EP.Perm{UInt16})
abstract_perm_interface_test(EP.CyclePerm{Int})
@testset "io/show methods" begin
id = EP.Perm(Int[1]) # ()
a = EP.Perm([2, 1, 3]) # (1,2)
b = EP.Perm([2, 3, 1]) # (1,2,3)
c = EP.Perm([1, 2, 3, 5, 4]) # (4,5)
replstr(x) = sprint(
(io, x) -> show(
IOContext(io, :limit => true, :displaysize => (10, 80)),
MIME("text/plain"),
x,
),
x,
)
showstr(x) = sprint(
(io, x) -> show(
IOContext(io, :limit => true, :displaysize => (10, 80)),
x,
),
x,
)
showstr_nolimit(x) = sprint(
(io, x) -> show(IOContext(io, :displaysize => (10, 80)), x),
x,
)
tstr = "Main.ExamplePerms.Perm{UInt16}"
mime = MIME"text/plain"()
@test sprint(show, mime, id) == "$tstr ()"
@test sprint(show, mime, a) == "$tstr (1,2)"
@test sprint(show, mime, b) == "$tstr (1,2,3)"
@test sprint(show, mime, c) == "$tstr (4,5)"
@test sprint(show, mime, b * c) == "$tstr (1,2,3)(4,5)"
@test sprint((io, x) -> show(IOContext(io, :compact => true), x), id) ==
"()"
@test sprint((io, x) -> show(IOContext(io, :limit => true), x), id) ==
"()"
@test replstr(id) == "()"
@test replstr(a) == "(1,2)"
@test replstr(b) == "(1,2,3)"
@test replstr(c) == "(4,5)"
@test replstr(b * c) == "(1,2,3)(4,5)"
p = EP.Perm(Random.randperm(64))
q = EP.Perm(Random.randperm(128))
r = EP.Perm(Random.randperm(1256))
@test replstr(r) isa String
@test contains(replstr(r), "[output truncated]")
@test contains(replstr(r), "…")
@test showstr(r) isa String
@test !contains(showstr(r), "[output truncated]")
@test contains(showstr(r), "…")
@test showstr_nolimit(r) isa String
@test !contains(showstr_nolimit(r), "[output truncated]")
@test !contains(showstr_nolimit(r), "…")
end
@testset "optimized definitions for *" begin
# let seed = 1234
@testset "$seed" for seed in (1234,)
Random.seed!(seed)
p = EP.Perm(Random.randperm(64))
q = EP.Perm(Random.randperm(128))
r = EP.Perm(Random.randperm(1256))
@test p * q isa EP.Perm
@test isperm((p * q).images)
@test p * q == EP.Perm([(i^p)^q for i in 1:128])
@test q * p isa EP.Perm
@test isperm((q * p).images)
@test q * p == EP.Perm([(i^q)^p for i in 1:128])
@test p * q * r == p * (q * r)
@test p * r * q == p * (r * q)
@test r * p * q == r * (p * q)
@test q * p * r == q * (p * r)
@test q * r * p == q * (r * p)
@test r * q * p == r * (q * p)
@test p * q * q * p == (p * q) * (q * p)
@test p * q * r * p == (p * q) * (r * p)
c = AP.cycles(r)
if AP.order(c) == 11
@test r == r^12
else
@test r ≠ r^12
end
b = EP.Perm([2, 3, 1]) # (1,2,3)
c = EP.Perm([1, 2, 3, 5, 4]) # (4,5)
@test Set(AP.power_by_cycles(b * c, i) for i in -12:12) ==
Set([(b * c)^i for i in 0:5])
end
end
@testset "ordering" begin
id = EP.Perm(Int[1]) # ()
a = EP.Perm([2, 1, 3]) # (1,2)
b = EP.Perm([2, 3, 1]) # (1,2,3)
c = EP.Perm([1, 2, 3, 5, 4]) # (4,5)
@test !(id < id)
@test id < a
@test id < b
@test id < c
@test a < b
@test b > a
@test c < a
@test c < b
@test !(c < c)
@test !Base.lt(AP.DegLex(), id, id)
@test Base.lt(AP.DegLex(), id, a)
@test Base.lt(AP.DegLex(), id, b)
@test Base.lt(AP.DegLex(), id, c)
@test Base.lt(AP.DegLex(), a, b)
@test !Base.lt(AP.DegLex(), b, a)
@test Base.lt(AP.DegLex(), a, c)
@test Base.lt(AP.DegLex(), b, c)
@test !Base.lt(AP.DegLex(), c, c)
end
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 2077 | @testset "parsing cycles" begin
@test AP._parse_cycles("()") == Int[]
@test AP._parse_cycles("(1)(2)(3)") == [[1], [2], [3]]
@test AP._parse_cycles("(1)(2,3)") == [[1], [2, 3]]
@test AP._parse_cycles("(1)(\n2, 3)") == [[1], [2, 3]]
@test AP._parse_cycles("(3,2,1)(4,5)") == [[3, 2, 1], [4, 5]]
@test_throws ArgumentError AP._parse_cycles("(a,b)")
@test_throws ArgumentError AP._parse_cycles("(1 2)")
s = """
( 1, 22,73,64,78,81, 24 ,89,90,54,51,82,91,53, 18
,38,19,52,44,77,62,95,94,50,43,42,
10,67,87,60,36,12)(2,57,34,88)(3,92,76,17,99,96,30,55,45,41,98)(4,56,59,97,49,
21,15,9,26,86,83,29,27,66,6,58,28,5,68,40,72,7,84,93,39,79,23,46,63,32,61,100,
11)(8,80,71,75,35,14,85,25,20,70,65,16,48,47,37,74,33,13,31,69)
"""
s2 = """
(1,22,73,64,78,81,24,89,90,54,51,82,91,53,18,38,19,52,44,77,62,95,94,50,43,42,\n10,67,87,60,36,12)(2,57,34,88)(3,92,76,17,99,96,30,55,45,41,98)(4,56,59,97,49,\n21,15,9,26,86,83,29,27,66,6,58,28,5,68,40,72,7,84,93,39,79,23,46,63,32,61,100,\n11)(8,80,71,75,35,14,85,25,20,70,65,16,48,47,37,74,33,13,31,69)
"""
@test AP._parse_cycles(s) == AP._parse_cycles(s2)
end
@testset "@perm macro" begin
P = EP.Perm
@test_throws ArgumentError parse(P, "(1,2,3")
@test_throws ArgumentError parse(P, "(1,2,3),(4,5)")
@test_throws ArgumentError parse(P, "(1,2,3),(4 5)")
images = [2, 3, 1]
@test parse(P{UInt8}, "(1,2,3)(5)(10)") == P{UInt8}(images)
@test parse(P{UInt32}, "(1,2,3)(5)(10)") == P{UInt32}(images)
@test AP.@perm(P{UInt16}, "(1,2,3)(5)(10)") isa AP.AbstractPermutation
@test AP.@perm(P{UInt16}, "(1,2,3)(5)(10)") isa P
@test AP.@perm(P{UInt16}, "(1,2,3)(5)(10)") isa P{UInt16}
@test AP.@perm(P{Int8}, "(1,2,3)(5)(10)") isa P{Int8}
@test AP.degree(AP.@perm(P{UInt16}, "(1,2,3)(5)(10)")) == 3
@test AP.@perm(P{UInt16}, "(1,2,3,4,5)") == P([2, 3, 4, 5, 1])
@test AP.@perm(P{UInt16}, "(3,2,1)(4,5)") == P([3, 1, 2, 5, 4])
@test eltype([AP.@perm(P{UInt16}, "(1,2)"), P([2, 3, 4, 5, 1])]) ==
P{UInt16}
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 2990 | module ExamplePerms
import AbstractPermutations as AP
function __degree(images::AbstractVector{<:Integer})
@inbounds for i in lastindex(images):-1:firstindex(images)
images[i] ≠ i && return i
end
return zero(firstindex(images))
end
struct Perm{T<:Integer} <: AP.AbstractPermutation
images::Vector{T}
function Perm{T}(
images::AbstractVector{<:Integer};
check::Bool = true,
) where {T}
Base.require_one_based_indexing(images)
if check && (!isperm(images) || isempty(images))
throw(ArgumentError("images do not constitute a permutation!"))
end
deg = __degree(images)
# we take the ownership of `images`, when possible
if images isa Vector
images = resize!(images, deg)
return new{T}(images) # no copy
else # fallback, copies view into a new vector
return new{T}(@view images[Base.OneTo(deg)])
end
end
end
AP.degree(σ::Perm) = length(σ.images)
function Base.:^(n::Integer, σ::Perm)
return n in eachindex(σ.images) ? oftype(n, @inbounds σ.images[n]) : n
end
# this would be enough; to squeeze more performance we also define:
AP.inttype(::Type{Perm{T}}) where {T} = T
@inline AP.__unsafe_image(n::Integer, σ::Perm) =
oftype(n, @inbounds σ.images[n])
# this is only for our convenience, NOT REQUIRED
function Perm(images::AbstractVector{<:Integer}; check = true)
return Perm{UInt16}(images; check = check)
end
# to make use of lazy-caching of cycle decomposition the following pattern
# could be used:
#=
mutable struct Perm{T<:Integer} <: AP.AbstractPermutation
images::Vector{T}
cycles::AP.CycleDecomposition{T}
# __same__ (hence incomplete) constructor as above
end
function AP.cycles(σ::Perm)
if !isdefined(σ, :cycles)
cdec = AP.CycleDecomposition(σ)
σ.cycles = cdec
end
return σ.cycles
end
function AP.isodd(σ::Perm)
isdefined(σ, :cycles) && return AP.isodd(AP.cycles(σ))
return AP.__isodd(σ)
end
=#
# some other performance overloads that are possible
# Base.copy(σ::Perm) = Perm(copy(σ.images), false)
struct CyclePerm{T} <: AP.AbstractPermutation
cycledec::AP.CycleDecomposition{T}
end
function CyclePerm{T}(
images::AbstractVector{<:Integer};
check::Bool = true,
) where {T}
σ = Perm{T}(images; check = check) # being lazy
return CyclePerm(AP.CycleDecomposition(σ))
end
AP.degree(σ::CyclePerm) = length(σ.cycledec.cycles)
function Base.:^(n::Integer, σ::CyclePerm)
cd = σ.cycledec
k = findfirst(==(n), cd.cycles)
isnothing(k) && return n
idx = searchsortedlast(cd.cycles_ptrs, k)
next = if cd.cycles_ptrs[idx+1] != k + 1
cd.cycles[k+1]
else
cd.cycles[cd.cycles_ptrs[idx]]
end
return oftype(n, next)
end
# this would be enough; for performance we also define those
AP.inttype(::Type{<:CyclePerm{T}}) where {T} = T
AP.cycles(σ::CyclePerm) = σ.cycledec
end # of module ExamplePerms
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | code | 699 | using Test
import Random
import AbstractPermutations as AP
import AbstractPermutations
include(joinpath(pkgdir(AbstractPermutations), "test", "abstract_perm_API.jl"))
include(joinpath(pkgdir(AbstractPermutations), "test", "perms_by_images.jl"))
import .ExamplePerms as EP
@testset "AbstractPermutations.jl" begin
include("example_perms_tests.jl")
@testset "incomplete implementation" begin
include("aperm_interface_check.jl")
import .APerms.APerm as APerm
abstract_perm_interface_test(APerm)
@test convert(APerm, EP.Perm([2, 3, 1])) isa APerm
@test convert(APerm, EP.Perm([2, 3, 1])) == EP.Perm([2, 3, 1])
end
include("parsing.jl")
end
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | docs | 2863 | # AbstractPermutations
[](https://kalmarek.github.io/AbstractPermutations.jl/stable/)
[](https://kalmarek.github.io/AbstractPermutations.jl/dev/)
[](https://github.com/kalmarek/AbstractPermutations.jl/actions/workflows/CI.yml)
[](https://codecov.io/gh/kalmarek/AbstractPermutations.jl)
This Julia package provides a basis for interoperability between different implementations of permutations in Julia.
The interface is based on four preconditions:
* subtyping `AbstractPermutations.AbstractPermutation`, and
* implementing a constructor from a vector of `images`, and
* implementing methods for
* `AbstractPermutations.degree(::AbstractPermutation)` and
* `Base.^(::Integer, ::AbstractPermutation)`,
and two conventions:
* `AbstractPermutations` are finitely supported bijections of `N` (the positive integers)
* `AbstractPermutations` act on `N` from the right (and therefore `(1,2)·(1,2,3) == (1,3)`).
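For illustration, here is a minimal sketch of the right-action convention, assuming the `ExamplePerms.Perm` implementation from the package's test suite (see the testing section below):

```julia
julia> import .ExamplePerms # example implementation from test/perms_by_images.jl
julia> a = ExamplePerms.Perm([2, 1, 3]); # the transposition (1,2)
julia> b = ExamplePerms.Perm([2, 3, 1]); # the 3-cycle (1,2,3)
julia> 1^(a * b) # images compose left-to-right: (1^a)^b == 2^b == 3
3
julia> a * b == ExamplePerms.Perm([3, 2, 1]) # i.e. a * b is the transposition (1,3)
true
```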
By implementing the interface one receives not only consistent arithmetic **across** different implementations of the interface, but also the possibility of running permutation group algorithms from packages following the interface.
The packages following the `AbstractPermutation` interface:
* [`PermutationGroups.jl`](https://github.com/kalmarek/PermutationGroups.jl)
* [`PermGroups.jl`](https://github.com/jmichel7/PermGroups.jl/) (to be confirmed).
> Note that [`Permutations.jl`](https://github.com/scheinerman/Permutations.jl) **does not** implement the `AbstractPermutations.jl` interface because its permutations act on integers **on the left**. See [these](https://github.com/scheinerman/Permutations.jl/issues/42#issuecomment-1826868005) [comments](https://github.com/scheinerman/Permutations.jl/issues/42#issuecomment-1830242636).
## Testing of the interface
We provide a test suite for the interface. An example implementation, [`ExamplePerms.Perm`](https://github.com/kalmarek/AbstractPermutations.jl/blob/main/test/perms_by_images.jl), can be tested via the following.
```julia
julia> using AbstractPermutations
julia> include(joinpath(pkgdir(AbstractPermutations), "test", "abstract_perm_API.jl"))
abstract_perm_interface_test (generic function with 1 method)
julia> include(joinpath(pkgdir(AbstractPermutations), "test", "perms_by_images.jl")) # include your own implementation
Main.ExamplePerms
julia> import .ExamplePerms
julia> abstract_perm_interface_test(ExamplePerms.Perm{UInt16});
Test Summary: | Pass Total Time
AbstractPermutation API test: Main.ExamplePerms.Perm | 95 95 0.3s
```
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | docs | 3311 | ```@meta
CurrentModule = AbstractPermutations
```
# The `AbstractPermutation` interface
The `AbstractPermutation` interface consists of just three mandatory functions.
Note that none of them is exported, hence it is safe to `import`/`using`
the package without introducing any naming conflicts with other packages.
## Mandatory methods
The three mandatory methods are:
* a constructor,
* `AbstractPermutations.degree` and
* `Base.^`.
!!! note
The meaning of `degree` doesn't have a well established tradition in
mathematics. This is still ok, as long as we define its meaning with care
for precision and use it in a consistent and predictable way.
```@docs
AbstractPermutation
```
```@docs
degree
```
```@docs
^(::Integer, ::AbstractPermutation)
```
## Supplementary methods
Moreover there are three internal, supplementary functions that may be overloaded
by the implementer, if needed (mostly for performance reasons).
```@docs
inttype
perm
__unsafe_image
```
## Example implementation
For an example of a very simple implementation of the `AbstractPermutation`
interface, see the `ExamplePerms` module defined in
[`perms_by_images.jl`](https://github.com/kalmarek/AbstractPermutations.jl/blob/main/test/perms_by_images.jl).
Here we provide an alternative implementation which keeps the internal
storage at fixed length.
### Implementing mandatory methods
```@example APerm
import AbstractPermutations
struct APerm{T} <: AbstractPermutations.AbstractPermutation
images::Vector{T}
degree::Int
function APerm{T}(images::AbstractVector{<:Integer}; check::Bool=true) where T
if check
isperm(images) || throw(ArgumentError("`images` vector is not a permutation"))
end
deg = something(findlast(i->images[i] ≠ i, eachindex(images)), 0)
return new{T}(images, deg)
end
end
nothing # hide
```
Above we defined permutations by storing the vector of their images together
with the computed degree `deg`.
Now we need to implement the remaining two functions which will be simple enough:
```@example APerm
AbstractPermutations.degree(p::APerm) = p.degree
function Base.:^(i::Integer, p::APerm)
deg = AbstractPermutations.degree(p)
# make sure that we return something of the same type as `i`
return 1 ≤ i ≤ deg ? oftype(i, p.images[i]) : i
end
nothing # hide
```
With this the interface implementation is complete. To test whether the implementation
follows the specification, a test suite is provided:
```@example APerm
include(joinpath(pkgdir(AbstractPermutations), "test", "abstract_perm_API.jl"))
abstract_perm_interface_test(APerm{UInt16})
nothing # hide
```
### Supplementary Methods
Since in `APerm{T}` we store images as a `Vector{T}`, to avoid spurious
allocations we may define
```julia
AbstractPermutations.inttype(::Type{APerm{T}}) where T = T
```
There is no need to define `AbstractPermutations.perm` as `APerm` is already
very low level and suitable for high performance code-paths.
Finally to squeeze even more performance one could define `__unsafe_image`
with the same semantics as `n^σ` under the assumption that `n` belongs to
`Base.OneTo(degree(σ))`:
```julia
@inline function AbstractPermutations.__unsafe_image(n::Integer, σ::APerm)
return oftype(n, @inbounds σ.images[n])
end
```
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | docs | 1629 | ```@meta
CurrentModule = AbstractPermutations
```
# AbstractPermutations
The package defines an interface for abstract permutations.
The general assumptions are as follows:
We consider `AbstractPermutations` as bijective self-maps of
``\mathbb{N} = \{1,2,\ldots\}`` (the **positive integers**) which are
**finitely supported**. That means that for every permutation
``\sigma \colon \mathbb{N} \to \mathbb{N}`` there are only finitely many
``k\in \mathbb{N}`` such that the value of ``\sigma`` at ``k`` is different
from ``k``.
In practical terms this means that each permutation can be uniquely determined
by inspecting a vector of its values on the set ``\{1, 2, \ldots, n\}`` for some
``n``. By standard mathematical convention we will denote **the image** of
``k`` under ``\sigma`` by ``k^{\sigma}``, to signify that the set of bijections
_acts_ on ``\mathbb{N}``
[**on the right**](https://en.wikipedia.org/wiki/Group_action#Right_group_action).
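In particular, acting on the right means that composition is evaluated left-to-right:
```math
k^{\sigma\tau} = \left(k^{\sigma}\right)^{\tau} \quad \text{for all } k \in \mathbb{N}.
```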
For the description of the Julia interface see the next section.
## The packages following the `AbstractPermutation` interface
* [`PermutationGroups.jl`](https://github.com/kalmarek/PermutationGroups.jl)
* [`PermGroups.jl`](https://github.com/jmichel7/PermGroups.jl/) (to be confirmed).
> Note that [`Permutations.jl`](https://github.com/scheinerman/Permutations.jl) **does not** implement the `AbstractPermutations.jl` interface because its permutations act on integers **on the left**. See [these](https://github.com/scheinerman/Permutations.jl/issues/42#issuecomment-1826868005) [comments](https://github.com/scheinerman/Permutations.jl/issues/42#issuecomment-1830242636).
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 0.3.1 | 26fcfa76767d3ff545f8362c2da62f550be4d477 | docs | 347 | ```@meta
CurrentModule = AbstractPermutations
```
## Permutation specific functions
```@docs
isodd(::AbstractPermutation)
iseven(::AbstractPermutation)
sign(::AbstractPermutation)
permtype
cycles
Lex
DegLex
```
## Function specific to actions on `1:n`
```@docs
firstmoved
fixedpoints
nfixedpoints
```
## The `@perm` macro
```@docs
@perm
```
| AbstractPermutations | https://github.com/kalmarek/AbstractPermutations.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 804 | using AxisKeys: KeyedArray
using FullNetworkSystems
using InlineStrings: String15, String31
using Documenter
DocMeta.setdocmeta!(FullNetworkSystems, :DocTestSetup, :(using FullNetworkSystems); recursive=true)
makedocs(;
modules=[FullNetworkSystems],
authors="Invenia Technical Computing Corporation",
repo="https://github.com/invenia/FullNetworkSystems.jl/blob/{commit}{path}#{line}",
sitename="FullNetworkSystems.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://invenia.github.io/FullNetworkSystems.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
checkdocs=:exports,
strict=true,
)
deploydocs(;
repo="github.com/invenia/FullNetworkSystems.jl",
devbranch="main",
)
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 1362 | module FullNetworkSystems
using AxisKeys
using Dates
using Dictionaries
using DocStringExtensions
using InlineStrings
using LinearAlgebra
using SparseArrays
export System, SystemDA, SystemRT
export Zone, Generator, Bus, Branch
export Zones, Generators, Buses, Branches
export GeneratorTimeSeries, GeneratorStatus, GeneratorStatusDA, GeneratorStatusRT
export gens_per_zone, branches_by_breakpoints, get_datetimes
export get_zones, get_buses, get_generators, get_branches, get_lines, get_transformers
export get_regulation_requirements, get_operating_reserve_requirements, get_good_utility_requirements
export get_gens_per_bus, get_loads_per_bus, get_incs_per_bus, get_decs_per_bus, get_psls_per_bus
export compute_ptdf, get_ptdf, retrieve_ptdf, compute_lodf, get_lodfs
export get_initial_commitment, get_initial_downtime, get_initial_uptime
export get_increments, get_decrements, get_virtuals, get_price_sensitive_loads
export get_availability, get_must_run
export get_initial_generation, get_loads, get_offer_curve
export get_pmin, get_pmax, get_regulation_min, get_regulation_max
export get_regulation_offers, get_spinning_offers, get_on_supplemental_offers, get_off_supplemental_offers
export get_commitment, get_regulation_commitment
include("system.jl")
include("accessors.jl")
include("block_inv.jl")
include("matrices.jl")
include("deprecated.jl")
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 7499 | """
get_datetimes(system)
Extract datetimes from a `System`.
"""
function get_datetimes(system::System)
# use offer_curve axiskeys because all subtypes of System have offer_curve
return axiskeys(system.generator_time_series.offer_curve, 2)
end
get_zones(system::System) = system.zones
"Returns a `Dictionary` with zonal regulation requirements indexed by zone number."
function get_regulation_requirements(system::System)
return map(system.zones) do zone
zone.regulation
end
end
"Returns a `Dictionary` with zonal operating reserve requirements indexed by zone number."
function get_operating_reserve_requirements(system::System)
return map(system.zones) do zone
zone.operating_reserve
end
end
"Returns a `Dictionary` with zonal good utility practice requirements indexed by zone number."
function get_good_utility_requirements(system::System)
return map(system.zones) do zone
zone.good_utility
end
end
"Returns a `Dictionary` of `Bus` objects in the `System` indexed by bus name."
get_buses(system::System) = system.buses
"Returns a `Dictionary` of `Generator` objects in the `System` indexed by unit code."
get_generators(system::System) = system.generators
"Returns a `Dictionary` of `Branch` objects in the `System` indexed by branch name."
get_branches(system::System) = system.branches
"Returns a `Dictionary` of branches that are not transformers in the `System` indexed by name."
get_lines(system::System) = filter(br -> !br.is_transformer, system.branches)
"Returns a `Dictionary` of transformers in the `System` indexed by name."
get_transformers(system::System) = filter(br -> br.is_transformer, system.branches)
"Returns a `Dictionary` of unit codes at each bus."
get_gens_per_bus(system::System) = system.gens_per_bus
"Returns a `Dictionary` of load names at each bus."
get_loads_per_bus(system::System) = system.loads_per_bus
"Returns the power transfer distribution factor of the system."
get_ptdf(system::System) = system.ptdf
"Returns the power transfer distribution factor of the system. Calculates it if missing from system"
retrieve_ptdf(system::System) = coalesce(get_ptdf(system), compute_ptdf(system))
"Returns a `Dictionary` of the line outage distribution factor matrices for the `System` indexed by contingencies."
get_lodfs(system::System) = system.lodfs
"Returns the generation of the generator at the start of the time period (pu)"
get_initial_generation(system::System) = system.generator_time_series.initial_generation
"Returns time series data of the fixed loads in the system"
get_loads(system::System) = system.loads
"Returns time series data of the generator offer curves"
get_offer_curve(system::System) = system.generator_time_series.offer_curve
"Returns time series data of minimum generator output (pu)"
get_pmin(system::System) = system.generator_time_series.pmin
"Returns time series data of maximum generator output (pu)"
get_pmax(system::System) = system.generator_time_series.pmax
"Returns time series data of minimum generator output in the ancillary services market (pu)"
get_regulation_min(system::System) = system.generator_time_series.regulation_min
"Returns time series data of maximum generator output in the ancillary services market (pu)"
get_regulation_max(system::System) = system.generator_time_series.regulation_max
"Returns time series data of offer prices for ancillary servives regulation reserves (\$ /pu)"
get_regulation_offers(system::System) = system.generator_time_series.regulation_offers
"Returns time series data of offer prices for ancillary servives spinning reserves (\$ /pu)"
get_spinning_offers(system::System) = system.generator_time_series.spinning_offers
"Returns time series data of offer prices for ancillary servives online supplemental reserves (\$ /pu)"
get_on_supplemental_offers(system::System) = system.generator_time_series.on_supplemental_offers
"Returns time series data of offer prices for ancillary servives offline supplemental reserves (\$ /pu)"
get_off_supplemental_offers(system::System) = system.generator_time_series.off_supplemental_offers
"Returns a flag indicating whether each generator was on at the start of the day."
function get_initial_commitment(system::SystemDA)
return map(system.generator_time_series.initial_generation) do i
i == 0.0 ? false : true
end
end
"Returns the number of hours each generator was on at the start of the day."
function get_initial_uptime(system::SystemDA)
return system.generator_status.hours_at_status .* get_initial_commitment(system)
end
"Returns the number of hours each generator was off at the start of the day."
function get_initial_downtime(system::SystemDA)
return system.generator_status.hours_at_status .* .!get_initial_commitment(system)
end
"Returns a `Dictionary` of increment bids at each bus."
get_incs_per_bus(system::SystemDA) = system.incs_per_bus
"Returns a `Dictionary` of decrement bids at each bus."
get_decs_per_bus(system::SystemDA) = system.decs_per_bus
"Returns a `Dictionary` of price sensitive load bids at each bus."
get_psls_per_bus(system::SystemDA) = system.psls_per_bus
"Returns time series data of increment bids."
get_increments(system::SystemDA) = system.increments
"Returns time series data of decrement bids."
get_decrements(system::SystemDA) = system.decrements
"Returns time series data of price sensitive load bids."
get_price_sensitive_loads(system::SystemDA) = system.price_sensitive_loads
"Returns time series data of both increment bids and decrement bids."
get_virtuals(system::SystemDA) = vcat(system.increments, system.decrements)
"Returns time series data of flags indicating if the generator is available to be committed in each hour"
get_availability(system::SystemDA) = system.generator_status.availability
"Returns time series data of flags indicating if the generator must be committed in each hour"
get_must_run(system::SystemDA) = system.generator_status.must_run
"Returns time series data of generator commitment status in each hour"
get_commitment(system::SystemRT) = system.generator_status.commitment
"Returns time series data of generator regulation commitment status in each hour"
get_regulation_commitment(system::SystemRT) = system.generator_status.regulation_commitment
"""
gens_per_zone(system::System)
Returns a `Dict` with keys of `Zone` numbers and values of generator names in that zone.
"""
function gens_per_zone(system::System)
gens_per_zone = Dict{Int, Vector{Int}}()
for gen in system.generators
if haskey(gens_per_zone, gen.zone)
push!(gens_per_zone[gen.zone], gen.unit_code)
else
gens_per_zone[gen.zone] = [gen.unit_code]
end
end
gens_per_zone[MARKET_WIDE_ZONE] = collect(keys(system.generators))
return gens_per_zone
end
"""
branches_by_breakpoints(system::System) -> NTuple{3, Vector{$BranchName}}
Returns three vectors containing the names of branches which have 0, 1, and 2 breakpoints.
"""
function branches_by_breakpoints(system::System)
zero_bp, one_bp, two_bp = BranchName[], BranchName[], BranchName[]
for branch in system.branches
if branch.is_monitored
if all(iszero, branch.break_points)
push!(zero_bp, branch.name)
elseif iszero(last(branch.break_points))
push!(one_bp, branch.name)
else
push!(two_bp, branch.name)
end
end
end
return zero_bp, one_bp, two_bp
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 3339 | # Used to compute PTDF, but it is generic code for inverting a large matrix
# Could/should be open sourced. See:
# https://github.com/JuliaLinearAlgebra/GenericLinearAlgebra.jl/pull/46
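# For reference, `_block_inv` below applies the block matrix inversion (Schur complement)
# identity: for M = [A B; C D] with D invertible and S = A - B * inv(D) * C,
#   inv(M) = [ inv(S)                 -inv(S) * B * inv(D)
#             -inv(D) * C * inv(S)     inv(D) + inv(D) * C * inv(S) * B * inv(D) ]
# `_block_inv` receives inv(D) (not D) and returns these four blocks in that order.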
function _block_inv(
A::AbstractMatrix,
B::AbstractMatrix,
C::AbstractMatrix,
D_inv::AbstractMatrix,
)
B_D_inv = B * D_inv
# Compute -B_D_inv * C + A and store it in A
BLAS.gemm!('N', 'N', -1.0, B_D_inv, C, 1.0, A)
A = inv(A)
B = A * B_D_inv
D_inv_C = D_inv * C
# Compute -D_inv_C * A and store it in C
mul!(C, -D_inv_C, A)
# Compute D_inv_C * B + D_inv and store it in D_inv
BLAS.gemm!('N', 'N', 1.0, D_inv_C, B, 1.0, D_inv)
return A, -B, C, D_inv
end
@views function _partition_big_mat(mat::AbstractMatrix; block_size::Int=13_000)
A = mat[1:block_size, 1:block_size]
B = mat[1:block_size, (block_size + 1):end]
C = mat[(block_size + 1):end, 1:block_size]
D = mat[(block_size + 1):end, (block_size + 1):end]
return A, B, C, D
end
function _blocks_big_mat(
mat::T; block_size::Int=13_000
) where T<:AbstractMatrix{F} where F
# SubMat is the type that `_partition_big_mat` returns
SubMat = SubArray{F, 2, T, Tuple{UnitRange{Int}, UnitRange{Int}}, false}
mat_blocks = Tuple{SubMat, SubMat, SubMat, SubMat}[]
D = mat
while true
A, B, C, D = _partition_big_mat(D; block_size=block_size)
pushfirst!(mat_blocks, (A, B, C, D))
size(D, 1) <= block_size && break
end
return mat_blocks
end
"""
big_mat_inv(mat::AbstractMatrix; block_size::Int=13_000) -> AbstractMatrix
Receives a matrix that is supposed to be inverted. If the size of the matrix is larger than
the defined `block_size`, it first partitions the matrix into smaller blocks until the
matrices that are supposed to be inverted have size less than `block_size`.
The partitioned matrix would look like: `mat = [A B; C D]` where the size of A is guaranteed
to be smaller than the `block_size`. If matrix D is larger than `block_size`, it
gets partitioned `D = [A1 B1;C1 D1]` and this process continues until all Ais and Dis are
smaller than `block_size`.
The default `block_size` is set to be `13_000` as we have empirically observed that, for
matrices smaller than this size, the built-in `inv` can efficiently handle the inversion.
This was set when computing the admittance matrix inverse for MISO; depending on the
application, this number can be adjusted.
Starting from the bottom-right corner of the partitioned matrix, we apply the block matrix
inversion lemma (https://en.wikipedia.org/wiki/Block_matrix) iteratively until the full
matrix inverse is computed.
"""
function big_mat_inv(mat::AbstractMatrix; block_size::Int=13_000)
# If the matrix is smaller than the specified block size, just do regular inversion
size(mat, 1) <= block_size && return inv(mat)
# partition the matrix into smaller blocks.
blocks = _blocks_big_mat(mat, block_size=block_size)
# iteratively calculating the matrix inversion of each block
A, B, C, D = popfirst!(blocks)
A, B, C, D = _block_inv(A, B, C, inv(D))
num_blocks = length(blocks)
for bl_ in 1:num_blocks
inverted_mat = [A B; C D]
A, B, C, D = popfirst!(blocks)
A, B, C, D = _block_inv(A, B, C, inverted_mat)
end
return [A B; C D]
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 1563 | # v1 deprecations, to be removed in v2
@deprecate get_lodf(system::System) get_lodfs(system)
@deprecate get_regmin(system::System) get_regulation_min(system)
@deprecate get_regmax(system::System) get_regulation_max(system)
@deprecate get_load(system::System) get_loads(system)
@deprecate get_regulation(system::System) get_regulation_offers(system)
@deprecate get_spinning(system::System) get_spinning_offers(system)
@deprecate get_supplemental_on(system::System) get_on_supplemental_offers(system)
@deprecate get_supplemental_off(system::System) get_off_supplemental_offers(system)
@deprecate get_psds_per_bus(system::System) get_psls_per_bus(system)
export get_bids
function get_bids(system::SystemDA, type_of_bid::Symbol)
if type_of_bid === :increment
Base.depwarn("`get_bids(system, :increment)` is deprecated, use `get_increments(system)` instead.", :get_bids)
return get_increments(system)
elseif type_of_bid === :decrement
Base.depwarn("`get_bids(system, :decrement)` is deprecated, use `get_decrements(system)` instead.", :get_bids)
return get_decrements(system)
elseif type_of_bid === :price_sensitive_demand
Base.depwarn("`get_bids(system, :price_sensitive_demand)` is deprecated, use `get_price_sensitive_loads(system)` instead.", :get_bids)
return get_price_sensitive_loads(system)
else
Base.depwarn("`get_bids` is deprecated, use `get_increments` or `get_decrements` or `get_price_sensitive_loads`.", :get_bids)
return getproperty(system, type_of_bid)
end
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 7042 | """
compute_ptdf(system::System; block_size, reference_bus_index) -> KeyedArray
compute_ptdf(buses::Buses, branches::Branches; block_size, reference_bus_index) -> KeyedArray
Takes a system, or data for that system, representing an `M`-branch, `N`-bus grid
and returns the `M * N` DC-Power Transfer Distribution Factor (DC-PTDF) matrix of the network.
For a ~15,000 bus system with aggregated borders, this is expected to take ~1 minute.
# Keywords
- `block_size=13_000`: Block size to be used when partitioning a big matrix for inversion.
- `reference_bus=first(keys(buses))`: The name of the reference bus.
# Output
- `::KeyedArray`: The PTDF matrix; the axes contain the branch and bus names.
!!! note
The input data must have no isolated components or islands.
"""
function compute_ptdf(system::System; kwargs...)
return compute_ptdf(get_buses(system), get_branches(system); kwargs...)
end
function compute_ptdf(
buses::Buses,
branches::Branches;
block_size=13_000,
reference_bus=nothing,
)
bus_names = collect(keys(buses))
reference_bus_index = _reference_bus(reference_bus, bus_names)
incid_matrix = _incidence(buses, branches)
n_branches, n_buses = size(incid_matrix)
# Remove column related to reference bus from incidence matrix
incid_matrix = incid_matrix[:, Not(reference_bus_index)]
B_fl_tilde = sparse(diagm(_series_susceptance(branches))) * incid_matrix
B_bus_tilde_inv = big_mat_inv(
Matrix(incid_matrix' * B_fl_tilde),
block_size=block_size
)
ptdf_matrix = B_fl_tilde * B_bus_tilde_inv
# Add reference bus column back, filled with zeros
@views ptdf_matrix = hcat(
ptdf_matrix[:, 1:(reference_bus_index - 1)],
zeros(n_branches),
ptdf_matrix[:, reference_bus_index:end],
)
return KeyedArray(ptdf_matrix, (collect(keys(branches)), bus_names))
end
function _reference_bus(reference_bus, bus_names)
reference_bus === nothing && return 1
idx = findfirst(==(reference_bus), bus_names)
idx === nothing && throw(ArgumentError("Reference bus '$reference_bus' not found."))
return idx
end
"""
_series_susceptance(branches) -> Vector{Float64}
Calculates the susceptance of the elements in the branch `Dictionary`. The calculation is
different depending on whether the element is a line (no tap) or a transformer (tap present).
"""
function _series_susceptance(branches)
susceptance = map(_branch_susceptance, branches)
return collect(susceptance)
end
function _branch_susceptance(b)::Float64
if b.tap === missing
return -1 / b.reactance
end
return imag(1 / ((b.resistance + b.reactance * 1im) * (b.tap * exp(b.angle * 1im))))
end
"""
_incidence(buses, branches) -> SparseMatrix
Returns the sparse edge-node incidence matrix related to the buses and branches used as
inputs. Matrix axes correspond to `(keys(branches), keys(buses))`
"""
function _incidence(buses, branches)
n_buses = length(buses)
n_branches = length(branches)
# Define the mapping of buses/branches to the incidence/PTDF matrix
bus_lookup = _make_ax_ref(buses)
# Compute incidence matrix
A_to = sparse(
1:n_branches,
[bus_lookup[b.to_bus] for b in branches],
fill(-1, n_branches),
n_branches,
n_buses
)
A_from = sparse(
1:n_branches,
[bus_lookup[b.from_bus] for b in branches],
fill(1, n_branches),
n_branches,
n_buses
)
incid_matrix = A_to + A_from
return incid_matrix
end
function _make_ax_ref(ax::Dictionary)
return Dictionary(keys(ax), 1:length(ax))
end
"""
compute_lodf(system, branch_names_out) -> KeyedArray
compute_lodf(system::System, ptdf_matrix, branch_names_out) -> KeyedArray
compute_lodf(buses, branches, ptdf, branch_names_out) -> KeyedArray
Returns the `M*O` DC-Line Outage Distribution Factor (DC-LODF) matrix of the network.
**Important Note:** In the current implementation, we use `lodf` only if the contingency
scenario does not have any line coming in service. We can also use this function if we want
to ignore the lines coming in service.
# Inputs
- `buses::Buses`
- `branches::Branches`
- `ptdf_matrix`: The pre-calculated PTDF matrix of the system
- `branch_names_out`: The names of the branches that are going out in the contingency scenario.
# Output
- The LODF matrix as a `KeyedArray`. The axes are the branch names and `branch_names_out`.
!!! note
The resulting LODF matrix is sensitive to the input PTDF matrix. Using a thresholded
PTDF as input might lead to imprecisions in contrast to using the full PTDF.
"""
function compute_lodf(system::System, branch_names_out)
ptdf_matrix = get_ptdf(system)
ismissing(ptdf_matrix) && throw(ArgumentError("System PTDF is missing."))
return compute_lodf(system, ptdf_matrix, branch_names_out)
end
function compute_lodf(system::System, ptdf_matrix, branch_names_out)
buses = get_buses(system)
branches = get_branches(system)
return compute_lodf(buses, branches, ptdf_matrix, branch_names_out)
end
function compute_lodf(buses::Buses, branches::Branches, ptdf_matrix, branch_names_out)
branch_out_names = collect(filter(in(branch_names_out), keys(branches)))
branches_out = getindices(branches, branch_out_names)
if length(branch_out_names) < length(unique(branch_names_out))
@debug("Some of the lines to go out were not found in the line data.")
end
if isempty(branches_out)
@debug(
"All the lines to go out are already out of service.
You can ignore this contingency."
)
return KeyedArray(Matrix{Float64}(undef, 0, 0), (String[], Int[]))
end
incid_out = _incidence(buses, branches_out)
branch_names = collect(keys(branches))
branch_lookup = _make_ax_ref(branches)
# Our monitored lines are all the lines
ptdf_mo = ptdf_matrix.data * incid_out'
# Indices of the branches going out
ind_br_out = [branch_lookup[b] for b in branch_out_names]
ptdf_oo = ptdf_mo[ind_br_out, :]
lodf_matrix = ptdf_mo * inv(I - ptdf_oo)
# Discard any name that wasn't matched, and ensure the order is in line with the PSSE
lodf_matrix = KeyedArray(lodf_matrix, (branch_names, branch_out_names))
# If a monitored line is going out, manually correct LODF values so that the
# post-contingency flow is zero
for br in branch_out_names
if br in branch_names
_correct_lodf!(lodf_matrix, br)
end
end
return lodf_matrix
end
"""
_correct_lodf!(lodf_matrix::KeyedArray, br)
Sets the LODF row corresponding to branch `br` to zero, except for the element `(br, br)`,
which is set to -1. This is to ensure the post-contingency flow on a line that is going out
and is also monitored is set to zero.
"""
function _correct_lodf!(lodf_matrix::KeyedArray, br)
lodf_matrix(br, :) .= zeros(size(lodf_matrix(br, :)))
lodf_matrix[Key(br), Key(br)] = -1.0
return lodf_matrix
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 14180 | const MARKET_WIDE_ZONE = -9999
const BidName = InlineString31
const ZoneNum = Int64
"""
$TYPEDEF
Type defining a market zone. The `Zone` is identified by a number. The other fields contain
the service requirements for the zone. Requirements are given in `pu` assuming a base power
of 100MW.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct Zone
"Zone number"
number::ZoneNum
"Zonal regulation requirement (pu)"
regulation::Float64
"Zonal operating reserve requirement (regulation + spinning + supplemental) (pu)"
operating_reserve::Float64
"Zonal good utility practice requirement (regulation + spinning) (pu)"
good_utility::Float64
end
const Zones = Dictionary{ZoneNum, Zone}
const UnitCode = Int64
"""
$TYPEDEF
Type for static generator attributes (i.e. things that describe a generator that are not time
series data). Parameters given in `pu` assume a base power of 100MW.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct Generator
"Generator id/unit code"
unit_code::UnitCode
"Number of the zone the generator is located in"
zone::Int
"Cost of turning on the generator (\$)"
startup_cost::Float64
"Cost of turning off the generator (\$)"
shutdown_cost::Float64
"Cost of the generator being on but not producing any MW (\$ /hour)"
no_load_cost::Float64
"Minimum time the generator has to be committed for (hours)"
min_uptime::Float64
"Minimum time the generator has to be off for (hours)"
min_downtime::Float64
"Rate at which the generator can increase generation (pu/minute)"
ramp_up::Float64
"Rate at which the generator can decrease generation (pu/minute)"
ramp_down::Float64
"Symbol describing the technology of the generator"
technology::Symbol
end
const Generators = Dictionary{UnitCode, Generator}
const BusName = InlineString15
"""
$TYPEDEF
Type for static bus attributes.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct Bus
"Bus name"
name::BusName
"Base voltage (kV)"
base_voltage::Float64
end
const Buses = Dictionary{BusName, Bus}
const BranchName = InlineString31
"""
$TYPEDEF
Type for static branch attributes. Branches may have between 0 and 2 break
points which is why the `break_points` and `penalties` fields contain variable length `Tuple`s.
Fields:
$TYPEDFIELDS
"""
struct Branch
"Branch long name"
name::BranchName
"Name of the bus the branch goes to"
to_bus::BusName
"Name of the bus the branch goes from"
from_bus::BusName
"Power flow limit for the base case (pu)"
rate_a::Float64
"Power flow limit for contingency scenario (pu)"
rate_b::Float64
"Boolean defining whether the branch is monitored"
is_monitored::Bool
"""
Break points of the branch. Branches can have 0, 1, or 2 break points. Zeros indicate
no break point
"""
break_points::Tuple{Float64, Float64}
"Price penalties for each of the break points of the branch (\$)"
penalties::Tuple{Float64, Float64}
"Resistance of the branch (pu)"
resistance::Float64
"Reactance of the branch (pu)"
reactance::Float64
"Boolean indicating whether the branch is a transformer"
is_transformer::Bool
"Ratio between the nominal winding one and two voltages of the transformer"
tap::Union{Missing, Float64}
"Phase shift angle (radians)"
angle::Union{Missing, Float64}
end
const Branches = Dictionary{BranchName, Branch}
"""
Constructors for a `Branch`. The user has the option to define a `Branch` as a line e.g.
```
line1 = Branch("1", "A", "B", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0)
```
where the final two values (`resistance` and `reactance`) can be left unspecified. Or the
user can define a `Branch` as a transformer:
```
transformer1 = Branch(
"4", "A", "C", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0, 0.5, 30.0
)
```
where two extra parameters, `tap` and `angle`, are provided at the end.
"""
function Branch(
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance=0.0,
reactance=0.0
)
tap = missing
angle = missing
is_transformer = false
return Branch(
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance,
reactance,
is_transformer,
tap,
angle
)
end
function Branch(
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance,
reactance,
tap::Float64,
angle::Float64
)
is_transformer = true
return Branch(
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance,
reactance,
is_transformer,
tap,
angle
)
end
function Branch(;
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance,
reactance,
tap=missing,
angle=missing
)
if ismissing(tap) && ismissing(angle)
is_transformer = false
elseif !ismissing(tap) && !ismissing(angle)
is_transformer = true
else
throw(ArgumentError("Transformers must have non-missing values for both `tap` and `angle`. Got `tap=$tap, angle=$angle`."))
end
return Branch(
name,
to_bus,
from_bus,
rate_a,
rate_b,
is_monitored,
break_points,
penalties,
resistance,
reactance,
is_transformer,
tap,
angle
)
end
###### Time Series types ######
"""
$TYPEDEF
Generator related time series data that is needed for both the day-ahead and real-time formulations.
Values given in `pu` assume a base power of 100MW.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct GeneratorTimeSeries
"Generation of the generator at the start of the time period (pu)"
initial_generation::KeyedArray{Float64, 1}
"Generator offer curves. `KeyedArray` where the axis keys are `generator names x datetimes`"
offer_curve::KeyedArray{Vector{Tuple{Float64, Float64}}, 2}
"Generator minimum output in the ancillary services market (pu)"
regulation_min::KeyedArray{Float64, 2}
"Generator maximum output in the ancillary services market (pu)"
regulation_max::KeyedArray{Float64, 2}
"Generator minimum output (pu)"
pmin::KeyedArray{Float64, 2}
"Generator maximum output (pu)"
pmax::KeyedArray{Float64, 2}
"""
Ancillary services regulation reserve offer prices (\$ /pu).
Generators not providing the service will have `missing` offer data.
"""
regulation_offers::KeyedArray{Union{Missing, Float64}, 2}
"""
Ancillary services spinning reserve offer prices (\$ /pu).
Generators not providing the service will have `missing` offer data.
"""
spinning_offers::KeyedArray{Union{Missing, Float64}, 2}
"""
Ancillary services online supplemental reserve offer prices (\$ /pu).
Generators not providing the service will have `missing` offer data.
"""
on_supplemental_offers::KeyedArray{Union{Missing, Float64}, 2}
"""
Ancillary services offline supplemental reserve offer prices (\$ /pu).
Generators not providing the service will have `missing` offer data.
"""
off_supplemental_offers::KeyedArray{Union{Missing, Float64}, 2}
end
"""
$TYPEDEF
Abstract type for storing time series of generator status information.
"""
abstract type GeneratorStatus end
"""
$TYPEDEF
Generator status time series data needed for the day-ahead formulation.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct GeneratorStatusDA <: GeneratorStatus
"Hours each generator has been at its current commitment status at the start of the day"
hours_at_status::KeyedArray{Float64, 1}
"Flag indicating if the generator is available to be committed in each hour"
availability::KeyedArray{Bool, 2}
"Flag indicating if the generator must be committed in each hour"
must_run::KeyedArray{Bool, 2}
end
"""
$TYPEDEF
Generator status time series data needed for the real-time formulation.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef struct GeneratorStatusRT <: GeneratorStatus
"Generator commitment status indicated by a `Bool`"
commitment::KeyedArray{Bool, 2}
"Generator regulation commitment status indicated by a `Bool`"
regulation_commitment::KeyedArray{Bool, 2}
end
"""
System
The abstract type for representing the whole power system including topology, static
components and their attributes, and time series data.
Topology: `Dictionaries` linking generators, loads, and bids (if present) to buses.
System wide static components and grid matrices: zones, buses, generators, branches, LODF and PTDF.
Time series data: all the time series associated with generators, loads and bids. All stored
as `KeyedArray`s of `ids x datetimes`.
"""
abstract type System end
"""
$TYPEDEF
Subtype of a `System` for modelling the day-ahead market.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef mutable struct SystemDA <: System
"`Dictionary` where the keys are bus names and the values are generator ids at that bus"
gens_per_bus::Dictionary{BusName, Vector{Int}}
"`Dictionary` where the keys are bus names and the values are increment bid ids at that bus"
incs_per_bus::Dictionary{BusName, Vector{BidName}}
"`Dictionary` where the keys are bus names and the values are decrement bid ids at that bus"
decs_per_bus::Dictionary{BusName, Vector{BidName}}
"""
`Dictionary` where the keys are bus names and the values are price sensitive load bid
ids at that bus
"""
psls_per_bus::Dictionary{BusName, Vector{BidName}}
"`Dictionary` where the keys are bus names and the values are load ids at that bus"
loads_per_bus::Dictionary{BusName, Vector{BidName}}
"Zones in the `System`, which will also include a `Zone` entry for the market wide zone"
zones::Zones
"Buses in the `System` indexed by bus name"
buses::Buses
"Generators in the `System` indexed by unit code"
generators::Generators
"Branches in the `System` indexed by branch name"
branches::Branches
"""
The line outage distribution factor matrix of the system for a set of contingencies given
by the keys of the `Dictionary`. Each entry is a `KeyedArray` with axis keys
`branch names x branch on outage`
"""
lodfs::Dictionary{String, KeyedArray{Float64, 2}}
"""
Power transfer distribution factor of the system. `KeyedArray` where the axis keys are
`branch names x bus names`
"""
ptdf::Union{KeyedArray{Float64, 2}, Missing}
# Generator related time series
"Generator related time series data"
generator_time_series::GeneratorTimeSeries
"Generator status time series needed for the day-ahead formulation"
generator_status::GeneratorStatusDA
# Load time series
"Load time series data. `KeyedArray` where the axis keys are `load ids x datetimes`"
loads::KeyedArray{Float64, 2}
# Virtuals/PSD time series
"Increment bids time series data. `KeyedArray` where the axis keys are `bid ids x datetimes`"
increments::KeyedArray{Vector{Tuple{Float64, Float64}}, 2}
"Decrement bids time series data. `KeyedArray` where the axis keys are `bid ids x datetimes`"
decrements::KeyedArray{Vector{Tuple{Float64, Float64}}, 2}
"Price sensitive load bids time series data. `KeyedArray` where the axis keys are `bid ids x datetimes`"
price_sensitive_loads::KeyedArray{Vector{Tuple{Float64, Float64}}, 2}
end
"""
$TYPEDEF
Subtype of a `System` for modelling the real-time market.
Fields:
$TYPEDFIELDS
"""
Base.@kwdef mutable struct SystemRT <: System
"`Dictionary` where the keys are bus names and the values are generator ids at that bus"
gens_per_bus::Dictionary{BusName, Vector{Int}}
"`Dictionary` where the keys are bus names and the values are load ids at that bus"
loads_per_bus::Dictionary{BusName, Vector{BidName}}
"Zones in the `System`, which will also include a `Zone` entry for the market wide zone"
zones::Zones
"Buses in the `System` indexed by bus name"
buses::Buses
"Generators in the `System` indexed by unit code"
generators::Generators
"Branches in the `System` indexed by branch name"
branches::Branches
"""
The line outage distribution factor matrix of the system for a set of contingencies given
by the keys of the `Dictionary`. Each entry is a `KeyedArray` with axis keys
`branch names x branch on outage`
"""
lodfs::Dictionary{String, KeyedArray{Float64, 2}}
"""
Power transfer distribution factor of the system. `KeyedArray` where the axis keys are
`branch names x bus names`
"""
ptdf::Union{KeyedArray{Float64, 2}, Missing}
# Generator related time series
"Generator related time series data"
generator_time_series::GeneratorTimeSeries
"Generator status time series needed for the real-time formulation"
generator_status::GeneratorStatusRT
# Load time series
"Load time series data. `KeyedArray` where the axis keys are `load ids x datetimes`"
loads::KeyedArray{Float64, 2}
end
function Base.show(io::IO, ::MIME"text/plain", system::T) where {T <: System}
Base.summary(io, system)
get(io, :compact, false) && return nothing
z = length(system.zones) - 1
print(io, " with $z Zones")
for c in [:buses, :generators, :branches]
l = length(getproperty(system, c))
print(io, ", $l $(c)")
end
print(io, "\n")
print(io, "Included time series: ")
for (name, type) in zip(fieldnames(T), fieldtypes(T))
if name == last(fieldnames(T))
print(io, "$name")
elseif type <: Union{GeneratorTimeSeries, <:GeneratorStatus}
for name in fieldnames(type)
print(io, "$name, ")
end
elseif type <: KeyedArray && name != :ptdf
print(io, "$name, ")
end
end
return nothing
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 719 | # This file contains tests related to the matrix block inversion procedure
@testset "Block matrix inversion" begin
big_mat_inv = FullNetworkSystems.big_mat_inv
@testset "fallback to `inv`" begin
n = 1000
M = randn(n, n)
# default block size should be >1000, so should fallback to `inv` here
@test big_mat_inv(M) == inv(M)
end
@testset "use block algorithm" begin
n = 1000
for _ in 1:3
M = randn(n, n)
@test inv(M) ≈ big_mat_inv(M; block_size=500) rtol=1e-3
end
n = 2000
for _ in 1:3
M = randn(n, n)
@test inv(M) ≈ big_mat_inv(M; block_size=1800) rtol=1e-3
end
end
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 9476 | function _to_branch(nt)
return Branch(;
nt...,
tap=missing,
angle=missing,
# fill unused fields with zeroes
rate_a=0,
rate_b=0,
is_monitored=false,
break_points=(0,0),
penalties=(0,0),
)
end
branch_nt = NamedTuple{(:name, :to_bus, :from_bus, :resistance, :reactance)}.([
("branch_1", "bus_2", "bus_1", 0.01938, 0.05917),
("branch_2", "bus_5", "bus_1", 0.05403, 0.22304),
("branch_3", "bus_3", "bus_2", 0.04699, 0.19797),
("branch_4", "bus_4", "bus_2", 0.05811, 0.17632),
("branch_5", "bus_5", "bus_2", 0.05695, 0.17388),
("branch_6", "bus_4", "bus_3", 0.06701, 0.17103),
("branch_7", "bus_5", "bus_4", 0.01335, 0.04211),
("branch_8", "bus_7", "bus_4", 0.0, 0.20912),
("branch_9", "bus_9", "bus_4", 0.0, 0.55618),
("branch_10", "bus_6", "bus_5", 0.0, 0.25202),
("branch_11", "bus_11", "bus_6", 0.09498, 0.1989),
("branch_12", "bus_12", "bus_6", 0.12291, 0.25581),
("branch_13", "bus_13", "bus_6", 0.06615, 0.13027),
("branch_14", "bus_8", "bus_7", 0.0, 0.17615),
("branch_15", "bus_9", "bus_7", 0.0, 0.11001),
("branch_16", "bus_10", "bus_9", 0.03181, 0.0845),
("branch_17", "bus_14", "bus_9", 0.12711, 0.27038),
("branch_18", "bus_11", "bus_10", 0.08205, 0.19207),
("branch_19", "bus_13", "bus_12", 0.22092, 0.19988),
("branch_20", "bus_14", "bus_13", 0.17093, 0.34802),
])
branches = index(b -> getfield(b, :name), _to_branch.(branch_nt))
branch_names = collect(keys(branches))
bus_names = string.("bus_", collect(1:14))
buses = Buses(bus_names, Bus.(bus_names, 1))
function _to_transformer(br)
return Branch(
name=string(br.name, "_T"),
to_bus=br.to_bus,
from_bus=br.from_bus,
rate_a=br.rate_a,
rate_b=br.rate_b,
is_monitored=br.is_monitored,
break_points=br.break_points,
penalties=br.penalties,
resistance=br.resistance,
reactance=br.reactance,
tap=1,
angle=1,
)
end
@testset "matrices" begin
@testset "PTDF and incidence" begin
incidence = FullNetworkSystems._incidence(buses, branches)
@test size(incidence) == (20, 14)
@test incidence isa SparseMatrixCSC
@testset "no transformers" begin
ptdf_all_lines = compute_ptdf(buses, branches)
# PTDF should be a branches x buses KeyedArray
@test size(ptdf_all_lines) == (20, 14)
@test ptdf_all_lines isa KeyedArray
# Test if axes and lookup are correct
@test axiskeys(ptdf_all_lines) == (branch_names, bus_names)
@testset "reference_bus" begin
@test all(≈(0.0; atol=1e-3), ptdf_all_lines(:, "bus_1"))
@test any(>(0.0 + 1e-3), ptdf_all_lines(:, "bus_5"))
ptdf_bus_5 = compute_ptdf(buses, branches, reference_bus="bus_5")
@test all(≈(0.0; atol=1e-3), ptdf_bus_5(:, "bus_5"))
@test any(>(0.0 + 1e-3), ptdf_bus_5(:, "bus_1"))
@test_throws(
ArgumentError("Reference bus 'not_here' not found."),
compute_ptdf(buses, branches, reference_bus="not_here"),
)
end
# The tests based on "Direct Calculation of Line Outage Distribution Factors" by
# Guo et al. involve a PTDF multiplied by an incidence matrix, so we multiply the
# PTDF by the incidence and then test for the specific elements that are shown in
# the paper.
ptdf_paper = ptdf_all_lines * incidence'
@test ptdf_paper[2, 2] ≈ 0.3894 atol = 1e-3
@test ptdf_paper[2, 6] ≈ 0.0790 atol = 1e-3
@test ptdf_paper[2, 11] ≈ -0.0092 atol = 1e-3
@test ptdf_paper[6, 2] ≈ 0.1031 atol = 1e-3
@test ptdf_paper[6, 6] ≈ 0.6193 atol = 1e-3
@test ptdf_paper[6, 11] ≈ 0.0078 atol = 1e-3
@test ptdf_paper[11, 2] ≈ -0.0103 atol = 1e-3
@test ptdf_paper[11, 6] ≈ 0.0067 atol = 1e-3
@test ptdf_paper[11, 11] ≈ 0.7407 atol = 1e-3
end
@testset "with_transformers" begin
new_branches = _to_branch.(branch_nt)
new_branches[1] = _to_transformer(new_branches[1])
new_branches[2] = _to_transformer(new_branches[2])
branches_with_transformers = index(b -> getfield(b, :name), new_branches)
incid_w_tr = FullNetworkSystems._incidence(buses, branches_with_transformers)
@test incid_w_tr == incidence
ptdf_w_tr = compute_ptdf(buses, branches_with_transformers)
bt_names = collect(keys(branches_with_transformers))
@test axiskeys(ptdf_w_tr) == (bt_names, bus_names)
ptdf_paper = ptdf_w_tr * incid_w_tr'
# Transformer branches are calculated differently
@test ptdf_paper[2, 2] ≈ 0.3399 atol = 1e-3
# Lines remain the same
@test ptdf_paper[11, 11] ≈ 0.7407 atol = 1e-3
end
end
@testset "LODF" begin
ptdf_mat = FullNetworkSystems.compute_ptdf(buses, branches)
branch_names_out = ["branch_2", "branch_6", "branch_11"]
lodf_mat = compute_lodf(buses, branches, ptdf_mat, branch_names_out)
@test axiskeys(lodf_mat) == (branch_names, branch_names_out)
# Based on "Direct Calculation of Line Outage Distribution Factors" by Guo et al.
@test lodf_mat[5, 1] ≈ 0.5551 atol = 1e-3
@test lodf_mat[5, 2] ≈ 0.4511 atol = 1e-3
@test lodf_mat[5, 3] ≈ -0.0637 atol = 1e-3
@test lodf_mat[13, 1] ≈ -0.0120 atol = 1e-3
@test lodf_mat[13, 2] ≈ 0.0121 atol = 1e-3
@test lodf_mat[13, 3] ≈ 0.3159 atol = 1e-3
@testset "LODF values when a monitored line goes out" begin
# Lines 2, 6, and 11 are going out, but are also monitored. Check if their
# post-contingency flow will be set to zero considering an arbitrary `pnet`.
pnet = KeyedArray([fill(1.0, 7); fill(-1.0, 7)], bus_names)
fl = KeyedArray(
[sum(ptdf_mat(m, n) * pnet(n) for n in bus_names) for m in branch_names],
branch_names
)
flc = KeyedArray(
[
fl(m) + sum(lodf_mat(m, l) * fl(l) for l in branch_names_out)
for m in branch_names
],
branch_names
)
@test all(==(0), flc(branch_names_out))
@test all(!=(0), flc(setdiff(branch_names, branch_names_out)))
end
@testset "LODF is consistent for different input orders" begin
lodf1 = compute_lodf(buses, branches, ptdf_mat, ["branch_2", "branch_6"])
lodf2 = compute_lodf(buses, branches, ptdf_mat, ["branch_6", "branch_2"])
for i in axiskeys(ptdf_mat, 1), j in ["branch_2", "branch_6"]
@test lodf1(i, j) == lodf2(i, j)
end
end
end
@testset "From system" begin
empty_float_matrix = KeyedArray(fill(0.0, 0, 0), (String[], String[]))
empty_missing_float_matrix = KeyedArray(
reshape(Union{Float64, Missing}[], 0, 0),
(String[], String[]),
)
sys = SystemRT(
buses=buses,
branches=branches,
ptdf=missing,
lodfs=Dictionary(),
# Fill in the rest with nonsense (unused)
gens_per_bus=Dictionary(),
loads_per_bus=Dictionary(),
zones=Zones(),
generators=Generators(),
generator_time_series=GeneratorTimeSeries(
initial_generation=KeyedArray(Float64[], String[]),
offer_curve= KeyedArray(fill([(1.0, 1.0)], 0,0), (String[], String[])),
regulation_min=empty_float_matrix,
regulation_max=empty_float_matrix,
pmin=empty_float_matrix,
pmax=empty_float_matrix,
regulation_offers=empty_missing_float_matrix,
spinning_offers=empty_missing_float_matrix,
on_supplemental_offers=empty_missing_float_matrix,
off_supplemental_offers=empty_missing_float_matrix,
),
generator_status=GeneratorStatusRT(
commitment=KeyedArray(falses(0, 0), (String[], String[])),
regulation_commitment=KeyedArray(falses(0, 0), (String[], String[])),
),
loads=empty_float_matrix,
)
@test get_ptdf(sys) === missing
@test get_lodfs(sys) == Dictionary()
@test compute_ptdf(sys) == compute_ptdf(buses, branches) == retrieve_ptdf(sys)
# Double check nothing has set the system PTDF
@test get_ptdf(sys) === missing
@test_throws(
ArgumentError("System PTDF is missing."),
compute_lodf(sys, ["branch_2", "branch_6", "branch_11"])
)
ptdf_sys = compute_ptdf(sys)
lodf_df = compute_lodf(
buses,
branches,
ptdf_sys,
["branch_2", "branch_6", "branch_11"],
)
lodf_input_mat = compute_lodf(sys, ptdf_sys, ["branch_2", "branch_6", "branch_11"])
# Add PTDF to system
sys.ptdf = ptdf_sys
lodf_sys = compute_lodf(sys, ["branch_2", "branch_6", "branch_11"])
@test lodf_sys == lodf_input_mat == lodf_df
end
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 250 | using AxisKeys
using Dates
using Dictionaries
using FullNetworkSystems
using Random: randstring
using SparseArrays
using Test
@testset "FullNetworkSystems.jl" begin
include("system.jl")
include("block_inv.jl")
include("matrices.jl")
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | code | 13095 | @testset "system.jl" begin
@testset "Zone" begin
zone1 = Zone(number=1, regulation=1.0, operating_reserve=1.0, good_utility=1.0)
@test zone1 isa Zone
end
@testset "Generator" begin
gen1 = Generator(
unit_code=111,
zone=1,
startup_cost=0.0,
shutdown_cost=1.0,
no_load_cost=1.0,
min_uptime=24.0,
min_downtime=24.0,
ramp_up=2.0,
ramp_down=2.0,
technology=:tech
)
@test gen1 isa Generator
end
@testset "Bus" begin
bus1 = Bus(name="A", base_voltage=100.0)
@test bus1 isa Bus
end
@testset "Branch" begin
branch1 = Branch(
name="1",
to_bus="A",
from_bus="C",
rate_a=10.0,
rate_b=10.0,
is_monitored=true,
break_points=(100.0, 102.0),
penalties=(5.0, 6.0),
resistance=1.0,
reactance=1.0
)
@test branch1 isa Branch
@test !branch1.is_transformer
branch2 = Branch(
"2",
"A",
"C",
10.0,
10.0,
true,
(100.0, 102.0),
(5.0, 6.0)
)
@test branch2 isa Branch
@test !branch2.is_transformer
transformer1 = Branch(
name="1",
to_bus="A",
from_bus="C",
rate_a=10.0,
rate_b=10.0,
is_monitored=true,
break_points=(100.0, 102.0),
penalties=(5.0, 6.0),
resistance=1.0,
reactance=1.0,
tap=0.5,
angle=30.0
)
@test transformer1 isa Branch
end
@testset "System" begin
zone1 = Zone(1, 1.0, 1.0, 1.0)
zone2 = Zone(2, 4.0, 2.0, 4.0)
zone_market = Zone(-9999, 3.0, 3.0, 3.0)
zones = Dictionary([1, 2, -9999], [zone1, zone2, zone_market])
gen_ids = collect(111:1:120)
gen_types = map(gen_ids) do id
Generator(id, zone1.number, 0.0, 1.0, 1.0, 24.0, 24.0, 2.0, 2.0, :tech)
end
generators = Dictionary(gen_ids, gen_types)
bus_names = FullNetworkSystems.BusName["A", "B", "C"]
bus_types = map(bus_names) do name
Bus(name, 100.0)
end
buses = Dictionary(bus_names, bus_types)
branch_names = string.([1,2,3,4])
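# Branch "2" is unmonitored, branch "3" has no break points, and branch "4" has tap/angle set, making it a transformer.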
branches = Dictionary(
branch_names,
[
Branch("1", "A", "B", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0),
Branch("2", "B", "C", 10.0, 10.0, false, (100.0, 0.0), (5.0, 0.0), 1.0, 1.0),
Branch("3", "C", "A", 10.0, 10.0, true, (0.0, 0.0), (0.0, 0.0), 1.0, 1.0),
Branch("4", "A", "C", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0, 0.5, 30.0,
)
]
)
# Bid IDs/names should be unique. Here length of IDs is arbitrary.
bid_names(prefix, n) = FullNetworkSystems.BidName.(prefix * "_" * randstring(5) for _ in 1:n)
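# Map each bus to a random selection of generators and to freshly generated bid names.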
gens_per_bus = Dictionary(bus_names, [rand(gen_ids, 3) for _ in bus_names])
incs_per_bus = Dictionary(bus_names, [bid_names("inc", 3) for _ in bus_names])
decs_per_bus = Dictionary(bus_names, [bid_names("dec", 3) for _ in bus_names])
psls_per_bus = Dictionary(bus_names, [bid_names("psl", 3) for _ in bus_names])
loads_per_bus = Dictionary(bus_names, [bid_names("load", 3) for _ in bus_names])
lodfs = Dictionary(
["CONTIN_1"],
[KeyedArray(rand(4, 1); branches=branch_names, branch=[first(branch_names)])]
)
ptdf = KeyedArray(rand(4, 3); row=branch_names, col=bus_names)
ids = gen_ids
datetimes = DateTime(2017, 12, 15):Hour(1):DateTime(2017, 12, 15, 23)
time_series(T=Float64) = KeyedArray(rand(T, length(ids), length(datetimes)); ids, datetimes)
services_time_series() = KeyedArray(vcat(rand(length(ids) - 1, length(datetimes)), fill(missing, 1, length(datetimes))); ids, datetimes)
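# The ancillary-services offers are left missing for the last generator, and the last two generators start at zero output (initially offline).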
initial_generation = KeyedArray([rand(length(ids) - 2); fill(0.0, 2)]; ids)
offer_curve = KeyedArray(fill([(1.0, 100.0)], length(ids), length(datetimes)); ids, datetimes)
regulation_min = time_series()
regulation_max = time_series()
pmin = time_series()
pmax = time_series()
regulation_offers = services_time_series()
spinning_offers = services_time_series()
on_supplemental_offers = services_time_series()
off_supplemental_offers = services_time_series()
generator_time_series = GeneratorTimeSeries(;
initial_generation,
offer_curve,
regulation_min,
regulation_max,
pmin,
pmax,
regulation_offers,
spinning_offers,
on_supplemental_offers,
off_supplemental_offers,
)
hours_at_status = KeyedArray(rand(length(ids)); ids)
availability = time_series(Bool)
must_run = time_series(Bool)
da_generator_status = GeneratorStatusDA(; hours_at_status, availability, must_run)
loads = time_series()
nbids = 8 # arbitrary
bid_time_series(prefix) = KeyedArray(fill([(1.0, 100.0)], nbids, length(datetimes)); ids=bid_names(prefix, nbids), datetimes)
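# Virtual bids (increments, decrements, price-sensitive loads), each with its own set of bid names.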
increments = bid_time_series("inc")
decrements = bid_time_series("dec")
price_sensitive_loads = bid_time_series("psl")
da_system = SystemDA(;
gens_per_bus,
incs_per_bus,
decs_per_bus,
psls_per_bus,
loads_per_bus,
zones,
buses,
generators,
branches,
lodfs,
ptdf,
generator_time_series,
generator_status=da_generator_status,
loads,
increments,
decrements,
price_sensitive_loads,
)
@test da_system isa SystemDA
commitment = time_series(Bool)
regulation_commitment = time_series(Bool)
rt_generator_status = GeneratorStatusRT(; commitment, regulation_commitment)
rt_system = SystemRT(;
gens_per_bus,
loads_per_bus,
zones,
buses,
generators,
branches,
lodfs,
ptdf,
generator_time_series,
generator_status=rt_generator_status,
loads,
)
@test rt_system isa SystemRT
@testset "System accessor functions" begin
@testset "Common accessors $T" for (system, T) in (
(da_system, SystemDA), (rt_system, SystemRT)
)
@test get_datetimes(system) == datetimes
@test get_zones(system) == zones
@test get_regulation_requirements(system) == Dictionary([1, 2, -9999], [1.0, 4.0, 3.0])
@test get_operating_reserve_requirements(system) == Dictionary([1, 2, -9999], [1.0, 2.0, 3.0])
@test get_good_utility_requirements(system) == Dictionary([1, 2, -9999], [1.0, 4.0, 3.0])
@test get_buses(system) == buses
@test get_generators(system) == generators
@test get_branches(system) == branches
@test get_lines(system) == Dictionary(
["1", "2", "3"], [branches["1"], branches["2"], branches["3"]]
)
@test get_transformers(system) == Dictionary(["4"], [branches["4"]])
@test get_gens_per_bus(system) == gens_per_bus
@test get_loads_per_bus(system) == loads_per_bus
@test get_ptdf(system) == ptdf
@test get_lodfs(system) == lodfs
@test get_initial_generation(system) == initial_generation
@test get_loads(system) == loads
@test get_offer_curve(system) == offer_curve
@test get_pmin(system) == pmin
@test get_pmax(system) == pmax
@test get_regulation_min(system) == regulation_min
@test get_regulation_max(system) == regulation_max
@test skipmissing(get_regulation_offers(system)) == skipmissing(regulation_offers)
@test skipmissing(get_spinning_offers(system)) == skipmissing(spinning_offers)
@test skipmissing(get_on_supplemental_offers(system)) == skipmissing(on_supplemental_offers)
@test skipmissing(get_off_supplemental_offers(system)) == skipmissing(off_supplemental_offers)
gens_by_zone = gens_per_zone(system)
@test issetequal(keys(gens_by_zone), [1, FullNetworkSystems.MARKET_WIDE_ZONE])
for (_, v) in gens_by_zone
@test v == gen_ids
end
zero_bp, one_bp, two_bp = branches_by_breakpoints(da_system)
@test zero_bp == ["3"]
@test one_bp == String[] # branch "2" has one break point but is unmonitored, so it is excluded
@test two_bp == ["1", "4"]
# Also test on a system with a 1-breakpoint branch
da_system.branches = Dictionary(
branch_names,
[
Branch("1", "A", "B", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0),
Branch("2", "B", "C", 10.0, 10.0, true, (100.0, 0.0), (5.0, 0.0), 1.0, 1.0),
Branch("3", "C", "A", 10.0, 10.0, true, (0.0, 0.0), (5.0, 6.0), 1.0, 1.0),
Branch("4", "A", "C", 10.0, 10.0, true, (100.0, 102.0), (5.0, 6.0), 1.0, 1.0),
]
)
zero_bp, one_bp, two_bp = branches_by_breakpoints(da_system)
@test zero_bp == ["3"]
@test one_bp == ["2"]
@test two_bp == ["1", "4"]
@test eltype(zero_bp) == eltype(one_bp) == eltype(two_bp) == FullNetworkSystems.BranchName
da_system.branches = branches # reset
# Check that we can remove the PTDF
system.ptdf = missing
@test system.ptdf === missing
@testset "deprecated" begin
@test (@test_deprecated get_lodf(system)) == lodfs
@test (@test_deprecated get_regmin(system)) == regulation_min
@test (@test_deprecated get_regmax(system)) == regulation_max
@test (@test_deprecated get_load(system)) == loads
@test (@test_deprecated skipmissing(get_regulation(system))) == skipmissing(regulation_offers)
@test (@test_deprecated skipmissing(get_spinning(system))) == skipmissing(spinning_offers)
@test (@test_deprecated skipmissing(get_supplemental_on(system))) == skipmissing(on_supplemental_offers)
@test (@test_deprecated skipmissing(get_supplemental_off(system))) == skipmissing(off_supplemental_offers)
end
end
@testset "SystemDA only accessors" begin
@test get_initial_commitment(da_system) == [trues(length(ids) - 2); falses(2)]
@test get_initial_uptime(da_system) == [hours_at_status[1:end-2]..., 0, 0]
@test get_initial_downtime(da_system) == [zeros(length(ids)-2); hours_at_status[end-1:end]...]
@test get_incs_per_bus(da_system) == incs_per_bus
@test get_decs_per_bus(da_system) == decs_per_bus
@test get_psls_per_bus(da_system) == psls_per_bus
@test get_increments(da_system) == increments
@test get_decrements(da_system) == decrements
@test get_price_sensitive_loads(da_system) == price_sensitive_loads
virtuals = get_virtuals(da_system)
@test size(virtuals) == (nbids * 2, length(datetimes))
v_id_prefixes = first.(axiskeys(virtuals, 1), 3)
@test in("inc", v_id_prefixes) && in("dec", v_id_prefixes)
@test get_availability(da_system) == availability
@test get_must_run(da_system) == must_run
@testset "deprecated" begin
@test (@test_deprecated get_bids(da_system, :increment)) == increments
@test (@test_deprecated get_bids(da_system, :decrement)) == decrements
@test (@test_deprecated get_bids(da_system, :price_sensitive_demand)) == price_sensitive_loads
@test (@test_deprecated get_psds_per_bus(da_system)) == psls_per_bus
end
end
@testset "SystemRT only accessors" begin
@test get_commitment(rt_system) == commitment
@test get_regulation_commitment(rt_system) == regulation_commitment
end
end
end
end
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | docs | 937 | # FullNetworkSystems
[](https://invenia.github.io/FullNetworkSystems.jl/stable)
[](https://invenia.github.io/FullNetworkSystems.jl/dev)
[](https://github.com/invenia/FullNetworkSystems.jl/actions/workflows/JuliaNightly.yml?query=branch%3Amain)
[](https://codecov.io/gh/invenia/FullNetworkSystems.jl)
[](https://github.com/invenia/BlueStyle)
[](https://github.com/SciML/ColPrac)
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 1.7.0 | f936c8e94466bc35c479fead9e28b110f5b591c8 | docs | 225 | ```@meta
CurrentModule = FullNetworkSystems
```
# FullNetworkSystems
Documentation for [FullNetworkSystems](https://github.com/invenia/FullNetworkSystems.jl).
```@index
```
```@autodocs
Modules = [FullNetworkSystems]
```
| FullNetworkSystems | https://github.com/invenia/FullNetworkSystems.jl.git |
|
[
"MIT"
] | 0.2.0 | a5c4f933e59401f83e1c36ccfc40e4ac2fd87e73 | code | 647 | using SemanticCaches
using Documenter
DocMeta.setdocmeta!(
SemanticCaches, :DocTestSetup, :(using SemanticCaches); recursive = true)
makedocs(;
modules = [SemanticCaches],
authors = "J S <[email protected]> and contributors",
sitename = "SemanticCaches.jl",
format = Documenter.HTML(;
canonical = "https://svilupp.github.io/SemanticCaches.jl",
edit_link = "main",
assets = String[]
),
pages = [
"Home" => "index.md",
"API Reference" => "api_reference.md"
]
)
deploydocs(;
repo = "github.com/svilupp/SemanticCaches.jl",
devbranch = "main"
)
| SemanticCaches | https://github.com/svilupp/SemanticCaches.jl.git |
|
[
"MIT"
] | 0.2.0 | a5c4f933e59401f83e1c36ccfc40e4ac2fd87e73 | code | 3051 | # # Example how to use caching with PromptingTools.jl
# ## Setup
using PromptingTools
using SemanticCaches
using HTTP
# ## Create Cache Layer
## Define the new caching mechanism as a layer for HTTP
## See documentation [here](https://juliaweb.github.io/HTTP.jl/stable/client/#Quick-Examples)
module MyCache
using HTTP, JSON3
using SemanticCaches
const SEM_CACHE = SemanticCache()
const HASH_CACHE = HashCache()
function cache_layer(handler)
return function (req; cache_key::Union{AbstractString, Nothing} = nothing, kw...)
# only apply the cache layer if the user passed `cache_key`
# we could also use the contents of the payload, eg, `cache_key = get(body, "model", "unknown")`
if req.method == "POST" && cache_key !== nothing
body = JSON3.read(copy(req.body))
if occursin("v1/chat/completions", req.target)
## We're in chat completion endpoint
input = join([m["content"] for m in body["messages"]], " ")
elseif occursin("v1/embeddings", req.target)
## We're in embedding endpoint
input = body["input"]
else
## Skip, unknown API
return handler(req; kw...)
end
## Check the cache
@info "Check if we can cache this request ($(length(input)) chars)"
active_cache = length(input) > 5000 ? HASH_CACHE : SEM_CACHE
item = active_cache(cache_key, input; verbose = 2) # change verbosity to 0 to disable detailed logs
if !isvalid(item)
@info "Cache miss! Pinging the API"
# pass the request along to the next layer by calling `cache_layer` arg `handler`
resp = handler(req; kw...)
item.output = resp
# Let's remember it for the next time
push!(active_cache, item)
end
## Return the calculated or cached result
return item.output
end
# pass the request along to the next layer by calling `cache_layer` arg `handler`
# also pass along the trailing keyword args `kw...`
return handler(req; kw...)
end
end
# Create a new client with the auth layer added
HTTP.@client [cache_layer]
end # module
# Let's push the layer globally in all HTTP.jl requests
HTTP.pushlayer!(MyCache.cache_layer)
# HTTP.poplayer!() # to remove it later
# ## Profit
# Let's call the API
@time msg = aigenerate("What is the meaning of life?"; http_kwargs = (; cache_key = "key1"))
# The first call will be slow as usual, but any subsequent call should be pretty quick - try it a few times!
# You can also use it for embeddings, eg,
@time msg = aiembed("how is it going?"; http_kwargs = (; cache_key = "key2")) # 0.7s
@time msg = aiembed("how is it going?"; http_kwargs = (; cache_key = "key2")) # 0.02s
# Even with a tiny difference (no question mark), it still picks the right cache
@time msg = aiembed("how is it going"; http_kwargs = (; cache_key = "key2")) # 0.02s
| SemanticCaches | https://github.com/svilupp/SemanticCaches.jl.git |
|
[
"MIT"
] | 0.2.0 | a5c4f933e59401f83e1c36ccfc40e4ac2fd87e73 | code | 523 | module SemanticCaches
using HTTP
using LinearAlgebra
using Dates
using Statistics: mean
using FlashRank
using FlashRank: EmbedderModel
global EMBEDDER::Union{Nothing, EmbedderModel} = nothing
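# Shared FlashRank embedding model; set in __init__ once the model artifacts are available.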
export SemanticCache, CachedItem, HashCache
include("types.jl")
include("similarity_lookup.jl")
function __init__()
## Initialize the embedding model
global EMBEDDER
EMBEDDER = try
EmbedderModel(:tiny_embed)
catch e
# Probably a CI issue!
@warn "Error in DataDeps: $e"
end
end
end
| SemanticCaches | https://github.com/svilupp/SemanticCaches.jl.git |