licenses (sequencelengths 1-3) | version (stringclasses 677 values) | tree_hash (stringlengths 40) | path (stringclasses 1 value) | type (stringclasses 2 values) | size (stringlengths 2-8) | text (stringlengths 25-67.1M) | package_name (stringlengths 2-41) | repo (stringlengths 33-86) |
---|---|---|---|---|---|---|---|---|
[
"MIT"
] | 1.0.2 | 377585d2d44d9717448ddb079abe61272ada26b4 | docs | 721 | # DungBase
[Build Status](https://travis-ci.org/yakir12/DungBase.jl)
[codecov](http://codecov.io/github/yakir12/DungBase.jl?branch=master)
| DungBase | https://github.com/yakir12/DungBase.jl.git |
|
[
"MIT"
] | 1.0.2 | 377585d2d44d9717448ddb079abe61272ada26b4 | docs | 39 | # DungBase
*Documentation goes here.*
| DungBase | https://github.com/yakir12/DungBase.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | code | 729 | using SampleChainsDynamicHMC
using Documenter
DocMeta.setdocmeta!(SampleChainsDynamicHMC, :DocTestSetup, :(using SampleChainsDynamicHMC); recursive=true)
makedocs(;
modules=[SampleChainsDynamicHMC],
authors="Chad Scherrer <[email protected]> and contributors",
repo="https://github.com/cscherrer/SampleChainsDynamicHMC.jl/blob/{commit}{path}#{line}",
sitename="SampleChainsDynamicHMC.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://cscherrer.github.io/SampleChainsDynamicHMC.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
)
deploydocs(;
repo="github.com/cscherrer/SampleChainsDynamicHMC.jl",
)
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | code | 440 | using TupleVectors
function divergence_indices(chain::DynamicHMCChain)
inds = findall(isdivergence, SampleChains.info(chain))
end
function divergences(chain::DynamicHMCChain)
TupleVector(chain[divergence_indices(chain)])
end
using SampleChainsDynamicHMC.DynamicHMC: TreeStatisticsNUTS
using SampleChainsDynamicHMC.DynamicHMC: is_divergent
isdivergence(tree::TreeStatisticsNUTS) = tree.termination.left == tree.termination.right
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | code | 5573 | module SampleChainsDynamicHMC
using Reexport
@reexport using SampleChains
@reexport using DynamicHMC
using LogDensityProblems
using NestedTuples
using ElasticArrays
using StructArrays
using ConcreteStructs
using TransformVariables
using MappedArrays
using Random
using TupleVectors
using TupleVectors:chainvec
export dynamichmc
@concrete mutable struct DynamicHMCChain{T} <: AbstractChain{T}
samples # :: AbstractVector{T}
logq # log-density for distribution the sample was drawn from
info # Per-sample metadata, type depends on sampler used
meta # Metadata associated with the sample as a whole
state
transform
reporter
end
function DynamicHMCChain(t::TransformVariables.TransformTuple, Q::DynamicHMC.EvaluatedLogDensity, tree_stats, steps, reporter = NoProgressReport())
tQq = TransformVariables.transform(t, Q.q)
T = typeof(tQq)
samples = chainvec(tQq)
logq = chainvec(Q.ℓq)
info = chainvec(tree_stats)
meta = steps
transform = t
return DynamicHMCChain{T}(samples, logq, info, meta, Q, transform, reporter)
end
TupleVectors.summarize(ch::DynamicHMCChain) = summarize(samples(ch))
function gettransform(chain::DynamicHMCChain)
return getfield(chain, :transform)
end
function SampleChains.pushsample!(chain::DynamicHMCChain, Q::DynamicHMC.EvaluatedLogDensity, tree_stats)
push!(samples(chain), transform(gettransform(chain), Q.q))
push!(logq(chain), Q.ℓq)
push!(info(chain), tree_stats)
end
function SampleChains.step!(chain::DynamicHMCChain)
Q, tree_stats = DynamicHMC.mcmc_next_step(getfield(chain, :meta), getfield(chain, :state))
setfield!(chain, :state, Q)
return Q, tree_stats
end
@concrete struct DynamicHMCConfig <: ChainConfig{DynamicHMCChain}
init
warmup_stages
algorithm
reporter
ad_backend
end
# Docs adapted from https://tamaspapp.eu/DynamicHMC.jl/stable/interface/
"""
dynamichmc(
; init = ()
, warmup_stages = DynamicHMC.default_warmup_stages()
, algorithm = DynamicHMC.NUTS()
, reporter = DynamicHMC.NoProgressReport()
, ad_backend = Val(:ForwardDiff)
)
`init`: a `NamedTuple` that can contain the following fields (all of them
optional and provided with reasonable defaults):
- `q`: initial position. Default: random (uniform `[-2,2]` for each coordinate).
- `κ`: kinetic energy specification. Default: Gaussian with identity matrix.
- `ϵ`: a scalar for initial step size, or `nothing` for heuristic finders.
`warmup_stages`: a sequence of warmup stages. See
`DynamicHMC.default_warmup_stages` and
`DynamicHMC.fixed_stepsize_warmup_stages`; the latter requires an `ϵ` in
initialization.
`algorithm`: see `DynamicHMC.NUTS`. It is very unlikely you need to modify this,
except perhaps for the maximum depth.
`reporter`: how progress is reported. This is currently silent by default (see
`DynamicHMC.NoProgressReport`), but this default will likely change in future
releases.
`ad_backend`: The automatic differentiation backend to use for gradient
computation, specified as either a symbol or a `Val` type with a symbol that
refers to an AD package. See [LogDensityProblems.jl](https://tamaspapp.eu/LogDensityProblems.jl/stable/#Automatic-differentiation)
for supported packages, including `ForwardDiff`, `ReverseDiff`, `Zygote`, and `Tracker`.
For more details see https://tamaspapp.eu/DynamicHMC.jl/stable/interface/
# Example
```jldoctest
julia> using LinearAlgebra, ReverseDiff
julia> config = dynamichmc(
warmup_stages=default_warmup_stages(
M=Symmetric, # adapt dense positive definite metric
stepsize_adaptation=DualAveraging(δ=0.9), # target acceptance rate 0.9
doubling_stages=7, # 7-stage metric adaptation
),
reporter=LogProgressReport(), # log progress using Logging
ad_backend=Val(:ReverseDiff), # use ReverseDiff AD package
);
```
"""
function dynamichmc(;
init = ()
, warmup_stages = DynamicHMC.default_warmup_stages()
, algorithm = DynamicHMC.NUTS()
, reporter = DynamicHMC.NoProgressReport()
, ad_backend = Val(:ForwardDiff)
)
DynamicHMCConfig(init, warmup_stages, algorithm, reporter, ad_backend)
end
function SampleChains.newchain(rng::Random.AbstractRNG, config::DynamicHMCConfig, ℓ, tr)
P = LogDensityProblems.TransformedLogDensity(tr, ℓ)
∇P = LogDensityProblems.ADgradient(config.ad_backend, P)
reporter = config.reporter
results = DynamicHMC.mcmc_keep_warmup(rng, ∇P, 0;
initialization = config.init
, warmup_stages = config.warmup_stages
, algorithm = config.algorithm
, reporter = reporter
)
steps = DynamicHMC.mcmc_steps(
results.sampling_logdensity,
results.final_warmup_state,
)
Q = results.final_warmup_state.Q
Q, tree_stats = DynamicHMC.mcmc_next_step(steps, Q)
chain = DynamicHMCChain(tr, Q, tree_stats, steps, reporter)
end
function SampleChains.newchain(config::DynamicHMCConfig, ℓ, tr)
return newchain(Random.GLOBAL_RNG, config, ℓ, tr)
end
function SampleChains.sample!(chain::DynamicHMCChain, n::Int=1000)
reporter = getfield(chain, :reporter)
mcmc_reporter = DynamicHMC.make_mcmc_reporter(reporter, n; currently_warmup = false)
@cleanbreak for j in 1:n
Q, tree_stats = step!(chain)
pushsample!(chain, Q, tree_stats)
DynamicHMC.report(mcmc_reporter, j)
end
return chain
end
end
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | code | 1925 |
using SampleChainsDynamicHMC
using TransformVariables
using Logging
using Test
function ℓ(nt)
z = nt.x/nt.σ
return -z^2 - nt.σ - log(nt.σ)
end
t = as((x=asℝ, σ=asℝ₊))
@testset "single chain" begin
chain = newchain(dynamichmc(), ℓ, t)
@test chain isa SampleChainsDynamicHMC.DynamicHMCChain
@test length(chain) == 1
sample!(chain, 9)
@test length(chain) == 10
sample!(chain, 90)
@test length(chain) == 100
end
@testset "multichain" begin
chains = newchain(4, dynamichmc(), ℓ, t)
@test chains isa MultiChain
chains_chains = getfield(chains, :chains)
@test length(chains_chains) == 4
@test all(x -> length(x) == 1, chains_chains)
sample!(chains, 9)
@test all(x -> length(x) == 10, chains_chains)
sample!(chains, 90)
@test all(x -> length(x) == 100, chains_chains)
samples(chains)
end
using LinearAlgebra
using ReverseDiff
using SampleChainsDynamicHMC.LogDensityProblems
@testset "config options" begin
config = dynamichmc(
warmup_stages=default_warmup_stages(
M=Symmetric, # adapt dense positive definite metric
stepsize_adaptation=DualAveraging(δ=0.9), # target acceptance rate 0.9
doubling_stages=7, # 7-stage metric adaptation
),
reporter=LogProgressReport(), # log progress using Logging
ad_backend=Val(:ReverseDiff), # use ReverseDiff AD package
)
chain = newchain(config, ℓ, t)
@test length(chain) == 1
meta = getfield(chain, :meta)
@test meta.H.κ.M⁻¹ isa Symmetric
@test meta.H.ℓ isa LogDensityProblems.ReverseDiffLogDensity
sample!(chain, 9)
@test length(chain) == 10
sample!(chain, 90)
@test length(chain) == 100
end
@testset "reporting" begin
io = IOBuffer()
chains = with_logger(SimpleLogger(io)) do
newchain(dynamichmc(reporter=LogProgressReport()), ℓ, t)
end
warmup_log = String(take!(io))
@test !isempty(warmup_log)
io = IOBuffer()
with_logger(SimpleLogger(io)) do
sample!(chains, 10)
end
log = String(take!(io))
@test !isempty(log)
end
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | code | 115 | using SampleChainsDynamicHMC
using Test
@testset "SampleChainsDynamicHMC.jl" begin
include("notebook.jl")
end
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | docs | 3459 | # SampleChainsDynamicHMC
[Docs: stable](https://cscherrer.github.io/SampleChainsDynamicHMC.jl/stable)
[Docs: dev](https://cscherrer.github.io/SampleChainsDynamicHMC.jl/dev)
[Build status](https://github.com/cscherrer/SampleChainsDynamicHMC.jl/actions)
[Coverage](https://codecov.io/gh/cscherrer/SampleChainsDynamicHMC.jl)
Setting up:
```julia
julia> using SampleChainsDynamicHMC
julia> using TransformVariables
julia> function ℓ(nt)
z = nt.x/nt.σ
return -z^2 - nt.σ - log(nt.σ)
end
ℓ (generic function with 1 method)
julia> t = as((x=asℝ, σ=asℝ₊))
TransformVariables.TransformTuple{NamedTuple{(:x, :σ), Tuple{TransformVariables.Identity, TransformVariables.ShiftedExp{true, Float64}}}}((x = asℝ, σ = asℝ₊), 2)
```
Initialize and take some samples:
```julia
julia> chain = newchain(DynamicHMCChain, ℓ, t)
1-element Chain with schema (x = Float64, σ = Float64)
(x = -0.66±0.0, σ = 0.65±0.0)
julia> sample!(chain, 9)
10-element Chain with schema (x = Float64, σ = Float64)
(x = -0.36±0.38, σ = 1.26±0.69)
julia> sample!(chain, 90)
100-element Chain with schema (x = Float64, σ = Float64)
(x = -0.32±0.68, σ = 1.06±0.66)
julia> chain[1]
(x = -0.660818661864279, σ = 0.6482824278360845)
julia> chain.x[1:10]
10-element ElasticArrays.ElasticVector{Float64, 0, Vector{Float64}}:
-0.660818661864279
-0.31966349282522916
-0.5030732787889958
-0.27788387641411594
-0.9287874718868021
-0.6260927333733151
0.4303096842134812
-0.3844104968943612
0.05987431572954072
-0.351002647246055
```
Or multiple chains:
```julia
julia> chains = newchain(4, DynamicHMCChain, ℓ, t)
4-element MultiChain with 4 chains and schema (x = Float64, σ = Float64)
(x = -0.38±0.75, σ = 0.76±0.76)
julia> sample!(chains, 9)
40-element MultiChain with 4 chains and schema (x = Float64, σ = Float64)
(x = -0.11±0.73, σ = 0.83±0.8)
julia> sample!(chains, 90)
400-element MultiChain with 4 chains and schema (x = Float64, σ = Float64)
(x = -0.18±0.75, σ = 0.9±0.97)
julia> samples(chains)
400-element TupleVector with schema (x = Float64, σ = Float64)
(x = -0.18±0.75, σ = 0.9±0.97)
julia> getchains(chains) .|> summarize
4-element Vector{NamedTuple{(:x, :σ), Tuple{RealSummary, RealSummary}}}:
(x = -0.22±0.73, σ = 1.4±0.92)
(x = -0.031±0.33, σ = 0.46±0.37)
(x = -0.0214±0.014, σ = 0.07497±0.0044)
(x = -0.46±1.2, σ = 1.7±1.0)
```
A `MultiChain` is still represented much like a single chain, making computations easy:
```julia
julia> chains[1]
(x = -0.36681258114618465, σ = 1.7508963122497017)
julia> chains.x[1:10]
vcat(10-element view(::ElasticArrays.ElasticVector{Float64, 0, Vector{Float64}}, 1:10) with eltype Float64, 0-element view(::ElasticArrays.ElasticVector{Float64, 0, Vector{Float64}}, 1:0) with eltype Float64, 0-element view(::ElasticArrays.ElasticVector{Float64, 0, Vector{Float64}}, 1:0) with eltype Float64, 0-element view(::ElasticArrays.ElasticVector{Float64, 0, Vector{Float64}}, 1:0) with eltype Float64):
-0.36681258114618465
-0.09339967949694516
-0.3089171887973833
-1.5420534117776032
-0.10574714292144685
-0.11312594562766448
-0.008799704824529742
0.5209894936643252
-0.11204122979765113
-1.100922340370071
```
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 0.3.5 | 844be8f8ea59a458ebd65b2c2db20c17b43943e4 | docs | 146 | ```@meta
CurrentModule = SampleChainsDynamicHMC
```
# SampleChainsDynamicHMC
```@index
```
```@autodocs
Modules = [SampleChainsDynamicHMC]
```
| SampleChainsDynamicHMC | https://github.com/cscherrer/SampleChainsDynamicHMC.jl.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | code | 1317 | using Dash
using DashTextareaAutocomplete
const WORDLIST = ["apple", "application", "apartment",
"boat", "banana", "boss",
"coast", "code", "cat"]
app = dash(;external_stylesheets=["https://codepen.io/chriddyp/pen/bWLwgP.css"])
app.layout = html_div(className = "container") do
dashtextareaautocomplete(id = "input",
# value = "initial value",
placeholder = "Type something!",
wordList = WORDLIST,
# common options with their default values
# triggerChar = ":",
# minChar = 1,
style = (minHeight = "100px",),
dropdownStyle = (maxHeight = "120px", overflow = "auto")),
html_div(id = "output"),
html_button("CLICK", id = "btn"),
html_div(id = "output2")
end
callback!(app, Output("output", "children"),
Input("input", "value")) do value
return "You have entered $value"
end
callback!(app, Output("output2", "children"),
Input("btn", "n_clicks"),
State("input", "value")) do clicks, value
return "After click, you have: $value"
end
run_server(app, "0.0.0.0", 8050, debug=true)
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | code | 1011 |
module DashTextareaAutocomplete
using Dash, Pkg.Artifacts
resources_path() = artifact"dash_textarea_autocomplete_resources"
const version = "1.3.2"
include("jl/dashtextareaautocomplete.jl")
function __init__()
DashBase.register_package(
DashBase.ResourcePkg(
"dash_textarea_autocomplete",
resources_path(),
version = version,
[
DashBase.Resource(
relative_package_path = "dash_textarea_autocomplete.min.js",
external_url = "https://unpkg.com/[email protected]/dash_textarea_autocomplete/dash_textarea_autocomplete.min.js",
dynamic = nothing,
async = nothing,
type = :js
),
DashBase.Resource(
relative_package_path = "dash_textarea_autocomplete.min.js.map",
external_url = "https://unpkg.com/[email protected]/dash_textarea_autocomplete/dash_textarea_autocomplete.min.js.map",
dynamic = true,
async = nothing,
type = :js
)
]
)
)
end
end
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | code | 2151 | # AUTO GENERATED FILE - DO NOT EDIT
export dashtextareaautocomplete
"""
dashtextareaautocomplete(;kwargs...)
A DashTextareaAutocomplete component.
DashTextareaAutocomplete
Simple `@webscopeio/react-textarea-autocomplete` wrapper for Dash
enabling auto-completion in multi-line `<textarea>` elements.
Keyword arguments:
- `id` (String; optional): The ID used to identify this component in Dash callbacks.
- `className` (String; optional): Class names of the <textarea> (from `react-textarea-autocomplete`).
- `containerClassName` (String; optional): Class names of the textarea container (from `react-textarea-autocomplete`).
- `containerStyle` (Dict; optional): Styles of the textarea container (from `react-textarea-autocomplete`).
- `dropdownStyle` (Dict; optional): Styles of the dropdown wrapper.
- `itemStyle` (Dict; optional): Styles of the items wrapper.
- `listStyle` (Dict; optional): Style of the list wrapper (from `react-textarea-autocomplete`).
- `loaderStyle` (Dict; optional): Style of the loader wrapper (from `react-textarea-autocomplete`).
- `minChar` (Real; optional): Number of characters that user should type for trigger a suggestion.
Defaults to 1. (from `react-textarea-autocomplete`)
- `placeholder` (String; optional): Provides a hint to the user of what can be entered in the <textarea> field.
- `style` (Dict; optional): Style of the <textarea>.(from `react-textarea-autocomplete`).
- `triggerChar` (String; optional): Character that triggers auto-completion machinery.
Defaults to `:`. (from `react-textarea-autocomplete`)
- `value` (String; optional): The value displayed in the <textarea>.
- `wordList` (Array; required): List of string available for auto-completion.
"""
function dashtextareaautocomplete(; kwargs...)
available_props = Symbol[:id, :className, :containerClassName, :containerStyle, :dropdownStyle, :itemStyle, :listStyle, :loaderStyle, :minChar, :placeholder, :style, :triggerChar, :value, :wordList]
wild_props = Symbol[]
return Component("dashtextareaautocomplete", "DashTextareaAutocomplete", "dash_textarea_autocomplete", available_props, wild_props; kwargs...)
end
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | docs | 6123 | # CONTRIBUTING
This project was generated by the [dash-component-boilerplate](https://github.com/plotly/dash-component-boilerplate) it contains the minimal set of code required to create your own custom Dash component.
_N.B.: because of the current usage of the `postbuild_fixups.sh` build script, this project can only be built on a Linux distribution_
---
## Install dependencies
1. Install npm packages
```
$ npm install
```
2. Create a virtual env and activate.
```
$ python3 -m venv venv
$ . venv/bin/activate
```
3. Install python packages required to build components.
```
$ pip install wheel
$ pip install -r requirements.txt
# or
$ pip install -r requirements.txt --upgrade
```
4. Install the python packages for testing (optional)
```
$ pip install -r tests/requirements.txt
# or
$ pip install -r tests/requirements.txt --upgrade
```
## Write your component code in `src/lib/components/DashTextareaAutocomplete.react.js`.
- The demo app is in `src/demo` and you will import your example component code into your demo app.
- Test your code in a Python environment:
1. Build your code
```
$ npm run build
```
**N.B.**
+ We use `postbuild_fixups.sh` to re-write certain lines in the generated Julia files of this package to allow usage of Julia's [Artifact](https://pkgdocs.julialang.org/v1/artifacts/#Artifacts) system.
2. Run and modify the `usage.py` sample dash app:
```
$ python usage.py
```
- Write tests for your component.
- A sample test is available in `tests/test_usage.py`, it will load `usage.py` and you can then automate interactions with selenium.
- Run the tests with `$ pytest tests`.
- The Dash team uses these types of integration tests extensively. Browse the Dash component code on GitHub for more examples of testing (e.g. https://github.com/plotly/dash-core-components)
- <strike>Add custom styles to your component by putting your custom CSS files into your distribution folder (`dash_textarea_autocomplete`).
- Make sure that they are referenced in `MANIFEST.in` so that they get properly included when you're ready to publish your component.
- Make sure the stylesheets are added to the `_css_dist` dict in `dash_textarea_autocomplete/__init__.py` so dash will serve them automatically when the component suite is requested.</strike> **does not apply**
- [Review your code](./review_checklist.md)
## Create a production build and publish:
0. Pre-requisites
1. Contact @etpinard to get write access to this repo
2. Create an npmjs.com account and a pypi.org account and ask @etpinard
for publish rights
3. Provide your login/pass for PyPI (if you're not using a keychain-type Python module for that)
and sign in to NPM with `npm login`
4. Install dependencies and activate python virtualenv [ref](#install-dependencies)
1. Version, build, commit and push your code:
```
$ npm version --no-git-tag-version <patch|minor|major>
$ npm run build
$ git add --all
$ git commit -m "X.Y.Z"
$ git push
```
**N.B.**
+ we use the `--no-git-tag-version` flag, because [`Registrator.jl`](https://github.com/JuliaRegistries/Registrator.jl)
won't work if a tag corresponding to the to-be-registered version already
exists.
2. Create a Python distribution
```
$ rm -rf dist/*
$ pip install wheel
$ python setup.py sdist bdist_wheel
```
This will create source and wheel distributions in the generated `dist/` folder.
See [PyPA](https://packaging.python.org/guides/distributing-packages-using-setuptools/#packaging-your-project)
for more information.
3. Test your tarball by copying it into a new environment and installing it locally:
```
$ mkdir some-tmp-dir
$ cd some-tmp-dir
$ python3 -m venv venv
$ . venv/bin/activate
$ pip install wheel
$ pip install /path/to/dist/dash_textarea_autocomplete-X.Y.Z.tar.gz
```
4. If it works, then you can publish the component:
1. Publish on PyPI (N.B. must have PyPI token and `$HOME/.pypirc` filled correctly)
```
$ pip install twine
$ twine upload dist/*
```
2. Publish on NPM
```
$ npm publish
```
_Publishing your component to NPM will make the JavaScript bundles available on the unpkg CDN. By default, Dash serves the component library's CSS and JS locally, but if you choose to publish the package to NPM you can set `serve_locally` to `False` and you may see faster load times._
3. Publish to Julia's General Registry
+ Go to the repo's GitHub page and add the following comment on the version commit:
```
@JuliaRegistrator register branch=main
```
<details>
<summary>If something goes wrong (like a test failure), click here</summary>
+ fix the problem,
+ run `npm run build`,
+ add, commit and push (but do not bump the version!),
+ redo step 2-3 and
+ make another `@JuliaRegistrator register branch=main` comment on the newly pushed
commit on GitHub. The JuliaRegistrator bot will match the version
number with the opened `JuliaRegistries/General` PR.
After the `JuliaRegistries/General` PR is merged, the Julia version of
the package will be out-of-sync with the NPM and PyPI versions. So, it
is highly recommended to redo the publish process from scratch, starting
with an `npm version --no-git-tag-version patch` call. Note that we
cannot simply abandon an open `JuliaRegistries/General` PR, as the
Julia registries require "sequential" version increments as of
2021-12-16. That is, going from `v1.1.0` to `v1.2.1` if the `v1.2.0`
release is botched is not allowed.
</details>
5. Tag and make a GitHub release
+ Done automatically by [`TagBot`](https://github.com/JuliaRegistries/TagBot)
after the new version has been merged in Julia's General Registry.
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | docs | 3805 | # Dash Textarea Autocomplete
Dash component wrapper for [`@webscopeio/react-textarea-autocomplete`](https://github.com/webscopeio/react-textarea-autocomplete).

## Get started
### Dash for Python
1. Install `dash-textarea-autocomplete`, Dash and its dependencies:
```
pip install dash dash-textarea-autocomplete
```
2. Run `python usage.py`
```py
# usage.py
import dash_textarea_autocomplete
from dash import Dash, callback, html, Input, Output, State
WORD_LIST = ['apple', 'application', 'apartment',
'boat', 'banana', 'boss',
'coast', 'code', 'cat']
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = Dash(__name__, external_stylesheets=external_stylesheets)
app.layout = html.Div([
dash_textarea_autocomplete.DashTextareaAutocomplete(
id='input',
# value='initial value',
placeholder='Type something, use `:` to invoke auto-completion',
wordList=WORD_LIST,
# common options with their default values
# triggerChar=':',
# minChar=1,
style={'minHeight': '100px'},
dropdownStyle={'maxHeight': '120px', 'overflow': 'auto'}),
html.Div(id='output'),
html.Button('CLICK', id='btn'),
html.Div(id='output2')
], className='container')
@callback(Output('output', 'children'),
Input('input', 'value'))
def display_output(value):
return 'You have entered: {}'.format(value)
@callback(Output('output2', 'children'),
Input('btn', 'n_clicks'),
State('input', 'value'))
def display_output2(n_clicks, value):
return 'After click, you have: {}'.format(value)
if __name__ == '__main__':
app.run_server(debug=True)
```
3. Visit http://localhost:8050 in your web browser
### Dash for Julia
1. Install `DashTextareaAutocomplete.jl` and `Dash.jl`:
```jl
pkg> activate .
pkg> add Dash DashTextareaAutocomplete
```
2. Run `julia --project usage.jl` - [click here to see `usage.jl`](https://github.com/etpinard/dash-textarea-autocomplete/blob/main/usage.jl) (a condensed sketch is shown just below this list)
3. Visit http://localhost:8050 in your web browser
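For reference, here is a condensed Julia sketch of that example, adapted from the repository's `usage.jl` (it reuses the same stylesheet and callback pattern, shortens the word list, and omits the button callback):
```jl
using Dash
using DashTextareaAutocomplete

app = dash(; external_stylesheets = ["https://codepen.io/chriddyp/pen/bWLwgP.css"])

app.layout = html_div(className = "container") do
    dashtextareaautocomplete(id = "input",
        placeholder = "Type something, use `:` to invoke auto-completion",
        wordList = ["apple", "banana", "cat"],
        style = (minHeight = "100px",)),
    html_div(id = "output")
end

# Echo the current textarea value below the input
callback!(app, Output("output", "children"),
          Input("input", "value")) do value
    return "You have entered $value"
end

run_server(app, "0.0.0.0", 8050)
```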
### Dash for R
_TODO_
## Props
```
- id (string; optional): The ID used to identify this component in Dash callbacks.
- value (string; optional): The value displayed in the <textarea>.
- placeholder (string; optional): Provides a hint to the user of what can be entered in the <textarea> field.
- wordList (list; required): List of string available for auto-completion.
- triggerChar (string; default ":"): Character that triggers auto-completion machinery.
Defaults to `:`. (from `react-textarea-autocomplete`)
- minChar (number; default 1): Number of characters that user should type for trigger a suggestion.
Defaults to 1. (from `react-textarea-autocomplete`)
- className (string; optional): Class names of the <textarea> (from `react-textarea-autocomplete`).
- containerClassName (string; optional): Class names of the textarea container (from `react-textarea-autocomplete`).
- style (dict; optional): Style of the <textarea>.(from `react-textarea-autocomplete`).
- listStyle (dict; optional): Style of the list wrapper (from `react-textarea-autocomplete`).
- itemStyle (dict; optional): Styles of the items wrapper.
- loaderStyle (dict; optional): Style of the loader wrapper (from `react-textarea-autocomplete`).
- containerStyle (dict; optional): Styles of the textarea container (from `react-textarea-autocomplete`).
- dropdownStyle (dict; optional): Styles of the dropdown wrapper.
```
## Contributing
See [CONTRIBUTING.md](https://github.com/etpinard/dash-textarea-autocomplete/blob/main/CONTRIBUTING.md)
## License
[MIT](https://github.com/etpinard/dash-textarea-autocomplete/blob/main/LICENSE)
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 1.3.2 | b0fcc10a08442dbb97021711b3d9be783415f911 | docs | 3296 | # Code Review Checklist
## Code quality & design
- Is your code clear? If you had to go back to it in a month, would you be happy to? If someone else had to contribute to it, would they be able to?
A few suggestions:
- Make your variable names descriptive and use the same naming conventions throughout the code.
- For more complex pieces of logic, consider putting a comment, and maybe an example.
- In the comments, focus on describing _why_ the code does what it does, rather than describing _what_ it does. The reader can most likely read the code, but not necessarily understand why it was necessary.
- Don't overdo it in the comments. The code should be clear enough to speak for itself. Stale comments that no longer reflect the intent of the code can hurt code comprehension.
* Don't repeat yourself. Any time you see that the same piece of logic can be applied in multiple places, factor it out into a function, or variable, and reuse that code.
* Scan your code for expensive operations (large computations, DOM queries, React re-renders). Have you done your possible to limit their impact? If not, it is going to slow your app down.
* Can you think of cases where your current code will break? How are you handling errors? Should the user see them as notifications? Should your app try to auto-correct them for them?
## Component API
- Have you tested your component on the Python side by creating an app in `usage.py` ?
Do all of your component's props work when set from the back-end?
Should all of them be settable from the back-end or are some only relevant to user interactions in the front-end?
- Have you provided some basic documentation about your component? The Dash community uses [react docstrings](https://github.com/plotly/dash-docs/blob/master/tutorial/plugins.py#L45) to provide basic information about dash components. Take a look at this [Checklist component example](https://github.com/plotly/dash-core-components/blob/master/src/components/Checklist.react.js) and others from the dash-core-components repository.
At a minimum, you should describe what your component does, and describe its props and the features they enable.
Be careful to use the correct formatting for your docstrings for them to be properly recognized.
## Tests
- The Dash team uses integration tests extensively, and we highly encourage you to write tests for the main functionality of your component. In the `tests` folder of the boilerplate, you can see a sample integration test. By launching it, you will run a sample Dash app in a browser. You can run the test with:
```
python -m tests.test_render
```
[Browse the Dash component code on GitHub for more examples of testing.](https://github.com/plotly/dash-core-components)
## Ready to publish? Final scan
- Take a last look at the external resources that your component is using. Are all the external resources used [referenced in `MANIFEST.in`](https://github.com/plotly/dash-docs/blob/0b2fd8f892db720a7f3dc1c404b4cff464b5f8d4/tutorial/plugins.py#L55)?
- [You're ready to publish!](https://github.com/plotly/dash-component-boilerplate/blob/master/%7B%7Bcookiecutter.project_shortname%7D%7D/README.md#create-a-production-build-and-publish)
| DashTextareaAutocomplete | https://github.com/etpinard/dash-textarea-autocomplete.git |
|
[
"MIT"
] | 0.1.0 | 0f5467d9014b72e5b86927f4d168fd68e49633e1 | code | 4070 | module Gateway
using Sockets
export getgateway
function parse_linux_proc_net_route(file::IOStream)
# /proc/net/route file:
# Iface Destination Gateway Flags RefCnt Use Metric Mask MTU Window IRTT
# wlo1 00000000 0100A8C0 0003 0 0 600 00000000 0 0 0
# wlo1 0000A8C0 00000000 0001 0 0 600 00FFFFFF 0 0 0
sep = "\t"
field = 14
scanner = read(file, String)
tokens = split(scanner, sep)
if length(tokens) <= field
throw(error("No gateway found"))
end
gateway = parse(UInt32, tokens[field], base=16)
gateway = hton(gateway) # Convert hex address to big endian
Sockets.IPv4(gateway)
end
function parse_windows_route(output::IO)
# Windows "route print 0.0.0.0" output:
# ===========================================================================
# Interface List
# 3 ...02 16 4a a8 72 ca ...... Intel(R) PRO/100 VE Network Connection
# 1 ........................... Software Loopback Interface 1
# ===========================================================================
# IPv4 Route Table
# ===========================================================================
# Active Routes:
# Network Destination Netmask Gateway Interface Metric
# 0.0.0.0 0.0.0.0 192.168.0.1 192.168.0.100 20
# ===========================================================================
#
# Get to "Active Routes" section and jump 2 lines below and pick address from 3rd column
sep = "\n"
column = 3
scanner = read(output, String)
tokens = split(scanner, sep)
sep = 0
for (idx, line) in enumerate(tokens)
if sep == 3
if length(tokens) <= idx + 2
throw(error("No gateway found"))
end
fields = split(tokens[idx+2])
if length(fields) < 3
throw(error("No gateway found"))
end
return Sockets.IPv4(fields[column])
end
if startswith(line, "=======")
sep += 1
continue
end
end
throw(error("No gateway found"))
end
function parse_osx_route(output::IO)
# Darwin route frame:
# route to: default
# destination: default
# mask: default
# gateway: 192.168.1.1
sep = "\n"
column = 2
scanner = read(output, String)
tokens = split(scanner, sep)
for line in tokens
fields = split(line)
if length(fields) >=2 && fields[column - 1] == "gateway:"
return Sockets.IPv4(fields[column])
end
end
throw(error("No gateway found"))
end
function parse_unix_netstat(output::IO)
# For unix based OS such as *BSD, solaris etc
# netstat -rn output:
# Routing tables
#
# Internet:
# Destination Gateway Flags Netif Expire
# default 10.88.88.2 UGS em0
# 10.88.88.0/24 link#1 U em0
# 10.88.88.148 link#1 UHS lo0
# 127.0.0.1 link#2 UH lo0
#
# Internet6:
# Destination Gateway Flags Netif Expire
# ::/96 ::1 UGRS lo0
# ::1 link#2 UH lo0
# ::ffff:0.0.0.0/96 ::1 UGRS lo0
# fe80::/10 ::1 UGRS lo0
# ...
sep = "\n"
column = 2
scanner = read(output, String)
tokens = split(scanner, sep)
for line in tokens
fields = split(line)
if length(fields) >=2 && fields[column - 1] == "default"
return Sockets.IPv4(fields[column])
end
end
throw(error("No gateway found"))
end
function getgateway()
if Sys.islinux()
open("/proc/net/route") do file
return parse_linux_proc_net_route(file)
end
elseif Sys.iswindows()
output = open(`cmd /c route print 0.0.0.0`)
return parse_windows_route(output)
elseif Sys.isapple()
output = open(`/sbin/route -n get 0.0.0.0`)
return parse_osx_route(output)
elseif Sys.isbsd()
output = open(`netstat -rn`)
return parse_unix_netstat(output)
else
error("Operating system not supported; please file an issue on GitHub")
end
end
end #module
| Gateway | https://github.com/glitzflitz/Gateway.jl.git |
|
[
"MIT"
] | 0.1.0 | 0f5467d9014b72e5b86927f4d168fd68e49633e1 | code | 198 | using Gateway
using Test
@testset "GetGateway.jl" begin
if Sys.islinux()
result = Gateway.parse_linux_proc_net_route(open("/proc/net/route"))
@test Gateway.getgateway() == result
end
end
| Gateway | https://github.com/glitzflitz/Gateway.jl.git |
|
[
"MIT"
] | 0.1.0 | 0f5467d9014b72e5b86927f4d168fd68e49633e1 | docs | 566 | # Gateway.jl
[Build Status](https://travis-ci.org/glitzflitz/Gateway.jl)
Julia library for obtaining the IP address of the default gateway.
Provides implementations for:
+ Linux
+ Windows
+ OS X
+ FreeBSD
+ Solaris
## Documentation
For most use cases, all you ever need is:
```julia
getgateway() => Sockets.IPv4
```
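For instance, a minimal session might look like this (the address in the comment is only illustrative; the actual result depends on your network):
```julia
using Gateway
using Sockets

gw = getgateway()               # e.g. ip"192.168.1.1" on a typical home network
println("Default gateway: ", gw)
@assert gw isa Sockets.IPv4
```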
## Project Status
The package is tested against the Julia `v1.2` release on Linux and Windows. If you have an OS X, BSD variant, or Solaris environment, please feel free to submit test results.
| Gateway | https://github.com/glitzflitz/Gateway.jl.git |
|
[
"MIT"
] | 0.1.0 | d7365528f2a2c2c99979c867ed5cd98ad7037835 | code | 61 | module AstroConstants
using Logging
include("tpc.jl")
end
| AstroConstants | https://github.com/JuliaSpaceMissionDesign/AstroConstants.jl.git |
|
[
"MIT"
] | 0.1.0 | d7365528f2a2c2c99979c867ed5cd98ad7037835 | code | 4533 | export load_tpc, load_tpc!
# TPC associated regular expressions (precompiled)
const _FIND_BODY_REGEX = Regex("(BODY\\w{1,}\\D{1,}=)")
const _FIND_DATA_REGEX = Regex("=\\D{1,}\\(([^()]*)\\)|(=\\D{1,}([0-9]*\\.?[0-9]*))")
const _FIND_BODY_INDEX = Regex("(?<=BODY)(.*?)(?=_)")
const _FIND_PROP_NAME_REGEX = Regex("(?<=\\d_)(.*?)(?==)")
"""
PlanetaryConstantsDict{T}
Store TPC associated constants by NAIF ID.
"""
struct PlanetaryConstantsDict{T}
data::Dict{Int, Dict{Symbol, Vector{T}}}
end
PlanetaryConstantsDict{T}() where T = PlanetaryConstantsDict{T}(Dict{Int, Dict{Symbol, Vector{T}}}())
Base.length(c::PlanetaryConstantsDict{T}) where T = length(c.data)
Base.keys(c::PlanetaryConstantsDict{T}) where T = keys(c.data)
Base.values(c::PlanetaryConstantsDict{T}) where T = values(c.data)
Base.eltype(::PlanetaryConstantsDict{T}) where T = T
Base.getindex(c::PlanetaryConstantsDict{T}, key) where T = getindex(c.data, key)
function Base.show(io::IO, cache::PlanetaryConstantsDict{T}) where T
println(io, "ConstantsDict{$T} with $(length(cache.data)) entries:")
for (idx, props) in cache.data
propstr = join([String(p) for p in keys(props)], ", ")
println(io, "$idx => ($(propstr))")
end
end
"""
load_tpc!(cache::PlanetaryConstantsDict{T}, file::String) where T
Load a TPC file into the `cache` dictionary.
"""
function load_tpc!(cache::PlanetaryConstantsDict{T}, file::String) where T
@info "Loading constants from $file"
# read document
lines = readlines(file)
# pre-allocate data for processing
saved = Vector{String}(undef, length(lines))
save = false
last_saved_index = 0
# load and strip lines (remove tabs and spaces)
# extract lines which are within `\begindata` and `\begintext`
for line in lines
line = strip(line)
if line == "\\begindata"
save = true
continue
elseif line == "\\begintext"
save = false
continue
end
if save
if line == ""
continue
end
@inbounds saved[last_saved_index+1] = line
last_saved_index += 1
end
end
@inbounds resolved_lines = @view saved[1:last_saved_index]
resolved_lines = join(resolved_lines, " ")
# extract lines which actually have data using the `BODY**** =` pattern
# this vector contains a list of `BODY******* =` elements
name_idx = findall(_FIND_BODY_REGEX, resolved_lines)
# row data are extracted as between square brackets, the `=`
# before the brackets is kept
data_idx = findall(_FIND_DATA_REGEX, resolved_lines)
# data are mapped to a dictionary
@inbounds for i in eachindex(name_idx)
if length(name_idx[i]) > 0
# extract full name of the entry
raw_name = resolved_lines[name_idx[i]]
# parse naif id
naif = parse(Int, match(_FIND_BODY_INDEX, raw_name).match)
# parse property name
prop = Symbol(lowercase(strip(match(_FIND_PROP_NAME_REGEX, raw_name).match)))
# parse data
raw_data = split(replace(resolved_lines[data_idx[i]], "D" => "E"))
if raw_data[2] == "("
# data is a vector
mergewith!(
merge!, cache.data,
Dict(
naif => Dict(
prop => map(
x -> parse(T, x),
@view raw_data[3:(end-1)]
)
)
)
)
else
# data is a value
valfloat = tryparse(T, raw_data[2])
mergewith!(
merge!, cache.data,
Dict(
naif => Dict(
prop => T[valfloat !== nothing ? valfloat : tryparse(Int64, raw_data[2])]
)
)
)
end
end
end
return cache
end
"""
load_tpc(::Type{T}, file::String) where T
Load a TPC file into a [`PlanetaryConstantsDict`](@ref) object.
"""
function load_tpc(::Type{T}, file::String) where T
cache = PlanetaryConstantsDict{T}()
return load_tpc!(cache, file)
end
function load_tpc(::Type{T}, files::AbstractVector{String}) where T
cache = PlanetaryConstantsDict{T}()
for file in files
load_tpc!(cache, file)
end
return cache
end
| AstroConstants | https://github.com/JuliaSpaceMissionDesign/AstroConstants.jl.git |
|
[
"MIT"
] | 0.1.0 | d7365528f2a2c2c99979c867ed5cd98ad7037835 | code | 101 | using AstroConstants
using Test
@testset "AstroConstants.jl" begin
# Write your tests here.
end
| AstroConstants | https://github.com/JuliaSpaceMissionDesign/AstroConstants.jl.git |
|
[
"MIT"
] | 0.1.0 | d7365528f2a2c2c99979c867ed5cd98ad7037835 | docs | 66 | # AstroConstants
_Astrodynamics constants parsing and handling._
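As a minimal usage sketch (the kernel file names below, `pck00010.tpc` and `gm_de431.tpc`, are assumptions; any NAIF text constants kernels available locally will do):
```julia
using AstroConstants

# Hypothetical local kernel files; substitute paths to kernels you have downloaded.
# load_tpc parses one or more TPC kernels into a PlanetaryConstantsDict keyed by NAIF ID.
consts = load_tpc(Float64, ["pck00010.tpc", "gm_de431.tpc"])

# Every constant is stored as a Vector{Float64}, e.g. Earth's radii and GM (NAIF ID 399)
earth_radii = consts[399][:radii]
earth_gm = consts[399][:gm]
```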
| AstroConstants | https://github.com/JuliaSpaceMissionDesign/AstroConstants.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | code | 587 | using Documenter
using Cosmology
using Documenter.Remotes: GitHub
DocMeta.setdocmeta!(Cosmology, :DocTestSetup, :(using Cosmology); recursive = true)
include("pages.jl")
makedocs(;
modules = [Cosmology],
authors = "Julia Astro",
repo = GitHub("JuliaAstro/Cosmology.jl"),
sitename = "Cosmology.jl",
format = Documenter.HTML(;
prettyurls = get(ENV, "CI", "false") == "true",
canonical = "https://juliaastro.github.io/Cosmology.jl",
assets = String[],
),
pages=pages,
)
deploydocs(;
repo = "github.com/JuliaAstro/Cosmology.jl",
)
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | code | 69 | pages = [
"Home" => "index.md",
"API/Reference" => "api.md"
] | Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | code | 9348 | module Cosmology
using QuadGK
using Unitful
import Unitful: km, s, Gyr
using UnitfulAstro: Mpc, Gpc
export cosmology,
age,
angular_diameter_dist,
comoving_radial_dist,
comoving_transverse_dist,
comoving_volume,
comoving_volume_element,
distmod,
H,
hubble_dist,
hubble_time,
luminosity_dist,
lookback_time,
scale_factor
abstract type AbstractCosmology end
abstract type AbstractClosedCosmology <: AbstractCosmology end
abstract type AbstractFlatCosmology <: AbstractCosmology end
abstract type AbstractOpenCosmology <: AbstractCosmology end
struct FlatLCDM{T <: Real} <: AbstractFlatCosmology
h::T
Ω_Λ::T
Ω_m::T
Ω_r::T
end
FlatLCDM(h::Real, Ω_Λ::Real, Ω_m::Real, Ω_r::Real) =
FlatLCDM(promote(float(h), float(Ω_Λ), float(Ω_m), float(Ω_r))...)
a2E(c::FlatLCDM, a) = sqrt(c.Ω_r + c.Ω_m * a + c.Ω_Λ * a^4)
struct ClosedLCDM{T <: Real} <: AbstractClosedCosmology
h::T
Ω_k::T
Ω_Λ::T
Ω_m::T
Ω_r::T
end
ClosedLCDM(h::Real, Ω_k::Real, Ω_Λ::Real, Ω_m::Real, Ω_r::Real) =
ClosedLCDM(promote(float(h), float(Ω_k), float(Ω_Λ), float(Ω_m),
float(Ω_r))...)
struct OpenLCDM{T <: Real} <: AbstractOpenCosmology
h::T
Ω_k::T
Ω_Λ::T
Ω_m::T
Ω_r::T
end
OpenLCDM(h::Real, Ω_k::Real, Ω_Λ::Real, Ω_m::Real, Ω_r::Real) =
OpenLCDM(promote(float(h), float(Ω_k), float(Ω_Λ), float(Ω_m),
float(Ω_r))...)
function a2E(c::Union{ClosedLCDM,OpenLCDM}, a)
a2 = a * a
sqrt(c.Ω_r + c.Ω_m * a + (c.Ω_k + c.Ω_Λ * a2) * a2)
end
for c in ("Flat", "Open", "Closed")
name = Symbol("$(c)WCDM")
@eval begin
struct $(name){T <: Real} <: $(Symbol("Abstract$(c)Cosmology"))
h::T
Ω_k::T
Ω_Λ::T
Ω_m::T
Ω_r::T
w0::T
wa::T
end
function $(name)(h::Real, Ω_k::Real, Ω_Λ::Real, Ω_m::Real, Ω_r::Real,
w0::Real, wa::Real)
$(name)(promote(float(h), float(Ω_k), float(Ω_Λ), float(Ω_m),
float(Ω_r), float(w0), float(wa))...)
end
end
end
function WCDM(h::Real, Ω_k::Real, Ω_Λ::Real, Ω_m::Real, Ω_r::Real, w0::Real, wa::Real)
if Ω_k < 0
ClosedWCDM(h, Ω_k, Ω_Λ, Ω_m, Ω_r, w0, wa)
elseif Ω_k > 0
OpenWCDM(h, Ω_k, Ω_Λ, Ω_m, Ω_r, w0, wa)
else
FlatWCDM(h, Ω_k, Ω_Λ, Ω_m, Ω_r, w0, wa)
end
end
function a2E(c::Union{FlatWCDM,ClosedWCDM,OpenWCDM}, a)
ade = exp((1 - 3 * (c.w0 + c.wa)) * log(a) + 3 * c.wa * (a - 1))
sqrt(c.Ω_r + (c.Ω_m + c.Ω_k * a) * a + c.Ω_Λ * ade)
end
"""
cosmology(;h = 0.69,
Neff = 3.04,
OmegaK = 0,
OmegaM = 0.29,
OmegaR = nothing,
Tcmb = 2.7255,
w0 = -1,
wa = 0)
# Parameters
* `h` - Dimensionless Hubble constant
* `OmegaK` - Curvature density (Ω_k)
* `OmegaM` - Matter density (Ω_m)
* `OmegaR` - Radiation density (Ω_r)
* `Tcmb` - CMB temperature in Kelvin; used to compute Ω_γ
* `Neff` - Effective number of massless neutrino species; used to compute Ω_ν
* `w0` - CPL dark energy equation of state; `w = w0 + wa(1-a)`
* `wa` - CPL dark energy equation of state; `w = w0 + wa(1-a)`
# Examples
```jldoctest
julia> c = cosmology()
Cosmology.FlatLCDM{Float64}(0.69, 0.7099122024007928, 0.29, 8.77975992071536e-5)
julia> c = cosmology(OmegaK=0.1)
Cosmology.OpenLCDM{Float64}(0.69, 0.1, 0.6099122024007929, 0.29, 8.77975992071536e-5)
julia> c = cosmology(w0=-0.9, OmegaK=-0.1)
Cosmology.ClosedWCDM{Float64}(0.69, -0.1, 0.8099122024007929, 0.29, 8.77975992071536e-5, -0.9, 0.0)
```
"""
function cosmology(;h = 0.69,
Neff = 3.04,
OmegaK = 0,
OmegaM = 0.29,
OmegaR = nothing,
Tcmb = 2.7255,
w0 = -1,
wa = 0)
if OmegaR === nothing
OmegaG = 4.48131e-7 * Tcmb^4 / h^2
OmegaN = Neff * OmegaG * (7 / 8) * (4 / 11)^(4 / 3)
OmegaR = OmegaG + OmegaN
end
OmegaL = 1 - OmegaK - OmegaM - OmegaR
if !(w0 == -1 && wa == 0)
return WCDM(h, OmegaK, OmegaL, OmegaM, OmegaR, w0, wa)
end
if OmegaK < 0
return ClosedLCDM(h, OmegaK, OmegaL, OmegaM, OmegaR)
elseif OmegaK > 0
return OpenLCDM(h, OmegaK, OmegaL, OmegaM, OmegaR)
else
return FlatLCDM(h, OmegaL, OmegaM, OmegaR)
end
end
# hubble rate
scale_factor(z) = 1 / (1 + z)
E(c::AbstractCosmology, z) = (a = scale_factor(z); a2E(c, a) / a^2)
H(c::AbstractCosmology, z) = 100 * c.h * E(c, z) * km / s / Mpc
hubble_dist0(c::AbstractCosmology) = 2997.92458 / c.h * Mpc
hubble_dist(c::AbstractCosmology, z) = hubble_dist0(c) / E(c, z)
hubble_time0(c::AbstractCosmology) = 9.777922216807891 / c.h * Gyr
hubble_time(c::AbstractCosmology, z) = hubble_time0(c) / E(c, z)
# distances
Z(c::AbstractCosmology, z::Real, ::Nothing; kws...) =
QuadGK.quadgk(a->1 / a2E(c, a), scale_factor(z), 1; kws...)[1]
Z(c::AbstractCosmology, z₁::Real, z₂::Real; kws...) =
QuadGK.quadgk(a->1 / a2E(c, a), scale_factor(z₂), scale_factor(z₁); kws...)[1]
comoving_radial_dist(c::AbstractCosmology, z₁, z₂ = nothing; kws...) = hubble_dist0(c) * Z(c, z₁, z₂; kws...)
"""
comoving_radial_dist([u::Unitlike,] c::AbstractCosmology, [z₁,] z₂)
Comoving radial distance in Mpc at redshift `z₂` as seen by an observer at `z₁`.
Redshift `z₁` defaults to 0 if omitted. Will convert to compatible unit `u` if
provided.
"""
comoving_radial_dist
comoving_transverse_dist(c::AbstractFlatCosmology, z₁, z₂ = nothing; kws...) =
comoving_radial_dist(c, z₁, z₂; kws...)
function comoving_transverse_dist(c::AbstractOpenCosmology, z₁, z₂ = nothing; kws...)
sqrtok = sqrt(c.Ω_k)
hubble_dist0(c) * sinh(sqrtok * Z(c, z₁, z₂; kws...)) / sqrtok
end
function comoving_transverse_dist(c::AbstractClosedCosmology, z₁, z₂ = nothing; kws...)
sqrtok = sqrt(abs(c.Ω_k))
hubble_dist0(c) * sin(sqrtok * Z(c, z₁, z₂; kws...)) / sqrtok
end
angular_diameter_dist(c::AbstractCosmology, z; kws...) =
comoving_transverse_dist(c, z; kws...) / (1 + z)
angular_diameter_dist(c::AbstractCosmology, z₁, z₂; kws...) =
comoving_transverse_dist(c, z₁, z₂; kws...) / (1 + z₂)
"""
angular_diameter_dist([u::Unitlike,] c::AbstractCosmology, [z₁,] z₂)
Ratio of the proper transverse size in Mpc of an object at redshift `z₂` to its
angular size in radians, as seen by an observer at `z₁`. Redshift `z₁` defaults
to 0 if omitted. Will convert to compatible unit `u` if provided.
"""
angular_diameter_dist
luminosity_dist(c::AbstractCosmology, z; kws...) =
comoving_transverse_dist(c, z; kws...) * (1 + z)
"""
luminosity_dist([u::Unitlike,] c::AbstractCosmology, z)
Bolometric luminosity distance in Mpc at redshift `z`. Will convert to
compatible unit `u` if provided.
"""
luminosity_dist
"""
distmod(c::AbstractCosmology, z)
Distance modulus in magnitudes at redshift `z`.
"""
distmod(c::AbstractCosmology, z; kws...) =
5 * log10(luminosity_dist(c, z; kws...) / Mpc) + 25
# volumes
"""
comoving_volume([u::Unitlike,] c::AbstractCosmology, z)
Comoving volume in cubic Gpc out to redshift `z`. Will convert to compatible unit `u` if provided.
"""
comoving_volume(c::AbstractFlatCosmology, z; kws...) =
(4pi / 3) * (comoving_radial_dist(Gpc, c, z; kws...))^3
function comoving_volume(c::AbstractOpenCosmology, z; kws...)
DH = hubble_dist0(Gpc, c)
x = comoving_transverse_dist(Gpc, c, z; kws...) / DH
sqrtok = sqrt(c.Ω_k)
2pi * (DH)^3 * (x * sqrt(1 + c.Ω_k * x^2) - asinh(sqrtok * x) / sqrtok) / c.Ω_k
end
function comoving_volume(c::AbstractClosedCosmology, z; kws...)
DH = hubble_dist0(Gpc, c)
x = comoving_transverse_dist(Gpc, c, z; kws...) / DH
sqrtok = sqrt(abs(c.Ω_k))
2pi * (DH)^3 * (x * sqrt(1 + c.Ω_k * x^2) - asin(sqrtok * x) / sqrtok) / c.Ω_k
end
"""
comoving_volume_element([u::Unitlike,] c::AbstractCosmology, z)
Comoving volume element in Gpc out to redshift `z`. Will convert to compatible unit `u` if provided.
"""
comoving_volume_element(c::AbstractCosmology, z; kws...) =
hubble_dist0(Gpc, c) * angular_diameter_dist(Gpc, c, z; kws...)^2 / a2E(c, scale_factor(z))
# times
T(c::AbstractCosmology, a0, a1; kws...) = QuadGK.quadgk(x->x / a2E(c, x), a0, a1; kws...)[1]
"""
age([u::Unitlike,] c::AbstractCosmology, z)
Age of the universe in Gyr at redshift `z`. Will convert to compatible unit `u` if provided.
"""
age(c::AbstractCosmology, z; kws...) = hubble_time0(c) * T(c, 0, scale_factor(z); kws...)
"""
lookback_time([u::Unitlike,] c::AbstractCosmology, z)
Difference between age at redshift 0 and age at redshift `z` in Gyr.
Will convert to compatible unit `u` if provided.
"""
lookback_time(c::AbstractCosmology, z; kws...) = hubble_time0(c) * T(c, scale_factor(z), 1; kws...)
# Easily select a different unit
for f in (:hubble_dist0, :hubble_dist, :hubble_time0, :hubble_time,
:comoving_radial_dist, :comoving_transverse_dist,
:angular_diameter_dist, :luminosity_dist,
:comoving_volume, :comoving_volume_element,
:age, :lookback_time)
@eval $f(u::Unitful.Unitlike, args...; kws...) = uconvert(u, $f(args...; kws...))
end
end # module
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | code | 8385 | using Cosmology
using Test, Unitful, UnitfulAstro, QuadGK
using Documenter
DocMeta.setdocmeta!(Cosmology, :DocTestSetup, :(using Cosmology); recursive = true)
doctest(Cosmology)
# values from http://icosmos.co.uk/
dist_rtol = 1e-6
age_rtol = 2e-4
# Integrating a unitful function would require UnitfulIntegration.jl. Without using it, we
# strip the units away from the integrand function
integrand(c, z) = 4pi*ustrip(comoving_volume_element(c, z))
@testset "FlatLCDM" begin
c = cosmology(h=0.7, OmegaM=0.3, OmegaR=0)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1651.9145u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 625.3444u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3303.829u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 151.0571u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6607.6579u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.1002 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 13.4694u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.7527u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (13.4694-5.7527)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "OpenLCDM" begin
c = cosmology(h=0.7, OmegaK=0.1, OmegaM=0.3, OmegaR=0)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1619.9588u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 598.9118u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3209.784u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 140.0856u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6479.8352u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.0578 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 13.064u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.5466u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (13.064-5.5466)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "ClosedLCDM" begin
c = cosmology(h=0.7, OmegaK=-0.1, OmegaM=0.3, OmegaR=0)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1686.5272u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 655.6019u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3408.937u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 163.8479u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6746.1088u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.1453 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 13.925u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.9868u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (13.925-5.9868)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "FlatWCDM" begin
c = cosmology(h=0.7, OmegaM=0.3, OmegaR=0, w0=-0.9, wa=0.1)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1612.0585u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 607.6802u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3224.1169u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 140.3851u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6448.2338u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.0472 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 13.1915u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.6464u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (13.1915-5.6464)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "OpenWCDM" begin
c = cosmology(h=0.7, OmegaK=0.1, OmegaM=0.3, OmegaR=0, w0=-0.9, wa=0.1)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1588.0181u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 585.4929u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3147.6227u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 132.0466u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6352.0723u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.0146 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 12.8488u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.4659u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (12.8488-5.4659)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "ClosedWCDM" begin
c = cosmology(h=0.7, OmegaK=-0.1, OmegaM=0.3, OmegaR=0, w0=-0.9, wa=0.1)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1637.5993u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 632.5829u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_radial_dist(c,1,rtol=dist_rtol) ≈ 3307.9932u"Mpc" rtol = dist_rtol
@test comoving_volume(c,1,rtol=dist_rtol) ≈ 149.8301u"Gpc^3" rtol = dist_rtol
@test quadgk(z -> integrand(c, z), 0, 2.5)[1] ≈ ustrip(comoving_volume(c, 2.5))
@test luminosity_dist(c,1,rtol=dist_rtol) ≈ 6550.3973u"Mpc" rtol = dist_rtol
@test distmod(c,1,rtol=dist_rtol) ≈ 44.0813 rtol = dist_rtol
@test age(c,0,rtol=age_rtol) ≈ 13.5702u"Gyr" rtol = age_rtol
@test age(c,1,rtol=age_rtol) ≈ 5.8482u"Gyr" rtol = age_rtol
@test lookback_time(c,1,rtol=age_rtol) ≈ (13.5702-5.8482)u"Gyr" rtol = age_rtol
@test age(c, 1) + lookback_time(c, 1) ≈ age(c, 0)
end
@testset "Non-Float64" begin
# Test that FlatLCDM works with non-Float64 (BigFloat in this example)
c = cosmology(h=0.7, OmegaM=big(0.3), OmegaR=0)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1651.9145u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 625.3444u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_volume_element(c, big(1.41)) ≈ 3.4030879e10u"Mpc^3" rtol = dist_rtol
# Test that FlatWCDM works with non-Float64 (BigFloat in this example)
c = cosmology(h=big(0.7), OmegaM=0.3, OmegaR=0, w0=-0.9, wa=0.1)
@test angular_diameter_dist(c,1,rtol=dist_rtol) ≈ 1612.0585u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,1,2,rtol=dist_rtol) ≈ 607.6802u"Mpc" rtol = dist_rtol
@test angular_diameter_dist(c,pi,rtol=dist_rtol) ≈ angular_diameter_dist(c,0,pi,rtol=dist_rtol) rtol = dist_rtol
@test comoving_volume_element(c, big(1.41)) ≈ 3.1378625e10u"Mpc^3" rtol = dist_rtol
end
@testset "Unit conversion" begin
c = cosmology(h=0.9, OmegaM=0.5, OmegaR=0)
for u in (u"m", u"pc", u"ly")
@test unit(luminosity_dist(u, c, 1)) == u
@test unit(angular_diameter_dist(u, c, 2)) == u
end
for u in (u"s", u"yr")
@test unit(age(u, c, 3)) == u
@test unit(lookback_time(u, c, 4)) == u
end
end
@testset "Utilities" begin
c = cosmology(h = 0.7)
@test hubble_time(c, 0) ≈ Cosmology.hubble_time0(c)
@test hubble_dist(c, 0) ≈ Cosmology.hubble_dist0(c)
@test H(c, 0) ≈ 70u"km/s/Mpc"
end
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | docs | 729 | # Cosmology.jl
[Docs: stable](https://juliaastro.github.io/Cosmology.jl/stable)
[Docs: dev](https://juliaastro.github.io/Cosmology.jl/dev)
[CI](https://github.com/JuliaAstro/Cosmology.jl/actions?query=workflow%3ACI)
[Coverage](https://coveralls.io/r/JuliaAstro/Cosmology.jl?branch=master)
A cosmology calculator for Julia.
## Installation
To install the package:
```julia
pkg> add Cosmology
```
Then, to load into your session:
```julia
julia> using Cosmology
```
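## Quick example
A quick example (the printed values below are taken from the package's doctests):
```julia
julia> using Cosmology

julia> c = cosmology(OmegaM=0.26)
Cosmology.FlatLCDM{Float64}(0.69, 0.7399122024007928, 0.26, 8.77975992071536e-5)

julia> age(c, 1.2)
5.4454795007229455 Gyr

julia> luminosity_dist(c, 1.5)
11420.338287150073 Mpc
```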
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | docs | 1755 | ```@meta
DocTestSetup = quote
using Cosmology, Unitful, UnitfulAstro
ENV["UNITFUL_FANCY_EXPONENTS"] = false
end
```
# API/Reference
!!! tip "Unitful"
[Unitful.jl](https://github.com/painterqubits/Unitful.jl) works seamlessly with Cosmology.jl. In order to use its features, make sure it is installed and imported, along with [UnitfulAstro](https://github.com/juliaastro/UnitfulAstro.jl).
```julia
pkg> add Unitful UnitfulAstro
julia> using Unitful, UnitfulAstro
```
## Cosmological Models
```@docs
cosmology
```
## Distances
```@docs
angular_diameter_dist
comoving_radial_dist
luminosity_dist
distmod
```
### Examples
```jldoctest
julia> c = cosmology(OmegaM=0.26)
Cosmology.FlatLCDM{Float64}(0.69, 0.7399122024007928, 0.26, 8.77975992071536e-5)
julia> angular_diameter_dist(c, 1.2)
1784.0089227105113 Mpc
julia> angular_diameter_dist(c, 0.7, 1.2)
606.6521737365097 Mpc
julia> luminosity_dist(c, 1.5)
11420.338287150073 Mpc
julia> luminosity_dist(u"Gpc", c, 1.5) # Can convert to appropriate unit
11.420338287150074 Gpc
```
## Volumes
```@docs
comoving_volume_element
comoving_volume
```
### Examples
```jldoctest
julia> c = cosmology(OmegaM=0.26)
Cosmology.FlatLCDM{Float64}(0.69, 0.7399122024007928, 0.26, 8.77975992071536e-5)
julia> comoving_volume_element(c, 2.1)
46.74459228888613 Gpc^3
julia> comoving_volume(c, 0.6)
49.3633436631307 Gpc^3
julia> comoving_volume(u"ly^3", c, 0.6)
1.7127035381753e30 ly^3
```
## Times
```@docs
age
lookback_time
```
### Examples
```jldoctest
julia> c = cosmology(OmegaM=0.26)
Cosmology.FlatLCDM{Float64}(0.69, 0.7399122024007928, 0.26, 8.77975992071536e-5)
julia> age(c, 1.2)
5.4454795007229455 Gyr
julia> lookback_time(u"yr", c, 1.2)
8.761465604385489e9 yr
```
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 1.0.3 | 52957a97998867e1a6405483af12e05d8d352a44 | docs | 953 | # Cosmology.jl
[](https://travis-ci.org/JuliaAstro/Cosmology.jl)
[](https://coveralls.io/r/JuliaAstro/Cosmology.jl?branch=master)
[`Cosmology.jl`](https://github.com/JuliaAstro/Cosmology.jl) provides functions for quick cosmological calculations, written in pure Julia.
## Installation
To install the package, enter `pkg` mode (`]`)
```julia-repl
pkg> add Cosmology
```
Then, to load the package
```julia-repl
julia> using Cosmology
```
## Contributing
If you would like to contribute to `Cosmology.jl`, head over to our [GitHub page](https://github.com/juliaastro/cosmology.jl) and open an issue or pull request!
For any new features, we ask the contributors to add any relevant unit tests and bump the package version by one minor version.
| Cosmology | https://github.com/JuliaAstro/Cosmology.jl.git |
|
[
"MIT"
] | 0.1.0 | 04e73b60c712d19b41cfc75a986ce1fdbaa42dec | code | 718 | using ModuleDocstrings
using Documenter
DocMeta.setdocmeta!(ModuleDocstrings, :DocTestSetup, :(using ModuleDocstrings); recursive=true)
makedocs(;
modules=[ModuleDocstrings],
authors="Tim Holy <[email protected]> and contributors",
repo="https://github.com/JuliaDocs/ModuleDocstrings.jl/blob/{commit}{path}#{line}",
sitename="ModuleDocstrings.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://JuliaDocs.github.io/ModuleDocstrings.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
)
deploydocs(;
repo="github.com/JuliaDocs/ModuleDocstrings.jl",
devbranch="main",
push_preview = true,
)
| ModuleDocstrings | https://github.com/JuliaDocs/ModuleDocstrings.jl.git |
|
[
"MIT"
] | 0.1.0 | 04e73b60c712d19b41cfc75a986ce1fdbaa42dec | code | 3797 | """
- `ModuleDocstrings.generate`: Create an API summary docstring for a module.
- `ModuleDocstrings.write`: add an API summary docstring to a package.
"""
module ModuleDocstrings
"""
ModuleDocstrings.generate(mod::Module)
Return an API summary string for `mod`.
The summary is assembled from all docstrings in the package, picking the first sentence of each docstring.
When added to the package (see [`ModuleDocstrings.write`](@ref)), you should expect to make edits by hand:
- exclude docstrings that shouldn't appear in the API summary
- rephrase summaries for greater clarity or compactness (alternatively, consider making such changes to the
original docstring)
"""
function generate(mod::Module)
exported = Set(names(mod))
docex, docpriv = Pair{String,Vector{String}}[], Pair{String,Vector{String}}[]
for (bind, doc) in Base.Docs.meta(mod)
tgt, key = bind.var ∈ exported ? (docex, String(bind.var)) : (docpriv, string(bind))
push!(tgt, key => firstsentences(doc))
end
sort!(docex; by=first)
sort!(docpriv; by=first)
io = IOBuffer()
for doc in (docex, docpriv)
for (key, methsummaries) in doc
print(io, "- `", key, "`:")
if length(methsummaries) == 1
print(io, ' ', methsummaries[1])
else
for msum in methsummaries
print(io, "\n + ", msum)
end
end
print(io, '\n')
end
end
return String(take!(io))
end
firstsentences(docs::Base.Docs.MultiDoc) = String[firstsentences(docstr) for (sig, docstr) in docs.docs]
firstsentences(doc) = firstsentence(doc)
firstsentence(docstr::Base.Docs.DocStr) = firstsentence(docstr.text)
firstsentence(itr) = firstsentence(join(itr))
function firstsentence(d::AbstractDict)
@assert length(d) == 1 "multiple entries ($(length(d))) for a given signature"
return firstsentence(first(d).second)
end
function firstsentence(str::AbstractString)
# @show str
io = IOBuffer()
for line in split(str, '\n')
startswith(line, " ") && continue # code line
all(isspace, line) && continue
idx = findfirst(r"\.(\s|$)", line)
if idx === nothing
print(io, line, ' ')
continue
end
print(io, line[1:first(idx)])
return String(take!(io))
end
return String(take!(io))
end
"""
ModuleDocstrings.write(mod, str)
Edit the module-defining file to insert `str` as a docstring for `mod`.
The package should be checked out in `develop` mode before calling `write`.
"""
function write(mod::Module, str)
path = pathof(mod)
(path === nothing || !iswritable(path)) && error_write(mod, path)
modstr = read(path, String)
idxs = findfirst("module $mod", modstr)
idxs === nothing && error("could not identify start of module")
open(path, "w") do io
print(io, modstr[1:first(idxs)-1], "\"\"\"\n", str, "\"\"\"\n", modstr[first(idxs):end])
end
end
"""
ModuleDocstrings.write(mod)
Modify the source file for `mod` to add an API summary docstring.
The docstring is produced by [`ModuleDocstrings.generate`](@ref).
The package should be checked out in `develop` mode before calling `write`.
"""
write(mod::Module) = write(mod, generate(mod))
# this is replacing, not extending, the Base function of the same name
iswritable(filename::AbstractString) = isfile(filename) && (uperm(filename) & 0x02) != 0x00
error_write(mod, ::Nothing) = error("$mod must be a writable package, but there is no corresponding file, suggesting it wasn't loaded from a package.")
error_write(mod, path::AbstractString) = error("$mod must be a writable package, but the path \"$path\" is not writable.\nDid you forget to `Pkg.develop` the package?")
end
| ModuleDocstrings | https://github.com/JuliaDocs/ModuleDocstrings.jl.git |
|
[
"MIT"
] | 0.1.0 | 04e73b60c712d19b41cfc75a986ce1fdbaa42dec | code | 2139 | using ModuleDocstrings
using Example # test error on a non-devved package
using Pkg
using Test
@testset "ModuleDocstrings.jl" begin
str = ModuleDocstrings.generate(ModuleDocstrings)
@test occursin("ModuleDocstrings.generate", str)
@test occursin("ModuleDocstrings.write", str)
m = @eval Module() begin
"""
foo1()
`foo1` is pretty useful.
"""
foo1() = 0
"""
`foo2` doesn't show the signature.
"""
foo2() = 0
"""
foo3()
`foo3` contains a [`$(string(@__MODULE__)).foo1`](@ref) that contains a period.
"""
foo3()
@__MODULE__
end
str = ModuleDocstrings.generate(m)
@test occursin("- `Main.anonymous.foo1`: `foo1` is pretty useful.", str)
@test occursin("- `Main.anonymous.foo2`: `foo2` doesn't show the signature.", str)
@test occursin("- `Main.anonymous.foo3`: `foo3` contains a [`Main.anonymous.foo1`](@ref) that contains a period.", str)
if Base.VERSION >= v"1.8.0-DEV.363" # use strings in @test_throws; we don't care what type of error this is
@test_throws "must be a writable package, but there is no corresponding file" ModuleDocstrings.write(m)
@test_throws r"must be a writable package, but the path \".*\" is not writable" ModuleDocstrings.write(Example)
else
@test_throws Exception ModuleDocstrings.write(m)
@test_throws Exception ModuleDocstrings.write(Example)
end
mktempdir() do pkgs
push!(LOAD_PATH, pkgs)
newpkgdir = joinpath(pkgs, "DevDummy")
Pkg.generate(newpkgdir)
open(joinpath(newpkgdir, "src", "DevDummy.jl"), "w") do io
print(io,
"""
module DevDummy
\"\"\"
greet()
Print a delightful greeting.
\"\"\"
greet() = print("Hello World!")
end # module
"""
)
end
@eval using DevDummy
ModuleDocstrings.write(DevDummy)
str = read(joinpath(newpkgdir, "src", "DevDummy.jl"), String)
@test occursin(
"""
\"\"\"
- `DevDummy.greet`: Print a delightful greeting.
\"\"\"
module DevDummy
""", str)
end
end
| ModuleDocstrings | https://github.com/JuliaDocs/ModuleDocstrings.jl.git |
|
[
"MIT"
] | 0.1.0 | 04e73b60c712d19b41cfc75a986ce1fdbaa42dec | docs | 1762 | # ModuleDocstrings
[](https://JuliaDocs.github.io/ModuleDocstrings.jl/stable)
[](https://github.com/JuliaDocs/ModuleDocstrings.jl/actions)
[](https://codecov.io/gh/JuliaDocs/ModuleDocstrings.jl)
A package to create simple "module docstrings" for Julia packages. These are targeted at summarizing the main components of your package, essentially as a prompt or reminder to users. For example:
```julia
julia> using ModuleDocstrings
help?> ModuleDocstrings
search: ModuleDocstrings
• ModuleDocstrings.generate: Create an API summary docstring for a
module.
• ModuleDocstrings.write: add an API summary docstring to a package.
```
This reminds users that the two main functions are `ModuleDocstrings.generate` and `ModuleDocstrings.write`.
These summaries are brief; to learn more about a particular function, read its help in full:
```julia
help?> ModuleDocstrings.generate
ModuleDocstrings.generate(mod::Module)
Return an API summary string for mod.
The summary is assembled from all docstrings in the package, picking the first sentence of each docstring. When added to the
package (see ModuleDocstrings.write), you should expect to make edits by hand:
• exclude docstrings that shouldn't appear in the API summary
• rephrase summaries for greater clarity or compactness (alternatively, consider making such changes to the original
docstring)
```
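To apply this to your own package, check it out for development first (a sketch; `MyPkg` is a placeholder for your package's name):
```julia
using Pkg; Pkg.develop("MyPkg")      # MyPkg is a placeholder package name
using MyPkg, ModuleDocstrings
ModuleDocstrings.generate(MyPkg)     # preview the proposed summary
ModuleDocstrings.write(MyPkg)        # insert it into the package's source file
```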
Once you've added the docstring to a `Pkg.develop`ed package, it can be submitted as a pull request.
| ModuleDocstrings | https://github.com/JuliaDocs/ModuleDocstrings.jl.git |
|
[
"MIT"
] | 0.1.0 | 04e73b60c712d19b41cfc75a986ce1fdbaa42dec | docs | 3613 | ```@meta
CurrentModule = ModuleDocstrings
```
# ModuleDocstrings
This package aims to make it easier to attach a docstring to a module, providing users with a quick summary of the core functionality in a package.
To demonstrate, let's create a module with a few docstrings. This module has two functions: `radius` with a single method,
and `distance` with two methods (the details of the methods don't really matter much for this demonstration):
```jldoctest example
julia> module TestDocStrings
export radius, distance
"""
radius(x, y, z)
Compute the radius of the cartesian-coordinate position `[x, y, z]`.
There really isn't much more to say; it's pretty straightforward.
"""
radius(x, y, z) = sqrt(x^2 + y^2 + z^2)
"""
distance(pos1::AbstractVector, pos2::AbstractVector)
Compute the distance between points `pos1` and `pos2`.
"""
distance(pos1::AbstractVector, pos2::AbstractVector) = radius((pos1 - pos2)...)
"""
distance(pos::AbstractVector, points::PointCollection)
Compute the minimum distance between `pos` and any point in `points`.
"""
distance(pos::AbstractVector, points::AbstractVector{<:AbstractVector}) = minimum(p -> distance(pos, p), points)
end
TestDocStrings
```
Now let's generate a module docstring:
```jldoctest example
julia> using ModuleDocstrings
julia> print(ModuleDocstrings.generate(TestDocStrings))
- `distance`:
+ Compute the minimum distance between `pos` and any point in `points`.
+ Compute the distance between points `pos1` and `pos2`.
- `radius`: Compute the radius of the cartesian-coordinate position `[x, y, z]`.
```
From this, you can see that both methods of `distance` are listed, as well as the single method for `radius`.
For each, only the first sentence is used in the summary.
If this were a package that you have in `Pkg.develop` mode, you could insert this string into the package with [`ModuleDocstrings.write`](@ref). However, in this case, you get
```jldoctest example; filter=(r"julia/dev/.*")
julia> ModuleDocstrings.write(TestDocStrings)
ERROR: TestDocStrings must be a writable package, but there is no corresponding file, suggesting it wasn't loaded from a package.
Stacktrace:
[1] error(s::String)
@ Base ./error.jl:33
[2] error_write(mod::Module, #unused#::Nothing)
@ ModuleDocstrings ~/.julia/dev/ModuleDocstrings/src/ModuleDocstrings.jl:101
[3] write(mod::Module, str::String)
@ ModuleDocstrings ~/.julia/dev/ModuleDocstrings/src/ModuleDocstrings.jl:79
[4] write(mod::Module)
@ ModuleDocstrings ~/.julia/dev/ModuleDocstrings/src/ModuleDocstrings.jl:96
[5] top-level scope
@ none:1
```
This error occurred because we defined the module at the REPL; it will likewise error if you have `Pkg.add`ed rather than `Pkg.develop`ed. But for a package checked out in `develop` mode it will modify the main package file.
!!! warning
Be sure you've saved any work *before* running `ModuleDocstrings.write`.
Generally speaking, you should then edit the docstring to trim any methods that don't merit a mention in the summary, and/or to improve the clarity, brevity, or organization of the summaries. Sometimes, you may discover that you can improve the original source docstring as well.
Your changes can then be submitted as a pull request.
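If you prefer to edit the generated summary before it is written out, the two-argument form accepts the string directly (a sketch; `DevPkg` stands in for a package checked out in `develop` mode):
```julia
str = ModuleDocstrings.generate(DevPkg)
# ... edit `str` by hand ...
ModuleDocstrings.write(DevPkg, str)
```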
## API
Documentation for [ModuleDocstrings](https://github.com/JuliaDocs/ModuleDocstrings.jl).
```@docs
ModuleDocstrings.generate
ModuleDocstrings.write
```
| ModuleDocstrings | https://github.com/JuliaDocs/ModuleDocstrings.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 212 | module ScTenifold
export Net, Knk, sctenifoldnet, sctenifoldknk, sctenifoldxct
include("sctenifoldnet.jl")
include("sctenifoldknk.jl")
include("sctenifoldxct.jl")
using .Net
using .Knk
using .Xct
end # module
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 376 | cd(dirname(@__FILE__))
# include("tensordecomp.jl")
include("ScTenifold.jl")
using .ScTenifold
using LinearAlgebra, Statistics, Distributions, Random
d=NegativeBinomial(20,0.98)
X=rand(d,100,2000)
lbszv=30
X=X[:,vec(sum(X,dims=1).>lbszv)]
@time Z1,A1=ScTenifold.tenrnet(X, donorm=true)
include("tensordecomp2.jl")
Z0a=ScTenifold.tensordecomp(A1)
Z2=tensordecomp2(A1) | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 229 | function drgenes(d::AbstractVector{T}) where T<:Real
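# Differential-regulation scores: fold change of the squared alignment distances,
# chi-squared(1) p-values, and Benjamini-Hochberg adjusted p-values.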
d²=d.^2
fc=d²./mean(d²)
χ² = Chisq(1)
pVals = ccdf.(χ², fc)
pAdjusted = MultipleTesting.adjust(pVals, BenjaminiHochberg())
return fc,pVals,pAdjusted
end | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 570 | function manialn(X::AbstractMatrix{T},Y::AbstractMatrix{T}) where T<:Real
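# Manifold alignment of two networks: build a joint graph Laplacian coupled by μ*I,
# embed both networks in a shared low-dimensional space, and return per-node distances.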
μ,dim=0.9,30
n1,n2=size(X,1),size(Y,1)
W₁,W₂=X.+1,Y.+1
ℐ=Matrix(I,n1,n2)
μ = μ*(sum(W₁)+sum(W₂))/(2*sum(ℐ))
𝕎 = [W₁ μ*ℐ; μ*ℐ' W₂]
L=diagm(vec(sum(abs.(𝕎),dims=1))).-𝕎
# λ,V =KrylovKit.eigsolve(L,35,:SR,krylovdim=40)
# V=hcat(V)
λ,V = eigen(L)
i=real(λ).>=1e-8
V=real(V[:,i])
dim=min(dim,size(V,2))
V=V[:,1:dim]
aln0=V[1:n1,:]
aln1=V[n1+1:end,:]
d = norm.((aln0.-aln1)[i,:] for i = 1:n1)
# _,idx=findmax(dd)
return d
end
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 673 | function pcnet(X::AbstractMatrix{T}, p::Int=3;
scalein::Bool=true, scaleout::Bool=false,
symmout::Bool=false) where T<:Real
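# Principal-component regression network: regress each gene on the leading
# principal components of all other genes; the coefficients become edge weights.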
if scalein
σ=std(X,dims=1)
σ[σ.==0].=1.0
X=(X.-mean(X,dims=1))./σ
end
ℊ=size(X,2)
A=1.0 .-Matrix(I,ℊ,ℊ)
Threads.@threads for k in 1:ℊ
y=X[:,k]
𝒳=X[:,1:end.≠k]
ϕ=TSVD.tsvd(𝒳,p)[3]
s=𝒳*ϕ
s ./= (vecnorm(s).^2)'
b=sum(y.*s,dims=1)
𝒷=ϕ*b'
@inbounds A[k,A[k,:].==1.0]=𝒷
end
if symmout
A=0.5*(A+A')
end
if scaleout
A=A./maximum(abs.(A))
end
return convert(Matrix{Float16},A)
end
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 783 | module Knk
export sctenifoldknk
function sctenifoldknk(X::AbstractMatrix{T}, k::Float64) where T<:Real
#X1=copy(X)
#X1[k,:].=0
end
end
#=
using DelimitedFiles
cd(dirname(@__FILE__))
X0=readdlm("X.txt",',',Int16)
X1=copy(X0)
genelist=vec(readdlm("genelist.txt",String))
idx=findall(genelist.=="Cftr")
X1[idx[1],:].=0
lbsz=sum(X0,dims=1)
X0=(X0./lbsz)*1e4
lbsz=sum(X1,dims=1)
X1=(X1./lbsz)*1e4
include("scTenifoldNet.jl")
using .scTenifoldNet
#X0=rand(100,1000);
#X1=rand(100,1000);
@show Threads.nthreads()
@time Z0=scTenifoldNet.pcnet(X0')
@time Z1=scTenifoldNet.pcnet(X1')
@time d,aln0,aln1=scTenifoldNet.manialn(Z0,Z1)
fc,p,adjp=scTenifoldNet.drgenes(d)
writedlm("qvalues.txt",[fc p adjp])
using StatsPlots, Distributions
x=rand(Chisq(1), length(fc))
qqplot(x, fc)
end
=# | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 810 | module Net
export sctenifoldnet, tenrnet, manialn, drgenes, pcnet, tensordecomp, tensordecomp2
const NCOMP1,NCOMP2=3,5
const NLAYERS,NCELLS=10,500
using Statistics, LinearAlgebra, Distributions, Random
using MultipleTesting, Random, SparseArrays, TSVD, TensorToolbox
# import KrylovKit
include("support.jl")
include("pcnet.jl") # include the contents of other files in the module
include("tenrnet.jl")
include("tensordecomp.jl")
include("manialn.jl")
include("drgenes.jl")
function sctenifoldnet(X::AbstractMatrix{T}, Y::AbstractMatrix{T}; donorm::Bool=false) where T<:Real
Z0,_=tenrnet(X,donorm=donorm)
Z1,_=tenrnet(Y,donorm=donorm)
Z0=0.5*(Z0+Z0')
Z1=0.5*(Z1+Z1')
d=manialn(Z0,Z1)
fc,p,adjp=drgenes(d)
return d,fc,p,adjp
end
end | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 360 | module Xct
export sctenifoldxct
const NCOMP1,NCOMP2=3,5
const NLAYERS,NCELLS=10,500
using Statistics, LinearAlgebra, Distributions, Random
using MultipleTesting, Random, SparseArrays, TSVD, TensorToolbox
# import KrylovKit
include("support.jl")
function sctenifoldxct(X::AbstractMatrix{T}, Y::AbstractMatrix{T}; donorm::Bool=false) where T<:Real
end
end | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 210 |
# vecnorm(x) = x./norm.(x[:,i] for i in 1:size(x,2))'
vecnorm(x::AbstractMatrix) = norm.(x[:,i] for i in 1:size(x,2))
function normc!(x)
for i in 1:size(x,2)
x[:,i]=x[:,i]./norm(x[:,i])
end
end | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 624 | function tenrnet(X::AbstractMatrix{T}; donorm::Bool=true) where T<:Real
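# Build NLAYERS networks from random cell subsamples, keep only the top 5% of edge
# weights in each, then denoise the stack by tensor decomposition.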
ℊ,𝒸=size(X)
if donorm
lbsz=sum(X,dims=1)
# X=(X./lbsz)*median(lbsz)
X=(X./lbsz)*1e4
end
A=zeros(Float16, ℊ, ℊ, NLAYERS)
for k=1:NLAYERS
println("network ... $k")
𝕩=X[:,randperm(𝒸)][:,1:NCELLS] # jackknife (m-out-of-n)
# 𝕩=X[:,rand(1:𝒸,NCELLS)]; # bootstrapping (m-out-of-n)
𝕩ᵀ=transpose(𝕩)
a=pcnet(𝕩ᵀ,NCOMP1)
a[abs.(a).<quantile(vec(abs.(a)),0.95)].=0.0
@inbounds A[:,:,k]=sparse(a)
end
Z=tensordecomp(A,NCOMP2)
return Z,A
end | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 266 | function tensordecomp(Λ::AbstractArray{T,3}, p::Int=5;
scaleout::Bool=true) where T
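# CP (CANDECOMP/PARAFAC) decomposition of the network stack, averaging the
# reconstructed layers into a single denoised adjacency matrix.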
𝒯=TensorToolbox.cp_als(Λ,p)
𝕏=TensorToolbox.full(𝒯)
A=mean(𝕏[:,:,i] for i=1:size(𝕏,3))
if scaleout
A ./=maximum(abs.(A))
end
return A
end
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 1206 | using TensorDecompositions
function tensordecomp2(Λ::AbstractArray{T,3}, p::Int=5;
scaleout::Bool=true) where T
Λ=convert(Array{Float64}, Λ)
𝒯=TensorDecompositions.candecomp(Λ, p, (randn(size(Λ,1), p), randn(size(Λ,2), p), randn(size(Λ,3), p)), compute_error=true, method=:ALS);
# 𝒯=TensorToolbox.cp_als(Λ,p)
𝕏=TensorDecompositions.compose(𝒯)
A=mean(𝕏[:,:,i] for i=1:size(𝕏,3))
if scaleout
A ./=maximum(abs.(A))
end
return A
end
#=
u = randn(10); v = randn(20); w = randn(30)
T = cat(map(x -> x * u * v', w)..., dims=3) + 0.2 * randn(10, 20, 30)
k=2
F = candecomp(T, k, (randn(10, k), randn(20, k), randn(30, k)), compute_error=true, method=:ALS);
# https://juliapackages.com/p/ntfk
import NTFk
import TensorDecompositions
csize = (2, 3, 4)
tsize = (5, 10, 15)
tucker_orig = NTFk.rand_tucker(csize, tsize; factors_nonneg=true, core_nonneg=true)
T_orig = TensorDecompositions.compose(tucker_orig)
T_orig .*= 1000
sizes = [csize, (1,3,4), (3,3,4), (2,2,4), (2,4,4), (2,3,3), (2,3,5)]
tucker_estimated, csize_estimated = NTFk.analysis(T_orig, sizes, 3; eigmethod=[false,false,false], progressbar=false, tol=1e-16, max_iter=100000, lambda=0.);
=#
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 680 | import CanDecomp
A = rand(2, 3)
B = rand(5, 3)
C = rand(10, 3)
T_orig = CanDecomp.totensor(A, B, C)
Af = rand(size(A)...);
Bf = rand(size(B)...);
Cf = rand(size(C)...);
import StaticArrays
CanDecomp.candecomp!(StaticArrays.SVector(Af, Bf, Cf), T_orig, Val{:nnoptim}; regularization=1e-3, print_level=0, max_cd_iters=1000)
T_est = CanDecomp.totensor(Af, Bf, Cf);
import NTFk
import LinearAlgebra
@info("Norm $(LinearAlgebra.norm(T_est .- T_orig))")
import Cairo, Fontconfig
NTFk.plot2matrices(A, Af; progressbar=nothing)
NTFk.plot2matrices(B, Bf; progressbar=nothing)
NTFk.plot2matrices(C, Cf; progressbar=nothing)
NTFk.plotlefttensor(T_orig, T_est; progressbar=nothing)
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 463 | import TensorDecompositions
import NTFk
csize = (2, 3, 4)
tsize = (5, 10, 15)
tucker_orig = NTFk.rand_tucker(csize, tsize; factors_nonneg=true, core_nonneg=true)
T_orig = TensorDecompositions.compose(tucker_orig)
T_orig .*= 1000
sizes = [csize, (1,3,4), (3,3,4), (2,2,4), (2,4,4), (2,3,3), (2,3,5)]
tucker_estimated, csize_estimated = NTFk.analysis(T_orig, sizes, 3; eigmethod=[false,false,false], progressbar=false, tol=1e-16, max_iter=100000, lambda=0.);
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 444 | using ScTenifold
X0=rand(100,1000);
X1=copy(X0)
X1[4,:].=0.0
d,fc,p,adjp=ScTenifold.sctenifoldnet(X0,X1,donorm=false)
#@show Threads.nthreads()
#@time Z0=ScTenifoldNet.tenrnet(X0, donorm=false)
#@time Z1=ScTenifoldNet.tenrnet(X1, donorm=false)
#Z0=0.5*(Z0+Z0')
#Z1=0.5*(Z1+Z1')
#@time d,aln0,aln1=ScTenifoldNet.manialn(Z0,Z1)
#fc,p,adjp=ScTenifoldNet.drgenes(d)
#using StatsPlots, Distributions
#x=rand(Chisq(1), length(fc))
#qqplot(x, fc) | ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 623 | using ScTenifold
using LinearAlgebra, Statistics, Distributions, Random
d=NegativeBinomial(20,0.98)
X=rand(d,100,2000)
lbszv=30
Y=copy(X)
Y[10,:]=Y[50,:]
Y[2,:]=Y[11,:]
Y[3,:]=Y[5,:]
X=X[:,vec(sum(X,dims=1).>lbszv)]
Y=Y[:,vec(sum(Y,dims=1).>lbszv)]
@show Threads.nthreads()
@time Z0,_=ScTenifold.tenrnet(X, donorm=true)
@time Z1,_=ScTenifold.tenrnet(Y, donorm=true)
Z0=0.5*(Z0+Z0')
Z1=0.5*(Z1+Z1')
@time d=ScTenifold.manialn(Z0,Z1)
fc,p,adjp=ScTenifold.drgenes(d)
@show [adjp[10] adjp[11] adjp[50] adjp[20]];
@show [adjp[70] adjp[31] adjp[55] adjp[26]];
using StatsPlots
x=rand(Chisq(1), length(fc))
qqplot(x, fc)
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | code | 419 | # push!(LOAD_PATH,"E:/GitHub/ScTenifold.jl/src/");
using ScTenifold # the tests use the ScTenifold module...
using Test # and the Test standard library...
tests = ["code_test1", "code_test2"] # the test file names are stored as strings...
for t in tests
include("$(t).jl") # ... so that they can be evaluated in a loop
end
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.1.1 | 3372d4b1cfea67b8efafebbcf4a37394022be057 | docs | 1675 | # ScTenifold.jl
*A single-cell gene regulatory network analysis suite in Julia.*
[](https://ci.appveyor.com/project/jamesjcai/sctenifold-jl)
## Installation
First of all, you need to install Julia. A recommended way is to download a
pre-built binary; binaries for several major platforms are distributed at
<https://julialang.org/downloads/>. Currently, `ScTenifold.jl` is an
unregistered package, so install it with the following command:
```julia
using Pkg; Pkg.add(PackageSpec(url="https://github.com/jamesjcai/ScTenifold.jl.git"))
```
To check the installation, you can try `using ScTenifold` in your REPL:
```
_ _ _(_)_ | Documentation: https://docs.julialang.org
(_) | (_) (_) |
_ _ _| |_ __ _ | Type "?" for help, "]?" for Pkg help.
| | | | | | |/ _` | |
| | |_| | | | (_| | | Version 1.5.3 (2020-11-09)
_/ |\__'_|_|_|\__'_| | Official https://julialang.org/ release
|__/ |
julia> using ScTenifold
[ Info: Precompiling ScTenifold [fcca1770-266e-4af5-8612-876362e279e9]
julia>
```
If no error messages appear, ScTenifold.jl has been installed successfully.
To run unit tests, execute the following command:
```julia
using Pkg; Pkg.test("ScTenifold")
```
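## Example
A minimal usage sketch, adapted from the package's own tests (`sctenifoldnet` compares two expression matrices and returns per-gene distances, fold changes, and p-values):
```julia
using ScTenifold
X0 = rand(100, 1000)       # genes × cells expression matrix
X1 = copy(X0)
X1[4, :] .= 0.0            # perturb gene 4 in the second sample
d, fc, p, adjp = sctenifoldnet(X0, X1, donorm=false)
```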
## Contributing and Questions
Contributions are very welcome, as are feature requests and suggestions. Please open an
[issue][issues-url] if you encounter any problems or would just like to ask a question.
[issues-url]: https://github.com/jamesjcai/ScTenifold.jl/issues
| ScTenifold | https://github.com/jamesjcai/ScTenifold.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 897 | using Genie, Genie.Router
using Genie.Renderer, Genie.Renderer.Html, Genie.Renderer.Json
using JSON
using SwagUI
swagger_document = JSON.parsefile("./swagger.json")
urls = Array{Dict{String, Any}, 1}()
url1 = Dict{String, Any}()
url1["url"] = "https://petstore.swagger.io/v2/swagger.json"
url1["name"] = "Spec1"
url2 = Dict{String, Any}()
url2["url"] = "https://petstore.swagger.io/v2/swagger.json"
url2["name"] = "Spec2"
push!(urls, url1)
push!(urls, url2)
options = Options()
# options.custom_css = ".swagger-ui .topbar { display: none }"
options.show_explorer = true
# options.swagger_options["validatorUrl"] = nothing
options.swagger_options["url"] = "https://petstore.swagger.io/v2/swagger.json"
# options.swagger_options["urls"] = urls
route("/docs") do
render_swagger(nothing, options=options)
# render_swagger(swagger_document, options=options)
end
up(8001, async = false) | SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 3793 |
const SWAGGER_UI_BUNDLE_JS_URL = "https://cdnjs.cloudflare.com/ajax/libs/swagger-ui/4.9.1/swagger-ui-bundle.js"
const SWAGGER_UI_STANDALONE_JS_URL = "https://cdnjs.cloudflare.com/ajax/libs/swagger-ui/4.9.1/swagger-ui-standalone-preset.js"
const SWAGGER_UI_CSS_URL = "https://cdnjs.cloudflare.com/ajax/libs/swagger-ui/4.9.1/swagger-ui.css"
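"""
    build_js_string(options::Options, swagger_doc)

Builds the inline JavaScript that initializes Swagger UI from the resolved
options and, when provided, an inlined swagger document.
"""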
function build_js_string(options::Options, swagger_doc::Union{Dict{String, Any}, Nothing})::String
swagger_options = _options_to_json(options, swagger_doc)
return """
window.onload = function() {
let url = window.location.search.match(/url=([^&]+)/);
if (url && url.length > 1) {
url = decodeURIComponent(url[1]);
} else {
url = window.location.origin;
}
const options = $(swagger_options)
url = options.swaggerUrl || url
const customOptions = options.customOptions
const swaggerOptions = {
spec: options.swaggerDoc,
url: url,
urls: options.swaggerUrls,
dom_id: '#swagger-ui',
deepLinking: true,
presets: [
SwaggerUIBundle.presets.apis,
SwaggerUIStandalonePreset
],
plugins: [
SwaggerUIBundle.plugins.DownloadUrl
],
layout: "StandaloneLayout"
}
for (const attr in customOptions) {
swaggerOptions[attr] = customOptions[attr]
}
const ui = SwaggerUIBundle(swaggerOptions);
if (customOptions.oauth) {
ui.initOAuth(customOptions.oauth)
}
if (customOptions.authAction) {
ui.authActions.authorize(customOptions.authAction)
}
window.ui = ui;
};
"""
end
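"""
    build_html_string(options::Options, js_string::String)

Wraps the generated JavaScript in the full Swagger UI HTML page, applying the
custom title, favicon, CSS, and explorer settings from `options`.
"""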
function build_html_string(options::Options, js_string::String)::String
favicon = options.custom_favicon == "" ? DEFAULT_FAVICON : "<link rel='icon' href=$(options.custom_favicon) />"
title = options.custom_site_title
style = options.custom_css
stylesheet = options.custom_css_url == "" ? "" : "<link rel='stylesheet' type='text/css' href=$(options.custom_css_url) />"
script = options.custom_js == "" ? "" : "<script src=$(options.custom_js) charset='UTF-8'></script>"
explorer = options.show_explorer ? "" : ".swagger-ui .topbar .download-url-wrapper { display: none; }"
return """
<!-- HTML for static distribution bundle build -->
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>$(title)</title>
<link rel="stylesheet" type="text/css" href="$(SWAGGER_UI_CSS_URL)" />
$(favicon)
$(script)
<style>
html
{
box-sizing: border-box;
overflow: -moz-scrollbars-vertical;
overflow-y: scroll;
}
*,
*:before,
*:after
{
box-sizing: inherit;
}
body
{
margin:0;
background: #fafafa;
}
$(explorer)
$(style)
</style>
</head>
<body>
<div id="swagger-ui"></div>
<script src="$(SWAGGER_UI_BUNDLE_JS_URL)" charset="UTF-8"></script>
<script src="$(SWAGGER_UI_STANDALONE_JS_URL)" charset="UTF-8"></script>
<script>$(js_string)</script>
$(stylesheet)
</body>
</html>
"""
end | SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 1692 | const DEFAULT_FAVICON = "<link rel='icon' type='image/png' href='./favicon-16x16.png' sizes='16x16' /><link rel='icon' type='image/png' href='./favicon-32x32.png' sizes='32x32' />"
const DEFAULT_TITLE = "Swagger UI"
"""
mutable struct Options
"""
mutable struct Options
show_explorer::Bool
custom_js::String
custom_css::String
custom_css_url::String
custom_site_title::String
custom_favicon::String
swagger_options::Dict{String, Any}
function Options(;
show_explorer::Bool=true,
custom_js::String="",
custom_css::String="",
custom_css_url::String="",
custom_site_title::String=DEFAULT_TITLE,
custom_favicon::String="",
swagger_options::Dict{String, Any}=Dict{String, Any}())
this = new()
this.show_explorer = show_explorer
this.custom_js = custom_js
this.custom_css = custom_css
this.custom_css_url = custom_css_url
this.custom_site_title = custom_site_title
this.custom_favicon = custom_favicon
this.swagger_options = swagger_options
return this
end
end
function _options_to_json(options::Options, swagger_doc::Union{Dict{String, Any}, Nothing})::String
swagger_options = options.swagger_options
opts = Dict{String, Any}()
opts["customOptions"] = swagger_options
if !isnothing(swagger_doc)
opts["swaggerDoc"] = swagger_doc
end
if haskey(swagger_options, "swaggerUrl")
opts["swaggerUrl"] = swagger_options["swaggerUrl"]
end
if haskey(swagger_options, "swaggerUrls")
opts["swaggerUrls"] = swagger_options["swaggerUrls"]
end
return JSON.json(opts)
end | SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 1131 | module SwagUI
using JSON
include("Options.jl")
include("Builders.jl")
export Options, render_swagger
"""
render_swagger(swagger_doc::Union{Dict{String, Any}, Nothing};
options=nothing, kw...)
Render Swagger UI's HTML as `String`, based on the options passed in. If a vaid `swagger_doc` is passed,
the `url` and `urls` in the `Options.swagger_options` are ignored by the UI. If `swagger_doc` is `nothing`,
`url` or `urls` has to be included in the `Options.swagger_options`.
Options can be passed as an `Options` struct or indirectly via keyword arguments (see `Options`).
# Arguments
Required:
- `swagger_doc::Union{Dict{String, Any}, Nothing}` : The swagger specification file `swagger.json` as a `Dict{String, Any}`.
"""
function render_swagger(swagger_doc::Union{AbstractDict{<:AbstractString,<:Any}, Nothing};
options::Union{Options,Nothing}=nothing, kw...)::String
isnothing(options) && (options = Options(;kw...))
js_string = build_js_string(options, swagger_doc)
html_string = build_html_string(options, js_string)
return html_string
end
end # module
| SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 3152 | using JSON
using Genie, HTTP
using Genie.Router, Genie.Responses
const PORT = 8000
const HOST = "127.0.0.1"
const PROTOCOL = "http"
const BASE_URL = "$PROTOCOL://$HOST:$PORT"
const ASSETS_PATH = joinpath(dirname(dirname(@__FILE__)), "test", "assets")
const DEFAULT_SWAGGER_JSON = "https://petstore.swagger.io/v2/swagger.json"
function serve_assets(path::String; excludes::Array{String, 1}=Array{String, 1}())
for (root, _, files) in walkdir(path)
for file in files
if !(file in excludes)
route(file) do
open(read, joinpath(root, file)) |> String
end
end
end
end
end
serve_assets(ASSETS_PATH)
up(PORT, HOST; open_browser = false, verbose = true, async = true)
# basic settings
swagger_document = JSON.parsefile(joinpath(ASSETS_PATH, "swagger.json"))
swagger_options = Dict{String, Any}()
options = Options()
swagger_options["url"] = "https://petstore.swagger.io/v2/swagger.json"
options.swagger_options = swagger_options
@testset "Render Assets Tests" begin
for (root, dirs, files) in walkdir(ASSETS_PATH)
for file in files
r = HTTP.request("GET", "$BASE_URL/$file")
local_file = read(open(joinpath(ASSETS_PATH, file)), String)
@test r.status == 200
@test local_file == String(r.body)
end
end
end
@testset "Basic Swagger Tests" begin
route_name = "docs"
route("/$route_name", method = GET) do
render_swagger(nothing, options=options)
end
r = HTTP.request("GET", "$BASE_URL/$route_name")
html_string = String(r.body)
opts = Dict{String, Any}()
opts["customOptions"] = swagger_options
@test r.status == 200
@test occursin("<title>$(options.custom_site_title)</title>", html_string)
@test occursin("const options = $(JSON.json(opts))", html_string)
end
@testset "Customizations Tests" begin
route_name = "docs_custom"
custom_options = Options()
custom_swagger_options = Dict{String, Any}()
custom_swagger_options["url"] = DEFAULT_SWAGGER_JSON
custom_options.swagger_options = custom_swagger_options
# custom css
css_string = ".swagger-ui .topbar { display: none }"
custom_options.custom_css = css_string
# custom stylesheet
stylesheet_path = joinpath(ASSETS_PATH, "custom.css")
custom_options.custom_css_url = stylesheet_path
# custom favicon
favicon_path = joinpath(ASSETS_PATH, "favicon.ico")
custom_options.custom_favicon = favicon_path
# custom site title
title = "Jimmy's new API"
custom_options.custom_site_title = title
route("/$route_name", method = GET) do
render_swagger(nothing, options=custom_options)
end
r = HTTP.request("GET", "$BASE_URL/$route_name")
html_string = replace(String(r.body), "\n" => "")
@test r.status == 200
@test occursin(css_string, html_string)
@test occursin("<link rel='stylesheet' type='text/css' href=$(stylesheet_path) />", html_string)
@test occursin("<title>$(title)</title>", html_string)
@test occursin("<link rel='icon' href=$(favicon_path) />", html_string)
end
| SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | code | 42 | using SwagUI
using Test
include("app.jl") | SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.10.0 | 3b131b0726fc1b56e9ae86668b5e5bc44fe14d08 | docs | 3831 | # Julia Swagger UI
[](https://codecov.io/gh/jiachengzhang1/SwaggerUI)
Want to use [Swagger UI](https://swagger.io/tools/swagger-ui/) in Julia? This package has your back!
Inspired by [swagger-ui-express](https://github.com/scottie1984/swagger-ui-express), the package auto-generates Swagger UI based on `swagger.json`. The generated API documentation can be served as an endpoint using a package such as [Genie.jl](https://github.com/GenieFramework/Genie.jl).
Pre-built [swagger-ui](https://github.com/swagger-api/swagger-ui/tree/master/dist) is used. Because swagger-ui is implemented in Node.js, all pre-built files and assets are included in the [dist](dist) folder for serving. (Open an issue if there are better ways to do this.)
## Installation
```julia
julia> ]
pkg> add SwagUI
```
## Usage
A simple setup with Genie:
```julia
using Genie, Genie.Router
using JSON
using SwagUI
# use a swagger json from the local machine
swagger_document = JSON.parsefile("./swagger.json")
route("/docs") do
render_swagger(swagger_document)
end
```
### Integrate with [Swagger Markdown](https://github.com/GenieFramework/SwaggerMarkdown.jl)
```julia
using Genie, Genie.Router
using JSON
using SwagUI
using SwaggerMarkdown
@swagger """
/doge:
get:
description: Doge to the moon!
responses:
'200':
description: Doge to the moon!!.
"""
route("/doge") do
JSON.json("Doge to the moon!!")
end
# build a swagger document from markdown
info = Dict{String, Any}()
info["title"] = "Swagger Petstore"
info["version"] = "1.0.5"
openApi = OpenAPI("2.0", info)
swagger_document = build(openApi)
route("/docs") do
render_swagger(swagger_document)
end
up(8001, async = false)
```
[SwaggerMarkdown](https://github.com/jiachengzhang1/SwaggerMarkdown) builds the swagger document from markdown comments in the code. It returns a `swagger_document::Dict{String, Any}` that can be passed to `SwagUI.render_swagger` to render the API documentation in the familiar Swagger UI style.
### Configuration & Customization
**Swagger Explorer**
The explorer can be turned off by setting `Options.show_explorer` to `false`. It is turned on (`Options.show_explorer = true`) by default.
```julia
using Genie, Genie.Router
using JSON
using SwagUI
swagger_document = JSON.parsefile("./swagger.json")
# turn off the explorer
options = Options()
options.show_explorer = false
route("/docs") do
render_swagger(swagger_document, options=options)
end
```
**Custom swagger options**
Swagger UI can be configured by setting `Options.swagger_options::Dict{String, Any}`. The key is the name of the configuration while the value is the setting. More details about configuration can be found through the [Official Swagger UI Configuration page](https://github.com/swagger-api/swagger-ui/blob/master/docs/usage/configuration.md). The example below sets the URL pointing to API definition to `https://petstore.swagger.io/v2/swagger.json`.
```julia
using Genie, Genie.Router
using JSON
using SwagUI
# set the URL pointing to API definition
options = Options()
options.swagger_options["url"] = "https://petstore.swagger.io/v2/swagger.json"
route("/docs") do
# if swagger_options["url"] or swagger_options["urls"] is set,
# swagger_document is not needed
render_swagger(nothing, options=options)
end
```
**Custom CSS**
Set `Options.custom_css` to a custom CSS string.
```julia
using Genie, Genie.Router
using JSON
using SwagUI
swagger_document = JSON.parsefile("./swagger.json")
# set custom css using options
options = Options()
options.custom_css = ".swagger-ui .topbar { display: none }"
route("/docs") do
render_swagger(swagger_document, options=options)
end
```
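**Custom site title, favicon, and stylesheet**

The remaining `Options` fields work the same way (the asset paths below are placeholders):
```julia
options = Options()
options.custom_site_title = "My API"             # browser tab title
options.custom_favicon = "./assets/favicon.ico"  # placeholder path
options.custom_css_url = "./assets/custom.css"   # placeholder path
```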
**TODO**
More examples
| SwagUI | https://github.com/GenieFramework/SwagUI.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 358 | push!(LOAD_PATH,"../src/")
using Pkg
Pkg.activate(".")
using Mangal
using Weave
_path_elements = ["docs", "src", "vignettes"]
_jmd_files = filter(x -> endswith(x, ".Jmd"), readdir(joinpath(_path_elements...)))
_files_to_compile = [joinpath(_path_elements..., f) for f in _jmd_files]
for _file in _files_to_compile
weave(_file, doctype="github")
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 805 | using Pkg
using Documenter
# push!(LOAD_PATH,"../src/")
using Mangal
_pkg_doc = [
"Data types" => "pkg/types.md",
"Counting objects" => "pkg/count.md",
"Methods for data retrieval" => "pkg/methods.md",
"Networks retrieval" => "pkg/ecologicalnetworks.md",
"Internal functions" => "pkg/internals.md"
]
_pkg_vig = [
"Introduction" => "vignettes/introduction.md",
"Counting & paging" => "vignettes/counting.md",
]
_list_of_pages = [
"index.md",
"Vignettes" => _pkg_vig,
"Package documentation" => _pkg_doc
]
makedocs(
sitename = "Mangal.jl",
authors = "Timothée Poisot",
modules = [Mangal],
pages = _list_of_pages
)
deploydocs(
deps = Deps.pip("pygments", "python-markdown-math"),
repo = "github.com/PoisotLab/Mangal.jl.git",
devbranch = "main"
)
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 2169 | module Mangal
using HTTP
using JSON
using GeoInterface
using Dates
using TestItems
const web_root = "https://mangal.io/"
const api_root = web_root * "api/v2/"
# Login
include(joinpath(".", "login.jl"))
# Types
include(joinpath(".", "types.jl"))
export MangalDataset
export MangalNetwork
export MangalReferenceTaxon
export MangalNode
export MangalInteraction
export MangalReference
export MangalAttribute
export MangalUser
# Endpoints
const _MANGAL_ENDPOINTS = Dict(
MangalReferenceTaxon => "taxonomy",
MangalNode => "node",
MangalInteraction => "interaction",
MangalNetwork => "network",
MangalDataset => "dataset",
MangalReference => "reference",
MangalAttribute => "attribute",
MangalUser => "user",
)
#trait = "trait",
# The cache!
global _MANGAL_CACHES = Dict(
MangalNode => Dict{Int64,MangalNode}(),
MangalReferenceTaxon => Dict{Int64,MangalReferenceTaxon}(),
MangalNetwork => Dict{Int64,MangalNetwork}(),
MangalAttribute => Dict{Int64,MangalAttribute}(),
)
# Response formatters
include("response_format.jl")
# Basic functions
include(joinpath(".", "basics.jl"))
# Generate code to write most of the API -- including count method
include(joinpath(".", "generators.jl"))
export nodes, node
export backbones, backbone
export datasets, dataset
export networks, network
export references, reference
export interactions, interaction
export attributes, attribute
export users, user
# Datasets
include(joinpath(".", "dataset.jl"))
# Networks
include(joinpath(".", "network.jl"))
# Taxonomy backbone(s)
include(joinpath(".", "backbone.jl"))
# Network nodes
include(joinpath(".", "node.jl"))
# Interactions
include(joinpath(".", "interaction.jl"))
# References
include(joinpath(".", "reference.jl"))
# Counts
include(joinpath(".", "count.jl"))
# Show
include(joinpath(".", "show.jl"))
@testitem "We can get data in/out of cache" begin
N = nodes()[1]
n = node(N.id)
@test length(Mangal._MANGAL_CACHES[MangalNode]) != 0
end
@testitem "We can get attribute data" begin
@test typeof(attributes()) <: Vector{MangalAttribute}
@test typeof(attribute(6)) <: MangalAttribute
end
end # module
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 796 | """
backbone(name::AbstractString)
Returns the backbone entry for a taxon, matched by *exact* name.
"""
function backbone(name::AbstractString)
q = backbones(Pair("name", name))
return isequal(1)(length(q)) ? only(q) : nothing
end
@testitem "We can get data from the backbone" begin
@test typeof(backbones()) <: Vector{MangalReferenceTaxon}
end
@testitem "We can get data from the backbone with a search" begin
@test typeof(backbones("q" => "Salix")) <: Vector{MangalReferenceTaxon}
end
@testitem "We can get specific taxa from the backbone" begin
@test backbone(1).name == "Abutilon theophrasti"
@test backbone("Abutilon theophrasti").id == 1
end
@testitem "We can get paged data from the backbone" begin
@test length(backbones(Pair("count", 10))) == 10
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 3491 | """
    cache(results::Vector{T}) where {T <: Union{MangalReferenceTaxon,MangalNode,MangalNetwork,MangalAttribute}}
Internally, the `Mangal` package uses a cache to store some objects that are
likely to be queried more than once: `MangalNode`, `MangalReferenceTaxon`,
`MangalNetwork`, and `MangalAttribute`, which are called in a nested way during
the querying of *e.g.* interactions. This is **not** a fancy mechanism, and it
only works when calling the objects by their `id` (which is what the
resources-hungry functions do internally anyway).
"""
function cache(results::Vector{T}) where {T <: Union{MangalReferenceTaxon,MangalNode,MangalNetwork,MangalAttribute}}
for result in results
if !haskey(_MANGAL_CACHES[T], result.id)
global _MANGAL_CACHES[T][result.id] = result
end
end
end
"""
generate_base_header()
If a bearer token is present, this function will add it to the header.
"""
function generate_base_header()
headers = ["Content-Type" => "application/json"]
if haskey(ENV, "MANGAL_BEARER_TOKEN")
push!(headers, "Authorization" => "bearer $(ENV["MANGAL_BEARER_TOKEN"])")
end
return headers
end
"""
generate_request_query(parameters::Pair...)
Takes a series of `Pairs`, and returns a URL-ready query string.
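For example, `generate_request_query("count" => 10, "page" => 2)` returns `"?count=10&page=2"`.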
"""
function generate_request_query(parameters::Pair...)
query = ""
for (i, pair) in enumerate(promote(parameters...))
i == 1 || (query *= "&")
i == 1 && (query *= "?")
query *= pair.first * "=" * string(pair.second)
end
return replace(query, " " => "+")
end
"""
    search_objects_by_query(ReturnType::Type, query::Pair...)
In all cases, it is assumed that the function will be wrapped in calls to query
objects until no further objects are found.
"""
function search_objects_by_query(ReturnType::Type, query::Pair...)
# Headers
headers = Mangal.generate_base_header()
# Full endpoint
endpoint = Mangal.api_root * Mangal._MANGAL_ENDPOINTS[ReturnType]
# Convert query parameters
request_url = length(query) == 0 ? endpoint : endpoint*Mangal.generate_request_query(query...)
# Perform the request
this_request = HTTP.get(request_url, headers)
request_body = String(this_request.body)
request_body = replace(request_body, "" => "-") # This shouldn't be necessary anymore but...
# Returns the collection
parsed_json = JSON.parse.(request_body)
# Return the formatted object(s)
formatter = (x) -> format_mangal_response(ReturnType, x)
return convert(Vector{ReturnType}, formatter.(parsed_json))
end
"""
number_of_objects(endpoint::AbstractString, query::Pair...)
This function returns the total number of objects matching a given query, as
reported by the API through the `Content-Range` header.
"""
function number_of_objects(endpoint::AbstractString, query::Pair...)
# Headers
headers = Mangal.generate_base_header()
# Full endpoint
endpoint = Mangal.api_root * endpoint
# Convert query parameters
request_url = endpoint*Mangal.generate_request_query("count" => 1, query...)
# Perform the request
this_request = HTTP.get(request_url, headers)
content_range = first(filter(head -> head.first == "Content-Range", this_request.headers))
positions = parse.(Int64, split(content_range.second, [' ', '-', '/'])[2:end])
range_begin = positions[1]+1
range_stop = positions[2]+1
range_ends = positions[3]
return range_ends
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 1289 | """
count(::Type{MangalNetwork}, d::MangalDataset, query::Pair...)
Return the number of networks that belong to a dataset, according to an optional
series of queries.
"""
function count(::Type{MangalNetwork}, d::MangalDataset, query::Pair...)
return count(MangalNetwork, "dataset_id" => d.id, query...)
end
"""
count(::Type{MangalInteraction}, n::MangalNetwork, query::Pair...)
Return the number of interactions that belong to a network, according to an optional
series of queries.
"""
function count(::Type{MangalInteraction}, n::MangalNetwork, query::Pair...)
return count(MangalInteraction, "network_id" => n.id, query...)
end
"""
count(::Type{MangalNode}, n::MangalNetwork, query::Pair...)
Return the number of nodes that belong to a network, according to an optional
series of queries.
"""
function count(::Type{MangalNode}, n::MangalNetwork, query::Pair...)
return count(MangalNode, "network_id" => n.id, query...)
end
"""
count(::Type{MangalNode}, n::MangalReferenceTaxon, query::Pair...)
Return the number of nodes that are instances of a reference taxon, according to an optional
series of queries.
"""
function count(::Type{MangalNode}, r::MangalReferenceTaxon, query::Pair...)
return count(MangalNode, "taxonomy_id" => r.id, query...)
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 665 | """
dataset(name::AbstractString)
Return a single dataset by its name.
"""
function dataset(name::AbstractString)
q = datasets(Pair("name", name))
return isequal(1)(length(q)) ? only(q) : nothing
end
@testitem "We can get dataset data" begin
@test typeof(datasets()) <: Vector{MangalDataset}
# Datasets with count
count_2 = datasets(Pair("count", 2))
@test typeof(count_2) <: Vector{MangalDataset}
@test length(count_2) == 2
# Dataset by id
@test typeof(dataset(1)) <: MangalDataset
# Dataset by name
@test typeof(dataset("roberson_1929")) <: MangalDataset
@test isnothing(dataset("This_DOESNOT_exist"))
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 2621 | types_names = (
(MangalReferenceTaxon, :backbone),
(MangalNode, :node),
(MangalNetwork, :network),
(MangalDataset, :dataset),
(MangalReference, :reference),
(MangalInteraction, :interaction),
(MangalAttribute, :attribute),
(MangalUser, :user),
)
import Base.count
for mg_type_pair in types_names
mg_type, mg_singular = mg_type_pair
mg_plural = Symbol(string(mg_singular) * "s")
@eval begin
"""
count(::Type{$($mg_type)}, query::Pair...)
Returns the number of $($mg_type) objects that match a query.
"""
function Base.count(::Type{$mg_type}, query::Pair...)
return Mangal.number_of_objects(Mangal._MANGAL_ENDPOINTS[$mg_type], query...)
end
end
if haskey(Mangal._MANGAL_CACHES, mg_type)
@eval begin
"""
$($mg_plural)(query::Pair...)
This function will return objects of type $($mg_type) according to the query
parameters. To accelerate future queries, the objects returned will be cached.
To get the latest $($mg_type) records, this function can be called with no arguments.
"""
function $mg_plural(query::Pair...)
results = search_objects_by_query($mg_type, query...)
Mangal.cache(results)
return results
end
"""
$($mg_singular)(id::Int64)
Returns the object of type $($mg_type) whose identifier is `id`.
"""
function $mg_singular(id::Int64)
if haskey(Mangal._MANGAL_CACHES[$mg_type], id)
return Mangal._MANGAL_CACHES[$mg_type][id]
else
return first($mg_plural(Pair("id", id)))
end
end
end
else
@eval begin
"""
$($mg_plural)(query::Pair...)
This function will return objects of type $($mg_type) according to the query
parameters.
To get the latest $($mg_type) records, this function can be called with no arguments.
"""
function $mg_plural(query::Pair...)
results = search_objects_by_query($mg_type, query...)
return results
end
"""
$($mg_singular)(id::Int64)
Returns the object of type $($mg_type) whose identifier is `id`.
"""
function $mg_singular(id::Int64)
return first($mg_plural(Pair("id", id)))
end
end
end
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 2621 | """
interactions(from::MangalNode, ::Colon, query::Pair...)
Returns interactions established *by* the species given as its first argument.
"""
function interactions(from::MangalNode, ::Colon, query::Pair...)
return interactions(Pair("node_from", string(from.id)), query...)
end
"""
interactions(::Colon, to::MangalNode, query::Pair...)
Returns interactions established *to* the species given as its second argument.
"""
function interactions(::Colon, to::MangalNode, query::Pair...)
return interactions(Pair("node_to", string(to.id)), query...)
end
"""
interactions(with::MangalNode, query::Pair...)
Returns interactions established *around* the species given as its first argument.
"""
function interactions(with::MangalNode, query::Pair...)
to = interactions(with, :, query...)
fr = interactions(:, with, query...)
append!(to, fr)
return unique(to)
end
"""
interactions(from::MangalNode, to::MangalNode, query::Pair...)
Returns interactions between two nodes.
"""
function interactions(from::MangalNode, to::MangalNode, query::Pair...)
return interactions(Pair("node_from", string(from.id)), Pair("node_to", string(to.id)), query...)
end
"""
interactions(n::MangalNetwork, query::Pair...)
Returns interactions within a network.
"""
function interactions(n::MangalNetwork, query::Pair...)
return interactions("network_id" => n.id, query...)
end
@testitem "We can get all interactions" begin
@test typeof(interactions()) <: Vector{MangalInteraction}
end
@testitem "We can get all interactions by type" begin
@test typeof(interactions(Pair("type", "mutualism"))) <: Vector{MangalInteraction}
end
@testitem "We can get all interactions to a node" begin
@test typeof(interactions(:, node(31863))) <: Vector{MangalInteraction}
end
@testitem "We can get all interactions from a node" begin
@test typeof(interactions(node(31904), :)) <: Vector{MangalInteraction}
end
@testitem "We can get all interactions to a node by type" begin
@test typeof(interactions(:, node(31863), Pair("type", "predation"))) <: Vector{MangalInteraction}
end
@testitem "We can get all interactions from a node by type" begin
@test typeof(interactions(node(31904), :, Pair("type", "predation"))) <: Vector{MangalInteraction}
end
@testitem "We can get interactions between nodes" begin
@test typeof(interactions(node(31904), node(31863))) <: Vector{MangalInteraction}
end
@testitem "We can get interactions between nodes by type" begin
@test typeof(interactions(node(31904), node(31863), Pair("type", "predation"))) <: Vector{MangalInteraction}
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 1555 | """
login(token::AbstractString)
This function will store the token in the `MANGAL_BEARER_TOKEN` environment
variable. To get your token, please use `login` with no argument.
"""
function login(token::AbstractString)
ENV["MANGAL_BEARER_TOKEN"] = token
@info "Bearer token registered"
end
"""
login()
Read the bearer token from the `MANGAL_BEARER_TOKEN` environment variable. If
not found, displays a login message with a login URL. Currently, being logged in
is only necessary to access private datasets.
"""
function login()
if haskey(ENV, "MANGAL_BEARER_TOKEN")
@info "Your bearer token is already registered"
else
Mangal.login_message()
end
end
"""
login_message()
Points user to the login URL, and explains how the bearer token can be saved
persistently.
"""
function login_message()
@info "You need to login"
msg = """ To login, please go to
$(Mangal.web_root)auth/
You will be prompted to login using ORCID - when this is done, you will be
returned to your profile page, which contains the access_token. Copy and
paste this value, and use it in the login function:
julia> my_access_token = "12345654-1234-1234-4321-4343435353"
julia> Mangal.login(my_access_token)
If you want to save your bearer token, you can place it in an environmental
variable named MANGAL_BEARER_TOKEN -- this will let you use
julia> Mangal.login()
"""
@info msg
end
@testitem "We can get user data" begin
@test typeof(users()) <: Vector{MangalUser}
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 1687 | """
networks(dataset::MangalDataset, query::Pair...)
Returns networks that are part of a `MangalDataset`. Allows additional query parameters.
"""
function networks(dataset::MangalDataset, query::Pair...)
return networks(Pair("dataset_id", dataset.id), query...)
end
"""
network(name::AbstractString)
Returns a network of a given name.
"""
function network(name::AbstractString)
q = networks(Pair("name", name))
return isequal(1)(length(q)) ? only(q) : nothing
end
@testitem "We can get networks" begin
@test typeof(networks()) <: Vector{MangalNetwork}
end
@testitem "We can get a network by ID" begin
@test typeof(network(19)) <: MangalNetwork
end
@testitem "We can get a network by name" begin
@test typeof(network("howking_1968_19680601_12")) <: MangalNetwork
end
@testitem "We can get a network by dataset" begin
ds = dataset(62)
n_ds = networks(ds)
@test typeof(n_ds) <: Vector{MangalNetwork}
end
@testitem "We can get a network by dataset with additional arguments" begin
ds = dataset(62)
n_ds_q = networks(ds, Pair("count", 2))
@test typeof(n_ds_q) <: Vector{MangalNetwork}
@test length(n_ds_q) == 2
end
@testitem "We can page through multiple network queries" begin
this_page = 1
networks_ponisio = networks(dataset("ponisio_2017"), Pair("count", 10), Pair("page", this_page))
keep_querying = true
while keep_querying
global this_page += 1
response = networks(dataset("ponisio_2017"), Pair("count", 10), Pair("page", this_page))
append!(networks_ponisio, response)
global keep_querying = length(response) > 0
end
@test length(networks_ponisio) == 131
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 1071 | """
nodes(network::MangalNetwork, query::Pair...)
Returns the nodes that are part of a `MangalNetwork`, with an additional optional query.
"""
function nodes(network::MangalNetwork, query::Pair...)
return nodes(Pair("network_id", network.id), query...)
end
"""
nodes(taxon::MangalReferenceTaxon, query::Pair...)
Returns the nodes that are instance of a `MangalReferenceTaxon`, with an additional query.
"""
function nodes(taxon::MangalReferenceTaxon, query::Pair...)
return nodes(Pair("taxonomy_id", taxon.id), query...)
end
@testitem "We can get node data" begin
@test typeof(nodes()) <: Vector{MangalNode}
end
@testitem "We can get nodes of a network" begin
N1 = network(19)
nN1 = nodes(N1)
@test typeof(nN1) <: Vector{MangalNode}
end
@testitem "We can get taxonomic data in nodes" begin
n1 = node(2158)
@test typeof(n1) <: MangalNode
@test typeof(n1.taxon) <: MangalReferenceTaxon
end
@testitem "We can get nodes from backbone data" begin
b1 = backbone(1)
nb1 = nodes(b1)
@test first(nb1).taxon == b1
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 250 | """
reference(d::MangalDataset)
Returns the `MangalReference` associated with a `MangalDataset`. This is a
convenience function that returns the `reference` field of the dataset.
"""
function reference(d::MangalDataset)
return d.reference
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 5573 | """
format_mangal_coordinates(d::Dict{T,Any}) where {T <: AbstractString}
Returns a set of coordinates in a `GeoInterface` object, which can be a `PointTrait`
or a `PolygonTrait`.
"""
function format_mangal_coordinates(d::Dict{T,Any}) where {T <: AbstractString}
point_type = d["geom"]["type"] == "Point" ? PointTrait : PolygonTrait
if point_type == PolygonTrait
coords = [float.(x) for x in first(d["geom"]["coordinates"])]
point_coordinates = coords
else
coords = float.(d["geom"]["coordinates"])
point_coordinates = coords
end
return point_coordinates
end
function format_mangal_response(::Type{MangalDataset}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_public = d["public"]
obj_name = d["name"]
obj_created = DateTime(d["created_at"][1:19])
obj_updated = DateTime(d["updated_at"][1:19])
obj_reference = isnothing(d["ref_id"]) ? missing : reference(d["ref_id"])
obj_user = d["user_id"]
obj_description = d["description"]
return MangalDataset(obj_id, obj_public, obj_name, obj_created,
obj_updated, obj_reference, obj_user, obj_description
)
end
function format_mangal_response(::Type{MangalNetwork}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_public = d["public"]
obj_name = d["name"]
obj_date = isnothing(d["date"]) ? missing : DateTime(d["date"][1:19])
obj_position = isnothing(d["geom"]) ? missing : format_mangal_coordinates(d)
obj_created = DateTime(d["created_at"][1:19])
obj_updated = DateTime(d["updated_at"][1:19])
obj_user = isnothing(d["user_id"]) ? missing : d["user_id"]
obj_description = d["description"]
obj_complete = d["all_interactions"]
obj_dataset = isnothing(d["dataset_id"]) ? missing : dataset(d["dataset_id"])
return MangalNetwork(obj_id, obj_public, obj_name, obj_date, obj_position,
obj_created, obj_updated, obj_user, obj_description,
obj_complete, obj_dataset)
end
function format_mangal_response(::Type{MangalNode}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_name = d["original_name"]
obj_created = DateTime(d["created_at"][1:19])
obj_updated = DateTime(d["updated_at"][1:19])
obj_taxon = isnothing(d["taxonomy"]) ? missing : Mangal.format_mangal_response(MangalReferenceTaxon, d["taxonomy"])
return MangalNode(obj_id, obj_name, obj_created, obj_updated, obj_taxon)
end
function format_mangal_response(::Type{MangalReferenceTaxon}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_name = d["name"]
obj_bold = isnothing(d["bold"]) ? missing : d["bold"]
obj_tsn = isnothing(d["tsn"]) ? missing : d["tsn"]
obj_ncbi = isnothing(d["ncbi"]) ? missing : d["ncbi"]
obj_eol = isnothing(d["eol"]) ? missing : d["eol"]
obj_gbif = isnothing(d["gbif"]) ? missing : d["gbif"]
obj_created = DateTime(d["created_at"][1:19])
obj_updated = DateTime(d["updated_at"][1:19])
return MangalReferenceTaxon(obj_id, obj_name, obj_bold, obj_tsn,
obj_ncbi, obj_eol, obj_gbif, obj_created, obj_updated)
end
function format_mangal_response(::Type{MangalInteraction}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_network = network(d["network_id"])
obj_from = node(d["node_from"])
obj_to = node(d["node_to"])
obj_date = isnothing(d["date"]) ? missing : DateTime(d["date"][1:19])
obj_position = isnothing(d["geom"]) ? missing : format_mangal_coordinates(d)
obj_directed = d["direction"] == "directed"
obj_interaction = Symbol(d["type"])
obj_method = isnothing(d["method"]) ? missing : d["method"]
obj_strength = isnothing(d["value"]) ? missing : d["value"]
obj_created = DateTime(d["created_at"][1:19])
obj_updated = DateTime(d["updated_at"][1:19])
obj_attribute = isnothing(d["attr_id"]) ? missing : format_mangal_response(MangalAttribute, d["attribute"])
return MangalInteraction(obj_id, obj_network, obj_from, obj_to, obj_date, obj_position,
obj_directed, obj_interaction, obj_method, obj_strength, obj_created, obj_updated,
obj_attribute)
end
function format_mangal_response(::Type{MangalReference}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_year = d["year"] == "NA" ? missing : parse(Int64, d["year"][1:4])
obj_doi = isnothing(d["doi"]) ? missing : d["doi"]
obj_jstor = isnothing(d["jstor"]) ? missing : d["jstor"]
obj_pmid = isnothing(d["pmid"]) ? missing : d["pmid"]
obj_bibtex = isnothing(d["bibtex"]) ? missing : d["bibtex"]
obj_paper = isnothing(d["paper_url"]) ? missing : d["paper_url"]
obj_data = isnothing(d["data_url"]) ? missing : d["data_url"]
return MangalReference(obj_id, obj_year, obj_doi, obj_jstor, obj_pmid,
obj_bibtex, obj_paper, obj_data)
end
function format_mangal_response(::Type{MangalAttribute}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_name = d["name"]
obj_description = d["description"]
obj_unit = isnothing(d["unit"]) ? missing : d["unit"]
return MangalAttribute(obj_id, obj_name, obj_description, obj_unit)
end
function format_mangal_response(::Type{MangalUser}, d::Dict{T,Any}) where {T <: AbstractString}
obj_id = d["id"]
obj_name = d["name"]
obj_orcid = d["orcid"]
obj_email = isnothing(d["email"]) ? missing : d["email"]
obj_org = isnothing(d["organization"]) ? missing : d["organization"]
return MangalUser(obj_id, obj_name, obj_email, obj_orcid, obj_org)
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 893 | function _short_desc(s::String)
return length(s) < 80 ? s : strip(s[1:79]*"…")
end
function Base.show(io::IO, dataset::MangalDataset)
print(io, """🗃️ Mangal dataset #$(dataset.id) ($(dataset.name))""")
end
function Base.show(io::IO, network::MangalNetwork)
print(io, """🕸️ Mangal network #$(network.id) ($(network.name))""")
end
function Base.show(io::IO, interaction::MangalInteraction)
print(io, """🔄 Mangal int° #$(interaction.id): from $(interaction.from.name) to $(interaction.to.name)""")
end
function Base.show(io::IO, user::MangalUser)
print(io, """🫂 Mangal contributor #$(user.id): $(user.name) - $(user.orcid)""")
end
function Base.show(io::IO, node::MangalNode)
print(io, """🦝 Mangal node #$(node.id): $(node.name)""")
end
function Base.show(io::IO, taxon::MangalReferenceTaxon)
print(io, """🌲 Mangal taxon #$(taxon.id): $(taxon.name)""")
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 4793 | """
Attribute
"""
struct MangalAttribute
id::Int64
name:: AbstractString
description::AbstractString
unit::Union{Missing,AbstractString}
end
"""
Reference
"""
struct MangalReference
id::Int64
year::Union{Missing,Int64}
doi::Union{String,Missing}
jstor::Union{String,Missing}
pmid::Union{String,Missing}
bibtex::Union{String,Missing}
paper::Union{String,Missing}
data::Union{String,Missing}
end
"""
A `MangalDataset` identifies a collection of networks, possibly containing
a single element. A dataset is identified by its `id` or `name` (both of
which are *unique*).
`name` (`AbstractString`): a unique name describing the dataset.
`public` (`Bool`): indicates whether the dataset details are available to others
than its owner.
`reference` (`Union{Int64,Nothing}`) (*optional*): a reference to the `id` of
the `MangalReference`, or `nothing` if there is no associated reference for this
dataset.
`user` (`Int64`): `id` of the user who added the dataset to the database. This
is *not necessarily* the author of the dataset, see `reference` (and the same
field in the `MangalNetwork`) to get the actual authorship.
`description` (`AbstractString`): a free-form description of the dataset.
"""
struct MangalDataset
id::Int64
public::Bool
name::AbstractString
created::DateTime
updated::DateTime
reference::Union{MangalReference,Missing}
user::Int64
description::AbstractString
end
"""
A `MangalNetwork` is a wrapper around *nodes* (and not around interactions, for
reasons which are really not worth mentioning here, but see the documentation
for `MangalNode` for some hints).
`name` (`AbstractString`): a unique name describing the network.
`dataset` (`Int64`): the unique id of the `MangalDataset` to which the network
belongs.
`public` (`Bool`): indicates whether the network details are available to others
than its owner.
`date` (`DateTime`): date and time at which the network was sampled.
`position` (`AbstractGeometryTrait`): the location at which the network was sampled.
This can be any sort of geospatial construct, most notably points *or* polygons.
`complete` (`Bool`): indicates whether the network was sampled completely, or is
a collection of interactions with possible gaps.
`reference` (`Union{Int64,Nothing}`) (*optional*): a reference to the `id` of
the `MangalReference`, or `nothing` if there is no associated reference for this
network.
`user` (`Int64`): `id` of the user who added the network to the database. This
is *not necessarily* the author of the network, see `reference` to get the
actual authorship.
`description` (`AbstractString`): a free-form description of the network.
"""
struct MangalNetwork
id::Int64
public::Bool
name::AbstractString
date::Union{DateTime,Missing}
position
created::DateTime
updated::DateTime
user::Union{Int64,Missing}
description::AbstractString
complete::Bool
dataset::Union{MangalDataset,Missing}
end
"""
Reference taxon (unique identifier of network nodes)
"""
struct MangalReferenceTaxon
id::Int64
name::AbstractString
bold::Union{Int64,Missing}
tsn::Union{Int64,Missing}
ncbi::Union{Int64,Missing}
eol::Union{Int64,Missing}
gbif::Union{Int64,Missing}
created::DateTime
updated::DateTime
end
"""
Node in a network
The `taxon` field is a `MangalReferenceTaxon` object, so that one can, for
example, query the TSN identifier of a node through `object.taxon.tsn`.
This approach has been chosen because (i) names of nodes in networks can be
non-unique and (ii) nodes within the same network can refer to various taxonomic
levels. As an example, if a network has four distinct nodes identified as
`Ascariasis sp.`, they will represent four nodes in the network, but map onto
the same `MangalReferenceTaxon` (representing the entire *Ascariasis* genus).
This approach provides a seamless representation of the same taxon across
different networks, but also of the same taxon *within* networks.
"""
struct MangalNode
id::Int64
name::AbstractString
created::DateTime
updated::DateTime
taxon::Union{MangalReferenceTaxon,Missing}
end
"""
Interaction
"""
struct MangalInteraction
id::Int64
network::MangalNetwork
from::MangalNode
to::MangalNode
date::Union{DateTime,Missing}
position
directed::Bool
interaction::Symbol
method::Union{AbstractString,Missing}
strength::Union{Number,Missing}
created::DateTime
updated::DateTime
attribute::Union{Missing,MangalAttribute}
end
"""
MangalUser
"""
struct MangalUser
id::Int64
name::AbstractString
email::Union{Missing,AbstractString}
orcid::Union{Missing,AbstractString}
organization::Union{Missing,AbstractString}
end | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 552 | module MangalTestKnownBugs
using Mangal
using EcologicalNetworks
using Test
_problematic_ids = [28, 83, 86, 100, 101, 102, 103, 1280, 2463, 2464, 3258, 922, 924, 889, 890, 907, 906, 1516]
for (idx,netid) in enumerate(_problematic_ids)
thisnet = Mangal.network(netid)
@test typeof(thisnet) <: MangalNetwork
U = convert(UnipartiteNetwork, thisnet)
@test typeof(U) <: UnipartiteNetwork
T = taxonize(U)
@test eltype(EcologicalNetworks.species(T)) <: MangalReferenceTaxon
end
end
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | code | 39 | using TestItemRunner
@run_package_tests | Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 1420 | ### Mangal.jl
[](https://poisotlab.github.io/Mangal.jl/stable/) [](https://poisotlab.github.io/Mangal.jl/latest/)
 [](https://codecov.io/gh/PoisotLab/Mangal.jl)
[](https://www.repostatus.org/#active) [](https://zenodo.org/badge/latestdoi/144035225)
This package is a wrapper around the *new* API for the mangal ecological
interactions database. It uses [Julia 1.1][jl] to provide a programmatic
interface to *read* the data. Development of this package and the underlying
database was funded by the [Canadian Foundation for Innovation][cfi] and
[NSERC][nserc].
[cfi]: https://www.innovation.ca/
[nserc]: http://www.nserc-crsng.gc.ca/index_eng.asp
[jl]: https://julialang.org/
## Getting started
Enter package mode by pressing `]` from the Julia REPL. Then install:
~~~
add Mangal
~~~
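Once the package is installed, a quick way to check that everything works is to
count the networks in the database (a minimal sketch, which requires an internet
connection):

~~~
using Mangal
count(MangalNetwork)
~~~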
That's it. Now head over to the
[documentation](https://poisotlab.github.io/Mangal.jl/dev/).
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 600 | <!-- Thank you for opening an issue! If your issue is about a question, or a
suggestion, please replace all of the content between curly brackets, {like so},
by the appropriate information. If your issue is a more technical point, or a
suggestion for improvement, feel free to remove all of this and just write as
usual. -->
{short description of the problem}
{what you were trying to do}
```julia
# Please put the relevant code here
```
{what you expected to see}
{what you got instead}
---
Using {operating system}, Julia {version}, EcologicalNetwork {version}
{any additional information}
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 689 | # {describe feature here}
<!-- Thank you for this pull request! Please replace any information between curly brackets {like so} with the relevant information. -->
{short description of what the PR does}
## Related issues
<!-- If relevant, please add links to the relevant issues, using `#00` -->
## Checklist
- [ ] The code is covered by unit tests
- [ ] Additional cases or module in `test/...`
- [ ] Relevant lines in `test/runtests.jl`
- [ ] The code is *documented*
- [ ] Docstrings written for every function
- [ ] If needed, additional lines in `docs/src/...`
- [ ] All contributors have added their name and affiliation to `.zenodo.json`
## Pinging
Pinging @tpoisot
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 848 | ---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug, need triage
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 617 | ---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement, need triage
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 171 | ---
name: To-do
about: Short notes on development tasks
title: ''
labels: need triage, to do
assignees: ''
---
## What to do?
...
## Why?
...
## Any ideas how?
...
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 837 | # Mangal
This manual describes the functionalities of the `Mangal.jl` package, to query
data from species interaction networks. This package is a wrapper around the
*new* API for the mangal ecological interactions database. It uses [Julia
1.1](https://julialang.org/) to provide a programmatic interface to *read* the
data. Development of this package and the underlying database was funded by the
[Canadian Foundation for Innovation](https://www.innovation.ca/) and the
[Natural Sciences and Engineering Research Council of
Canada](http://www.nserc-crsng.gc.ca/).
## Original publication
Poisot, T. , Baiser, B. , Dunne, J. A., Kéfi, S. , Massol, F. , Mouquet, N. ,
Romanuk, T. N., Stouffer, D. B., Wood, S. A. and Gravel, D. (2016), mangal –
making ecological network analysis simple. Ecography, 39: 384-390.
doi:10.1111/ecog.00976
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 484 | The recommended way to get data is through `Mangal.jl` integration with the
`EcologicalNetworks.jl` package for network analysis.
There is no obvious way to know in advance if a network is bipartite or not. For
this reason, this wrapper *only* returns unipartite objects. They can be
converted into bipartite networks using the `convert` methods in
`EcologicalNetworks.jl`.
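As a brief illustration (a sketch only; the network name below is one used in
the package test suite, and any other network name or identifier would work the
same way):

```julia
using Mangal, EcologicalNetworks
# Every Mangal network can be converted to a unipartite network object
U = convert(UnipartiteNetwork, network("howking_1968_19680601_12"))
# For a network that is known to be two-mode, a further conversion is possible:
# B = convert(BipartiteNetwork, U)
```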
## Generate network objects
```@docs
convert
```
## Increase taxonomic resolution
```@docs
taxonize
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 531 | ## Login
!!! info "Login"
Mangal relies on [ORCID](https://orcid.org/) for authentication and login. As
long as you have an ORCID profile, you can login.
```@docs
Mangal.login
```
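A hypothetical session could look like the following (the token is a
placeholder, not a real value):

```julia
my_access_token = "12345654-1234-1234-4321-4343435353" # placeholder value
Mangal.login(my_access_token)
# or, if the MANGAL_BEARER_TOKEN environment variable is already set:
Mangal.login()
```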
## Formatters
```@docs
Mangal.format_mangal_response
Mangal.format_mangal_coordinates
```
## Other functions
```@docs
Mangal.generate_base_header
Mangal.generate_request_query
Mangal.search_objects_by_query
```
## Caching
```@docs
Mangal.cache
```
## Full data retrieval
```@docs
Mangal.get_all_nodes
Mangal.get_all_interactions
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 1840 | This page presents the basic functions to access all of the data stored in
*mangal*.
!!! note "Naming conventions"
Functions whose name is plural (*e.g.* `networks`) will return a `Vector` of
their type. Functions whose name is singular (*e.g.* `network`) return a single
object. All functions returning a `Vector` can accept `Pair` objects for querying.
In addition to the search by name (when available) and ID (for all objects),
most of the functions have methods to work on other types of objects. For
example, `networks` has a method taking a `MangalDataset`, which will retrieve
the networks belonging to this dataset, as illustrated below.
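Using identifiers that appear in the package test suite (a small sketch):

~~~
network(19) # a single network, by id
networks(dataset(62)) # all the networks that belong to a dataset
~~~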
!!! danger "Paging matters!"
The server returns (by default) 200 objects for a given query, and this number
can be increased up to 1000. This may not be sufficient to retrieve all of the
information, for example in networks with more than 200 nodes. Not paying
attention to paging when using these functions directly (as opposed to within
the `EcologicalNetworks` wrappers) means that you are at risk of *not working
with the entire dataset*.
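A typical pattern (a sketch only, reusing the `ponisio_2017` dataset from the
package test suite) is to keep requesting pages until all the networks reported
by `count` have been retrieved:

~~~
ds = dataset("ponisio_2017")
to_get = count(MangalNetwork, ds)
ponisio_networks = networks(ds, "count" => 100)
page = 0
while length(ponisio_networks) < to_get
    global page = page + 1
    append!(ponisio_networks, networks(ds, "count" => 100, "page" => page))
end
~~~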
## A note on queries
The Mangal API is built on
[`epilogue`](https://github.com/dchester/epilogue#rest-api) -- this offers
sorting and filtering functionalities. These operations are referred to as
"queries" across the package. All queries are passed as *pairs*. For example,
filtering interactions that are of the mutualist type, and sorting them by `id`,
is done with:
~~~
interactions("type" => "mutualism", "sort" => "id")
~~~
## For datasets
```@docs
datasets
dataset
```
## Networks
```@docs
networks
network
```
## Interactions
```@docs
interactions
interaction
```
## Nodes
```@docs
nodes
node
```
## Reference taxon
```@docs
backbones
backbone
```
## References
```@docs
references
reference
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 459 | All types have three fields in common: `id` (`Int64`), a unique identifier, and
`created` and `updated` (both `DateTime`), which give the time of in-database
creation and last modification.
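A small illustration (using the dataset identifier that appears in the
vignettes):

```julia
d = dataset(15)
d.id, d.created, d.updated
```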
## Core types
```@docs
MangalDataset
MangalNetwork
MangalInteraction
```
## Taxonomy types
```@docs
MangalNode
MangalReferenceTaxon
```
## Additional information
```@docs
MangalTrait
MangalAttribute
```
## Metadata types
```@docs
MangalUser
MangalReference
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 2548 | # Counting objects and paging
The goal of this vignette is to see how we can *count* objects, *i.e.* get the
number of entries in the database, and how we can then use this information to
*page* through the objects, *i.e.* download all of the matching records. To
illustrate, we will use a simple use case: plotting the relationship between
number of species and number of links in a number of food webs.
```julia
using Mangal
using Plots
```
## Counting and querying
Each type in **Mangal** has its own `count` method, which can be called without
any argument to return the total number of entries in the database. For example,
the total number of networks is:
```julia
count(MangalNetwork)
```
We can also pass queries to the `count` methods. One of the most general ways to
query things is to use `"q"`, which will look for a match in all text fields for
the type. In this use case, we want to retrieve the dataset corresponding to
Karl Havens' classical study about food web scaling, so we can look for the
datasets with the string `"havens"` in them:
```julia
Havens_data = first(datasets("q" => "havens"))
```
This information can be used to only count the number of networks that belong to
this dataset:
```julia
count(MangalNetwork, "dataset_id" => Havens_data.id)
```
Note that for convenience, there is a `count` method that will accept a
`MangalDataset` object to return the number of networks in this dataset. As you
may assume, it does nothing more internally than what we did at the step above.
```julia
Havens_count = count(MangalNetwork, Havens_data)
```
## Paging
Paging refers to retrieving multiple records from the database. It is regulated
by two parameters: `"count"`, the number of records to return *per* page
(default: `100`), and `"page"`, the page number (starting at 0). In this
example, we will return 10 objects per page, and so we will need to loop through
multiple pages:
```julia
Havens_networks = networks(Havens_data, "count" => 10)
page = 0
while length(Havens_networks) < Havens_count
global page = page + 1
append!(Havens_networks,
networks(Havens_data, "page" => page, "count" => 10)
)
end
```
## Producing the plot
Finally, we can use additional `count` methods to get the number of nodes
(species) and interactions within each network, to produce the figure:
```julia
LS = [
(count(MangalInteraction, n), count(MangalNode, n)) for n in Havens_networks
]
scatter(LS, c=:white, leg=false, frame=:box)
xaxis!(:log, "Species richness")
yaxis!(:log, "Number of interactions")
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 3389 | # Counting objects and paging
The goal of this vignette is to see how we can *count* objects, *i.e.* get the
number of entries in the database, and how we can then use this information to
*page* through the objects, *i.e.* download all of the matching records. To
illustrate, we will use a simple use case: plotting the relationship between
number of species and number of links in a number of food webs.
````julia
using Mangal
using Plots
````
## Counting and querying
Each type in **Mangal** has its own `count` method, which can be called without
any argument to return the total number of entries in the database. For example,
the total number of networks is:
````julia
count(MangalNetwork)
````
````
1386
````
We can also pass queries to the `count` methods. One of the most general ways to
query things is to use `"q"`, which will look for a match in all text fields for
the type. In this use case, we want to retrieve the dataset corresponding to
Karl Havens' classical study about food web scaling, so we can look for the
datasets with the string `"havens"` in them:
````julia
Havens_data = first(datasets("q" => "havens"))
````
````
MangalDataset(15, true, "havens_1992", 1984-06-01T04:00:00, 2019-02-23T01:4
7:06, 2019-02-23T01:47:06, MangalReference(15, 1992, "10.1126/science.257.5
073.1107", missing, missing, "@article{Havens_1992, doi = {10.1126/science.
257.5073.1107}, url = {https://doi.org/10.1126%2Fscience.257.5073.1107}, ye
ar = 1992, month = {aug}, publisher = {American Association for the Advance
ment of Science ({AAAS})}, volume = {257}, number = {5073}, pages = {1107--
1109}, author = {K. Havens}, title = {Scale and Structure in Natural Food W
ebs}, journal = {Science}}", "https://doi.org/10.1126%2Fscience.257.5073.11
07", "URL of the attached data"), 3, "Pelagic communities of small lakes an
d ponds of the Adirondack")
````
This information can be used to only count the number of networks that belong to
this dataset:
````julia
count(MangalNetwork, "dataset_id" => Havens_data.id)
````
````
50
````
Note that for convenience, there is a `count` method that will accept a
`MangalDataset` object to return the number of networks in this dataset. As you
may assume, it does nothing more internally than what we did at the step above.
````julia
Havens_count = count(MangalNetwork, Havens_data)
````
````
50
````
## Paging
Paging refers to retrieving multiple records from the database. It is regulated
by two parameters: `"count"`, the number of records to return *per* page
(default: `100`), and `"page"`, the page number (starting at 0). In this
example, we will return 10 objects per page, and so we will need to loop through
multiple pages:
````julia
Havens_networks = networks(Havens_data, "count" => 10)
page = 0
while length(Havens_networks) < Havens_count
global page = page + 1
append!(Havens_networks,
networks(Havens_data, "page" => page, "count" => 10)
)
end
````
## Producing the plot
Finally, we can use additional `count` methods to get the number of nodes
(species) and interactions within each network, to produce the figure:
````julia
LS = [
(count(MangalInteraction, n), count(MangalNode, n)) for n in Havens_networks
]
scatter(LS, c=:white, leg=false, frame=:box)
xaxis!(:log, "Species richness")
yaxis!(:log, "Number of interactions")
````

| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 3474 | # Integration with `EcologicalNetworks.jl`
The **Mangal** package is integrated with **EcologicalNetworks** for analysis.
```julia
using Mangal
using EcologicalNetworks
```
## A simple example
In this simple example, we will look at a food web from 1956, retrieve it from
the Mangal database, then convert it into a usable object:
```julia
db_version = network("johnston_1956_19560101_947");
db_version.description
```
The conversion to the network is done using the `convert` method, which by
default will return a `UnipartiteNetwork`, where species are the `MangalNode` of
the original network:
```julia
N = convert(UnipartiteNetwork, db_version)
```
We can check that the type of the network is correct:
```julia
eltype(N)
```
We can also confirm that all interactions and node counts match:
```julia
count(MangalInteraction, db_version) == links(N)
```
```julia
count(MangalNode, db_version) == richness(N)
```
## A more complex example
In this more complex example, we will work with an entire dataset
(`hadfield_2014`), which contains a large number of networks.
```julia
hp_dataset = dataset("hadfield_2014");
hp_networks = networks(hp_dataset);
```
The next step might take a minute or two, but will consist in downloading every
information related to the network, and converting it into one
`UnipartiteNetwork` for every network in the dataset.
```julia
N = [convert(UnipartiteNetwork, n) for n in hp_networks];
```
Note that the previous step could use queries too, so it would be possible to
restrict the interactions to, *e.g.* a certain type, for networks with multiple
interaction types.
```julia
B = [convert(BipartiteNetwork, n) for n in N];
```
```julia
using Plots
histogram(η.(B), frame=:box, c=:white)
xaxis!("Nestedness", (0,1))
yaxis!("", (0, 25))
```
## Building custom networks
Converting to a network is *always* a wrapper around converting an array of
interactions. Let's imagine that we are interested in representing the network
of species that either consume, or are consumed by, salmonids. The first step
will be to retrieve the `MangalReferenceTaxon` that correspond to these species:
```julia
salmonids = backbones("q" => "Salmo")
```
For every `MangalReferenceTaxon`, we need to retrieve its number of `MangalNode`
-- let's see how many there are, using the shorthand `count` method for this:
```julia
count.(MangalNode, salmonids)
```
Since none of these are very high, we can retrieve the nodes directly:
```julia
salmonids_nodes = vcat(nodes.(salmonids)...);
```
At this point, we may want to count the number of interactions for all of the
nodes, but let us (for the sake of simplicity) trust that there are fewer than
500 in all cases:
```julia
all_int = [Mangal.interactions(salmo, "count" => 500) for salmo in salmonids_nodes];
salmonids_interactions = vcat(all_int...);
```
At this point, we end up with a `Vector{MangalInteraction}`, *i.e.* an array of
interactions.
An interesting consequence of this approach is that we now can look at all the
datasets that are part of this query, and *e.g.* retrieve their DOI:
```julia
salmonids_datasets = unique([i.network.dataset for i in salmonids_interactions])
[d.reference.doi for d in salmonids_datasets]
```
Finally, the array of interactions can be converted into a `UnipartiteNetwork`:
```julia
salmonid_network = convert(UnipartiteNetwork, salmonids_interactions)
```
We can now resolve the nodes of this network to their reference taxa, using `taxonize`:
```julia
salmonid_resolved_network = taxonize(salmonid_network)
```
Finally, we can list the interactions in the resolved network:
```julia
for i in salmonid_resolved_network
println("$(i.from.name) -> $(i.to.name)")
end
```
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 6726 | # Integration with `EcologicalNetworks.jl`
The **Mangal** package is integrated with **EcologicalNetworks** for analysis.
````julia
using Mangal
using EcologicalNetworks
````
## A simple example
In this simple example, we will look at a food web from 1956, retrieve it from
the Mangal database, then convert it into a usable object:
````julia
db_version = network("johnston_1956_19560101_947");
db_version.description
````
````
"Predation by short-eared owls on a salicornia salt marsh"
````
The conversion to the network is done using the `convert` method, which by
default will return a `UnipartiteNetwork`, where species are the `MangalNode` of
the original network:
````julia
N = convert(UnipartiteNetwork, db_version)
````
````
19×19 unipartite ecological network (Bool, MangalNode) (L: 58)
````
We can check that the type of the network is correct:
````julia
eltype(N)
````
````
(Bool, MangalNode)
````
We can also confirm that all interactions and node counts match:
````julia
count(MangalInteraction, db_version) == links(N)
````
````
true
````
````julia
count(MangalNode, db_version) == richness(N)
````
````
true
````
## A more complex example
In this more complex example, we will work with an entire dataset
(`hadfield_2014`), which contains a large number of networks.
````julia
hp_dataset = dataset("hadfield_2014");
hp_networks = networks(hp_dataset);
````
The next step might take a minute or two, but will consist in downloading every
information related to the network, and converting it into one
`UnipartiteNetwork` for every network in the dataset.
````julia
N = [convert(UnipartiteNetwork, n) for n in hp_networks];
````
Note that the previous step could use queries too, so it would be possible to
restrict the interactions to, *e.g.* a certain type, for networks with multiple
interaction types.
````julia
B = [convert(BipartiteNetwork, n) for n in N];
````
````julia
using Plots
histogram(η.(B), frame=:box, c=:white)
xaxis!("Nestedness", (0,1))
yaxis!("", (0, 25))
````

## Building custom networks
Converting to a network is *always* a wrapper around converting an array of
interactions. Let's imagine that we are interested in representing the network
of species that either consume, or are consumed by, salmonids. The first step
will be to retrieve the `MangalReferenceTaxon` that correspond to these species:
````julia
salmonids = backbones("q" => "Salmo")
````
````
5-element Array{MangalReferenceTaxon,1}:
MangalReferenceTaxon(4173, "Salmo trutta", 47318, 161997, 8032, 10237843,
missing, 2019-02-22T22:40:06, 2019-02-22T22:40:06)
MangalReferenceTaxon(4261, "Salmo gairdneri", missing, 161991, 857570, mis
sing, missing, 2019-02-22T22:40:12, 2019-02-22T22:40:12)
MangalReferenceTaxon(4286, "Salmoninae", 71162, 623286, 504568, missing, m
issing, 2019-02-23T03:04:02, 2019-02-23T03:04:02)
MangalReferenceTaxon(5300, "Salmonidae", 1340, 161931, 8015, 11144860, mis
sing, 2019-02-27T04:10:07, 2019-02-27T04:10:07)
MangalReferenceTaxon(7015, "Salmo salar", 30453, 161996, 8030, 11144931, m
issing, 2019-03-19T19:16:54, 2019-03-19T19:16:54)
````
For every `MangalReferenceTaxon`, we need to retrieve its number of `MangalNode`
-- let's see how many there are, using the shorthand `count` method for this:
````julia
count.(MangalNode, salmonids)
````
````
5-element Array{Int64,1}:
12
1
1
5
1
````
Since none of these are very high, we can retrieve the nodes directly:
````julia
salmonids_nodes = vcat(nodes.(salmonids)...);
````
At this point, we may want to count the number of interactions for all of the
nodes, but let us (for the sake of simplicity) trust that there are fewer than
500 in all cases:
````julia
all_int = [Mangal.interactions(salmo, "count" => 500) for salmo in salmonids_nodes];
salmonids_interactions = vcat(all_int...);
````
At this point, we end up with a `Vector{MangalInteraction}`, *i.e.* an array of
interactions.
An interesting consequence of this approach is that we now can look at all the
datasets that are part of this query, and *e.g.* retrieve their DOI:
````julia
salmonids_datasets = unique([i.network.dataset for i in salmonids_interactions])
[d.reference.doi for d in salmonids_datasets]
````
````
9-element Array{Union{Missing, String},1}:
missing
"10.1126/science.257.5073.1107"
"10.2307/1604"
missing
"10.1080/00288330.2004.9517265"
"10.1016/j.ecolmodel.2010.10.024"
missing
"10.1016/j.pocean.2012.02.002"
"10.2307/1599"
````
Finally, the array of interactions can be converted into a `UnipartiteNetwork`:
````julia
salmonid_network = convert(UnipartiteNetwork, salmonids_interactions)
````
````
408×408 unipartite ecological network (Bool, MangalNode) (L: 406)
````
We can now resolve the nodes of this network to their reference taxa, using `taxonize`:
````julia
salmonid_resolved_network = taxonize(salmonid_network)
````
````
59×59 unipartite ecological network (Bool, MangalReferenceTaxon) (L: 57)
````
Finally, we can list the interactions in the resolved network:
````julia
for i in salmonid_resolved_network
println("$(i.from.name) -> $(i.to.name)")
end
````
````
Salmo gairdneri -> Salvelinus fontinalis
Salmo gairdneri -> Semotilus atromaculatus
Salmo gairdneri -> Catostomus commersoni
Salmo gairdneri -> Rhinichthys atratulus
Salmo trutta -> Diporeia
Salmo trutta -> Benthos
Salmo trutta -> Mysis
Salmonidae -> Osmeridae
Salmo trutta -> Alosa pseudoharengus
Salmo trutta -> Cottoidea
Salvelinus fontinalis -> Salmo gairdneri
Salmo gairdneri -> Salmo gairdneri
Salmo gairdneri -> Semotilus corporalis
Salmo gairdneri -> Notropis cornutus
Salmoninae -> Mugilidae
Salmonidae -> Salmonidae
Oncorhynchus kisutch -> Salmonidae
Sebastes -> Salmonidae
Cephalopoda -> Salmonidae
Oncorhynchus tshawytscha -> Salmonidae
Selachimorpha -> Salmonidae
Trachurus symmetricus -> Salmonidae
Squalus -> Salmonidae
Phycidae -> Salmonidae
Anoplopoma fimbria -> Salmonidae
Flatfish -> Salmonidae
Ardenna grisea -> Salmonidae
Uria aalge -> Salmonidae
Laridae -> Salmonidae
Alcidae -> Salmonidae
Aves -> Salmonidae
Phocidae -> Salmonidae
Procellariiformes -> Salmonidae
Mysticeti -> Salmonidae
Odontoceti -> Salmonidae
Hexagrammidae -> Salmonidae
Metacarcinus magister -> Salmonidae
Salmonidae -> Copepods
Salmonidae -> Amphipoda
Salmonidae -> Medusozoa
Salmonidae -> Pacifica
Salmonidae -> Spinifera
Salmonidae -> Clupeidae
Salmonidae -> Engraulidae
Salmonidae -> Mysida
Salmonidae -> Isopoda
Salmo salar -> Baetis
Salmo salar -> Chironomidae
Salmo salar -> Leuctra
Salmo salar -> Diatoms
Salmo salar -> Desmidiales
Salmo salar -> Algae
Salmo salar -> Ephemeroptera
Salmo salar -> Chimarra marginata
Salmo salar -> Protozoa
Salmo salar -> Micronecta poweri
Salmo salar -> Collembola
````
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 2633 | # Introduction to `Mangal.jl`
The goal of this vignette is to explain the core design principles of the
**Mangal** package. Other vignettes present more realistic use-cases.
```julia
using Mangal
```
## Database status
| Object | Total number |
| --------------- | -------------------------------:|
| Datasets | `j count(MangalDataset)` |
| Networks | `j count(MangalNetwork)` |
| Interactions | `j count(MangalInteraction)` |
| Nodes | `j count(MangalNode)` |
| Reference taxon | `j count(MangalReferenceTaxon)` |
## Types
The package exposes resources from the <mangal.io> database in a series of
types, whose fields are all documented in the manual. Every object in the
`mangal.io` hierarchy is represented by its type:
| object | type | definition |
| ------------ | ---------------------- | ---------------------------------------------------- |
| dataset | `MangalDataset` | description of a dataset, and references |
| network | `MangalNetwork` | finer description of a network, including positions |
| interactions | `MangalInteraction` | taxonomic entities involved and metadata |
| node | `MangalNode` | description of the node in the original dataset |
| backbone | `MangalReferenceTaxon` | actual taxonomic entity reconciled against backbones |
| reference | `MangalReference` | bibliographic information |
| attribute | `MangalAttribute` | key/value (used *e.g.* for interaction strength) |
## A note on speed
The package is designed to facilitate the user experience: as objects are nested
within one another, we will retrieve the full hierarchy instead of just pointing
you to the `id` of the parents/children. For example, this means that querying a
`MangalInteraction` will return not only the interaction itself, but also the
`MangalNode` for each species involved and their `MangalReferenceTaxon`. This
results in a larger number of queries, *i.e.* you initially wait longer to get
your data.
One clear advantage is that the data that are returned are complete, and so can
be used directly. Note also that the package uses a caching mechanism to speed
up this process; in short, a `MangalNode` or `MangalReferenceTaxon` is only
queried once, and then read from cache when it is next part of an interaction.
## Queries
*Almost* all functions in the package accept query arguments, which are simply
given as a series of pairs.
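For example (a short sketch reusing queries that appear elsewhere in this
documentation; dataset `15` is the `havens_1992` dataset used in the counting
vignette):

```julia
datasets("q" => "havens") # free-text search across all text fields
networks("dataset_id" => 15, "count" => 10) # filter by dataset, set the page size
```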
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.5.0 | df14d6e68cabd69ca97c9b4cb424501370eb4197 | docs | 2533 | # Introduction to `Mangal.jl`
The goal of this vignette is to explain the core design principles of the
**Mangal** package. Other vignettes present more realistic use-cases.
````julia
using Mangal
````
## Database status
| Object | Total number |
| --------------- | -------------------------------:|
| Datasets | 172 |
| Networks | 1386 |
| Interactions | 128331 |
| Nodes | 28408 |
| Reference taxon | 6874 |
## Types
The package exposes resources from the <mangal.io> database in a series of
types, whose fields are all documented in the manual. Every object in the
`mangal.io` hierarchy is represented by its type:
| object | type | definition |
| ------------ | ---------------------- | ---------------------------------------------------- |
| dataset | `MangalDataset` | description of a dataset, and references |
| network | `MangalNetwork` | finer description of a network, including positions |
| interactions | `MangalInteraction` | taxonomic entities involved and metadata |
| node | `MangalNode` | description of the node in the original dataset |
| backbone | `MangalReferenceTaxon` | actual taxonomic entity reconciled against backbones |
| reference | `MangalReference` | bibliographic information |
| attribute | `MangalAttribute` | key/value (used *e.g.* for interaction strength) |
## A note on speed
The package is designed to facilitate the user experience: as objects are nested
within one another, we will retrieve the full hierarchy instead of just pointing
you to the `id` of the parents/children. For example, this means that querying a
`MangalInteraction` will return not only the interaction itself, but also the
`MangalNode` for each species involved and their `MangalReferenceTaxon`. This
results in a larger number of queries, *i.e.* you initially wait longer to get
your data.
One clear advantage is that the data that are returned are complete, and so can
be used directly. Note also that the package uses a caching mechanism to speed
up this process; in short, a `MangalNode` or `MangalReferenceTaxon` is only
queried once, and then read from cache when it is next part of an interaction.
## Queries
*Almost* all functions in the package accept query arguments, which are simply
given as a series of pairs.
| Mangal | https://github.com/PoisotLab/Mangal.jl.git |
|
[
"MIT"
] | 0.4.0 | d8ab448f2d6c411159c36cc2283853f006ca180c | code | 459 | using Documenter, Elfel
makedocs(
modules = [Elfel],
doctest = false, clean = true,
format = Documenter.HTML(prettyurls = false),
authors = "Petr Krysl",
sitename = "Elfel.jl",
pages = Any[
"Home" => "index.md",
"Tutorials" => "tutorials/tutorials.md",
"How to guide" => "guide/guide.md",
"Reference" => "man/reference.md",
"Concepts" => "concepts/concepts.md"
],
)
deploydocs(
repo = "github.com/PetrKryslUCSD/Elfel.jl.git",
)
| Elfel | https://github.com/PetrKryslUCSD/Elfel.jl.git |
|
[
"MIT"
] | 0.4.0 | d8ab448f2d6c411159c36cc2283853f006ca180c | code | 253 | using Literate
for t in readdir(".")
if occursin(r"tut_.*.jl", t)
println("\nTutorial $t in $(pwd())\n")
Literate.markdown(t, "."; documenter=false);
# Literate.notebook(t, "."; execute=false, documenter=false);
end
end
| Elfel | https://github.com/PetrKryslUCSD/Elfel.jl.git |
|
[
"MIT"
] | 0.4.0 | d8ab448f2d6c411159c36cc2283853f006ca180c | code | 9394 | # # Solve the heat conduction equation
# Synopsis: Compute the solution of the Poisson equation of heat conduction with a
# nonzero heat source. Quadrilateral four-node elements are used.
# The problem is linear heat conduction equation posed on a bi-unit square,
# solved with Dirichlet boundary conditions around the circumference. Uniform
# nonzero heat generation rate is present. The exact solution is in this way
# manufactured and hence known. That gives us an opportunity to calculate the
# true error.
# The complete code is in the file [`tut_poisson_q4.jl`](tut_poisson_q4.jl).
# The solution will be defined within a module in order to eliminate conflicts
# with data or functions defined elsewhere.
module tut_poisson_q4
# We'll need some functionality from linear algebra, and the mesh libraries.
# Finally we will need the `Elfel` functionality.
using LinearAlgebra
using MeshCore.Exports
using MeshSteward.Exports
using Elfel.Exports
# This is the top level function.
function run()
# Input parameters:
A = 1.0 # length of the side of the square
kappa = 1.0; # thermal conductivity of the material
Q = -6.0; # internal heat generation rate
tempf(x, y) =(1.0 + x^2 + 2.0 * y^2); # the exact distribution of temperature
N = 1000; # number of element edges along the sides of the square domain
# Generate the computational mesh.
mesh = genmesh(A, N)
# Create the finite element space to represent the temperature solution. The
# degrees of freedom are real numbers (`Float64`), the quadrilaterals are
# defined by the mesh, and each of the elements has the continuity ``H
# ^1``, i. e. both the function values and the derivatives are square
# integrable.
Uh = FESpace(Float64, mesh, FEH1_Q4())
# Apply the essential boundary conditions at the circumference of the square
# domain. We find the boundary incidence relation (`boundary(mesh)`), and
# then the list of all vertices connected by the boundary cells. The
# function `tempf` defines the analytical temperature variation, and hence
# for each of the vertices `i` on the boundary (they are of manifold
# dimension `0`), we set the component of the field (1) to the exact value
# of the temperature at that location.
vl = connectedv(boundary(mesh));
locs = geometry(mesh)
for i in vl
setebc!(Uh, 0, i, 1, tempf(locs[i]...))
end
# Number the degrees of freedom, both the unknowns and the data
# (prescribed) degrees of freedom.
numberdofs!(Uh)
@show ndofs(Uh), nunknowns(Uh)
# Assemble the conductivity matrix and the vector of the heat loads. Refer
# to the definition of this function below.
K, F = assembleKF(Uh, kappa, Q)
# This is a vector to hold all degrees of freedom in the system.
T = fill(0.0, ndofs(Uh))
# Here we collect the data degrees of freedom (the known values).
gathersysvec!(T, Uh)
# The system of linear algebraic equations is solved.
solve!(T, K, F, nunknowns(Uh))
# The values of all the degrees of freedom can now be introduced into the
# finite element space.
scattersysvec!(Uh, T)
# Here we associate the values of the finite element space with the entities
# of the mesh as an attribute.
makeattribute(Uh, "T", 1)
# The correctness of the solution is checked by comparing the values at the
# vertices.
checkcorrectness(Uh, tempf)
# The attribute can now be written out for visualization into a VTK file.
vtkwrite("q4-T", baseincrel(mesh), [(name = "T",)])
true # return success
end
# The domain is a square, meshed with quadrilateral elements. The function
# `Q4block` creates an incidence relation that defines the quadrilateral
# element shapes by the vertices connected into the shapes. This incidence
# relation is then attached to the mesh and the mesh is returned.
function genmesh(A, N)
conn = Q4block(A, A, N, N)
return attach!(Mesh(), conn)
end
# The `assembleKF` function constructs the left-hand side coefficient matrix,
# conductivity matrix, as a sparse matrix, and a vector of the heat loads due
# to the internal heat generation rate `Q`.
# The boundary value problem is expressed in this weak form
# ```math
# \int_{V}(\mathrm{grad}\vartheta)\; \kappa (\mathrm{grad}T
# )^T\; \mathrm{d} V
# -\int_{V} \vartheta Q \; \mathrm{d} V
# = 0
# ```
# where the test function vanishes on the boundary where the temperature is
# prescribed, ``\vartheta(x) =0`` for ``x \in{S_1}``
# Substituting ``\vartheta = N_j `` and ``T = \sum_i N_i T_i`` we obtain the
# linear algebraic equations
# ```math
# \sum_i T_i \int_{V} \mathrm{grad}N_j \; \kappa (\mathrm{grad}N_i)^T\; \mathrm{d} V
# -\int_{V} N_j Q \; \mathrm{d} V = 0 , \quad \forall j.
# ```
# The volume element is ``\mathrm{d} V``, which in our case
# becomes ``1.0\times\mathrm{d} S``, since the thickness of the two
# dimensional domain is assumed to be 1.0.
function assembleKF(Uh, kappa, Q)
# At the top of the `assembleKF` we look at the function `integrate!` to
# evaluate the weak-form integrals. The key to making this calculation
# efficient is type stability. All the arguments coming in must have
# concrete types. This is why the `integrate!` function is an inner
# function: the function barrier allows for all arguments to be resolved to
# concrete types.
function integrate!(am, av, elit, qpit, kappa, Q)
nedof = ndofsperel(elit)
# The local assemblers are just like matrices or vectors
ke = LocalMatrixAssembler(nedof, nedof, 0.0)
fe = LocalVectorAssembler(nedof, 0.0)
for el in elit # Loop over all elements
init!(ke, eldofs(el), eldofs(el)) # zero out elementwise matrix
init!(fe, eldofs(el)) # and vector
for qp in qpit # Now loop over the quadrature points
Jac, J = jacjac(el, qp) # Calculate the Jacobian matrix, Jacobian
gradN = bfungrad(qp, Jac) # Evaluate the spatial gradients
JxW = J * weight(qp) # elementary volume
N = bfun(qp) # Basis function values at the quadrature point
# This double loop evaluates the elementwise conductivity
# matrix and the heat load vector precisely as the formula of
# the weak form dictates; see above.
for j in 1:nedof
    for i in 1:nedof
        ke[j, i] += dot(gradN[j], gradN[i]) * (kappa * JxW)
    end
    fe[j] += N[j] * Q * JxW
end
end
# Assemble the calculated contributions from this element
assemble!(am, ke)
assemble!(av, fe)
end
return am, av # Return the updated assemblers
end
# In the `assembleKF` function we first we create the element iterator. We
# can go through all the elements that define the domain of integration
# using this iterator. Each time a new element is accessed, some data are
# precomputed such as the element degrees of freedom.
elit = FEIterator(Uh)
# This is the quadrature point iterator. We know that the elements are
# quadrilateral, which makes the Gauss integration rule the obvious choice.
# We also select order 2 for accuracy. Quadrature-point iterators provide
# access to basis function values and gradients, the Jacobian matrix and
# the Jacobian determinant, the location of the quadrature point and so
# on.
qpit = QPIterator(Uh, (kind = :Gauss, order = 2))
# Next we create assemblers, one for the sparse system matrix and one for
# the system vector.
am = start!(SysmatAssemblerSparse(0.0), ndofs(Uh), ndofs(Uh))
av = start!(SysvecAssembler(0.0), ndofs(Uh))
# Now we call the integration function. The assemblers are modified inside
# this function...
@time integrate!(am, av, elit, qpit, kappa, Q)
# ...so that when the integration is done, we can materialize the sparse
# matrix and the vector and return them.
return finish!(am), finish!(av)
end
# The linear algebraic system is solved by partitioning. The vector `T` is
# initially all zero, except in the degrees of freedom which are prescribed as
# nonzero. Therefore the product of the conductivity matrix and the vector `T`
# are the heat loads due to nonzero essential boundary conditions. To this we
# add the vector of heat loads due to the internal heat generation rate. The
# submatrix of the heat conduction matrix corresponding to the free degrees of
# freedom (unknowns), `K[1:nu, 1:nu]` is then used to solve for the unknowns `T
# [1:nu]`.
function solve!(T, K, F, nu)
@time KT = K * T
@time T[1:nu] = K[1:nu, 1:nu] \ (F[1:nu] - KT[1:nu])
end
# The correctness can be checked in various ways. Here we calculate the mean
# deviation of the calculated temperatures at the nodes relative to the exact
# values of the temperature.
function checkcorrectness(Uh, tempf)
geom = geometry(Uh.mesh)
ir = baseincrel(Uh.mesh)
T = attribute(ir.right, "T")
std = 0.0
for i in 1:length(T)
std += abs(T[i][1] - tempf(geom[i]...))
end
@show (std / length(T)) <= 1.0e-9
end
end # module
# The module can now be used.
using .tut_poisson_q4
tut_poisson_q4.run()
| Elfel | https://github.com/PetrKryslUCSD/Elfel.jl.git |
|
[
"MIT"
] | 0.4.0 | d8ab448f2d6c411159c36cc2283853f006ca180c | code | 17286 | # # Solve the Stokes equation of colliding flow: Hood-Taylor, general formulation
# Synopsis: Compute the solution of the Stokes equation of two-dimensional
# incompressible viscous flow for a manufactured problem of colliding flow.
# Hood-Taylor triangular elements are used.
# The "manufactured" colliding flow example from Elman et al 2014. The
# Hood-Taylor formulation with quadratic triangles for the velocity and
# continuous pressure on linear triangles.
# The pressure is shown here with contours, and the velocities visualized with
# arrows at random points.
# 
# The formulation is the general elasticity-like scheme with
# strain-rate/velocity matrices. It can be manipulated into the one derived
# in Reddy, Introduction to the finite element method, 1993. Page 486 ff.
# The complete code is in the file [`tut_stokes_ht_p2_p1_gen.jl`](tut_stokes_ht_p2_p1_gen.jl).
# The solution will be defined within a module in order to eliminate conflicts
# with data or functions defined elsewhere.
module tut_stokes_ht_p2_p1_gen
# We'll need some functionality from linear algebra, static arrays, and the mesh
# libraries. Some plotting will be produced to visualize the structure of the
# stiffness matrix. Finally we will need the `Elfel` functionality.
using LinearAlgebra
using StaticArrays
using MeshCore.Exports
using MeshSteward.Exports
using Elfel.Exports
using UnicodePlots
# The boundary value problem is expressed in this weak form
# ```math
# \int_{V}{\underline{\varepsilon}}(\underline{\delta v})^T\;
# \underline{\underline{D}}\; {\underline{\varepsilon}}(\underline{u})\; \mathrm{d} V
# - \int_{V} \mathrm{div}(\underline{\delta v})\; p\; \mathrm{d} V = 0,\quad \forall \underline{\delta v}
# ```
# ```math
# - \int_{V} \delta q\; \mathrm{div}(\underline{u}) \; \mathrm{d} V = 0,\quad \forall \delta q
# ```
# Here ``\underline{\delta v}`` are the test functions in the velocity space,
# and ``\delta q`` are the pressure test functions. Further ``\underline
# {u}`` is the trial velocity, and ``p`` is the trial pressure.
function run()
mu = 1.0 # dynamic viscosity
# This is the material-property matrix ``\underline{\underline{D}}``:
D = SMatrix{3, 3}(
[2*mu 0 0
0 2*mu 0
0 0 mu])
A = 1.0 # half of the length of the side of the square
N = 100 # number of element edges per side of the square
# These three functions define the true velocity components and the true
# pressure.
trueux = (x, y) -> 20 * x * y ^ 3
trueuy = (x, y) -> 5 * x ^ 4 - 5 * y ^ 4
truep = (x, y) -> 60 * x ^ 2 * y - 20 * y ^ 3
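# One can verify by direct differentiation that this triple is an exact
# solution of the Stokes problem with zero body force: the velocity is
# divergence-free, and the viscous term balances the pressure gradient
# (recall that ``\mu = 1`` here):
# ```math
# \mathrm{div}\,\underline{u} = 20 y^3 - 20 y^3 = 0, \qquad
# \mu\Delta u_x = 120 x y = \frac{\partial p}{\partial x}, \qquad
# \mu\Delta u_y = 60 x^2 - 60 y^2 = \frac{\partial p}{\partial y} .
# ```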
# Construct the two meshes for the mixed method. They need to support the
# velocity and pressure spaces.
vmesh, pmesh = genmesh(A, N)
# Construct the velocity space: it is a vector space with two components. The
# degrees of freedom are real numbers (`Float64`). The velocity mesh
# carries finite elements of continuity class ``H^1``, i. e. both the
# function values and the derivatives are square integrable. Each node
# carries 2 degrees of freedom, hence there are two velocity components per
# node.
Uh = FESpace(Float64, vmesh, FEH1_T6(), 2)
# Now we apply the boundary conditions at the nodes around the
# circumference.
locs = geometry(vmesh)
# We use searching based on the presence of the node within a box. The
# entire boundary will be captured within these four boxes, provided we
# inflate those boxes with a little tolerance, since we cannot rely on the
# nodes being located precisely at the given coordinates.
boxes = [[-A A -A -A], [-A -A -A A], [A A -A A], [-A A A A]]
inflate = A / N / 100
for box in boxes
vl = vselect(locs; box = box, inflate = inflate)
for i in vl
# Remember that all components of the velocity are known at the
# boundary.
setebc!(Uh, 0, i, 1, trueux(locs[i]...))
setebc!(Uh, 0, i, 2, trueuy(locs[i]...))
end
end
# Now we construct the pressure space. It is a continuous, piecewise linear
# space supported on a mesh of three-node triangles.
Ph = FESpace(Float64, pmesh, FEH1_T3(), 1)
# The pressure in this "enclosed" flow example is only known up to a constant.
# Setting the pressure degree of freedom at one node makes the solution
# unique.
atcenter = vselect(geometry(pmesh); nearestto = [0.0, 0.0])
setebc!(Ph, 0, atcenter[1], 1, 0.0)
# Number the degrees of freedom. First all the free degrees of freedom are
# numbered, both velocities and pressures. Next all the data degrees of
# freedom are numbered, again both for the velocities and for the
# pressures.
numberdofs!([Uh, Ph])
# The total number of degrees of freedom is now calculated.
tndof = ndofs(Uh) + ndofs(Ph)
# As is the total number of unknowns.
tnunk = nunknowns(Uh) + nunknowns(Ph)
# Assemble the coefficient matrix.
K = assembleK(Uh, Ph, tndof, D)
# Display the structure of the indefinite stiffness matrix. Note that this
# is the complete matrix, including rows and columns for all the degrees of
# freedom, unknown and known.
p = spy(K, canvas = DotCanvas)
display(p)
# Solve the linear algebraic system. First construct the system vector of all
# the degrees of freedom: the first `tnunk` rows correspond to the unknowns,
# and the subsequent rows hold the data degrees of freedom.
U = fill(0.0, tndof)
gathersysvec!(U, [Uh, Ph])
# Note that the vector `U` holds nonzero numbers only in the rows that
# correspond to the data degrees of freedom. Multiplying the stiffness matrix
# with this vector generates a load vector on the right-hand side. Otherwise
# there is no loading, hence the vector `F` consists of all zeros.
F = fill(0.0, tndof)
solve!(U, K, F, tnunk)
# Once we have solved the system of linear equations, we can distribute the
# solution from the vector `U` into the finite element spaces.
scattersysvec!([Uh, Ph], U)
# Given that the solution is manufactured, i. e. exactly known, we can
# calculate the true errors.
@show ep = evaluate_pressure_error(Ph, truep)
@show ev = evaluate_velocity_error(Uh, trueux, trueuy)
# Postprocessing. First we make attributes, scalar nodal attributes,
# associated with the meshes for the pressures and the velocity.
makeattribute(Ph, "p", 1)
makeattribute(Uh, "ux", 1)
makeattribute(Uh, "uy", 2)
# The pressure and the velocity components are then written out into two VTK
# files.
vtkwrite("tut_stokes_ht_p2_p1_gen-p", baseincrel(pmesh), [(name = "p",), ])
vtkwrite("tut_stokes_ht_p2_p1_gen-v", baseincrel(vmesh), [(name = "ux",), (name = "uy",)])
# The method converges very well, but, why not, here is the true pressure
# written out into a VTK file as well. We create a synthetic attribute by
# evaluating the true pressure at the locations of the nodes of the
# pressure mesh.
geom = geometry(Ph.mesh)
ir = baseincrel(Ph.mesh)
ir.right.attributes["pt"] = VecAttrib([truep(geom[i]...) for i in 1:length(geom)])
vtkwrite("tut_stokes_ht_p2_p1_gen-pt", baseincrel(pmesh), [(name = "pt",), ])
return true
end
function genmesh(A, N)
# Hood-Taylor pair of meshes is needed. The first mesh is for the
# velocities, composed of six-node triangles.
vmesh = attach!(Mesh(), T6block(2 * A, 2 * A, N, N), "velocity")
# Now translate so that the center of the square is at the origin of the
# coordinates.
ir = baseincrel(vmesh)
transform(ir, x -> x .- A)
# The second mesh is used for the pressures, and it is composed of
# three-node triangles such that the corner nodes are shared between the
# first and the second mesh.
pmesh = attach!(Mesh(), T6toT3(baseincrel(vmesh, "velocity")), "pressure")
# Return the pair of meshes
return vmesh, pmesh
end
function assembleK(Uh, Ph, tndof, D)
function integrate!(ass, elits, qpits, D)
# Consider the elementwise definition of the test strain rate, ``
# {\underline{\varepsilon}}(\underline{\delta v})``. It is calculated
# from the elementwise degrees of freedom and the associated basis
# functions as
# ```math
# {\underline{\varepsilon}}(\underline{\delta v}) =
# \sum_i{\delta V}_i {\underline{B}_{c(i)}(N_i)}
# ```
# where ``i = 1, \ldots, n_{du}``, and ``n_{du}`` is the number of
# velocity degrees of freedom per element, ``c(i)`` is the number of
# the component corresponding to the degree of freedom ``i``. This is
# either 1, when degree of freedom ``i`` is the ``x``-component of the
# velocity, or 2 otherwise (for the ``y``-component of the velocity).
# Analogously for the trial strain rate.
# The strain-rate/velocity matrices are defined as
# ```math
# {\underline{B}_{1}(N_i)} =
# \left[\begin{array}{c}
# \partial{N_i}/\partial{x} \\
# 0 \\
# \partial{N_i}/\partial{y}
# \end{array}\right],
# ```
# and
# ```math
# {\underline{B}_{2}(N_i)} =
# \left[\begin{array}{c}
# 0 \\
# \partial{N_i}/\partial{y} \\
# \partial{N_i}/\partial{x}
# \end{array}\right].
# ```
# This tiny function evaluates the strain-rate/velocity matrices defined above
# from the gradient of a basis function and the given number of the
# component corresponding to the current degree of freedom.
B = (g, k) -> (k == 1 ?
SVector{3}((g[1], 0, g[2])) :
SVector{3}((0, g[2], g[1])))
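# As a quick sanity check of these definitions (illustrative only, not part
# of the computation), for a sample basis-function gradient `g = (0.2, -0.1)`
# both of the following hold:
# ```julia
# B((0.2, -0.1), 1) == SVector(0.2, 0.0, -0.1)  # x-component degree of freedom
# B((0.2, -0.1), 2) == SVector(0.0, -0.1, 0.2)  # y-component degree of freedom
# ```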
# This array defines the components for the element degrees of freedom,
# as defined above as ``c(i)``.
c = edofcompnt(Uh)
# These are the totals of the velocity and pressure degrees of freedom
# per element.
n_du, n_dp = ndofsperel.((Uh, Ph))
# The local matrix assemblers are used as if they were ordinary
# elementwise dense matrices. Here they are defined.
kuu = LocalMatrixAssembler(n_du, n_du, 0.0)
kup = LocalMatrixAssembler(n_du, n_dp, 0.0)
for el in zip(elits...)
uel, pel = el
# The local matrix assemblers are initialized with zeros for the
# values, and with the element degree of freedom vectors to be used
# in the assembly. The assembler `kuu` is used for the velocity
# degrees of freedom, and the assembler `kup` collect the coupling
# coefficients between the velocity and the pressure. The function
# `eldofs` collects the global numbers of the degrees of freedom
# either for the velocity space, or for the pressure space
# (`eldofs(pel)`).
init!(kuu, eldofs(uel), eldofs(uel))
init!(kup, eldofs(uel), eldofs(pel))
for qp in zip(qpits...)
uqp, pqp = qp
# The integration is performed using the velocity quadrature points.
Jac, J = jacjac(uel, uqp)
JxW = J * weight(uqp)
gradNu = bfungrad(uqp, Jac) # gradients of the velocity basis functions
Np = bfun(pqp) # pressure basis functions
# This double loop corresponds precisely to the integrals of the
# weak form. This is the matrix in the upper left corner.
for i in 1:n_du
DBi = D * B(gradNu[i], c[i])
for j in 1:n_du
Bj = B(gradNu[j], c[j])
kuu[j, i] += dot(Bj, DBi) * (JxW)
end
end
# And this is the coupling matrix in the top right corner.
for i in 1:n_dp, j in 1:n_du
kup[j, i] += gradNu[j][c[j]] * (-JxW * Np[i])
end
end
# Assemble the matrices. The submatrix off the diagonal is assembled
# twice, once as itself, and once as its transpose.
assemble!(ass, kuu)
assemble!(ass, kup) # top right corner
assemble!(ass, transpose(kup)) # bottom left corner
end
return ass # return the updated assembler of the global matrix
end
# In the `assembleK` function we first create the element iterators. We
# can go through all the elements, both in the velocity finite element
# space and in the pressure finite element space, that define the domain of
# integration using this iterator. Each time a new element is accessed,
# some data are precomputed such as the element degrees of freedom,
# components of the degree of freedom, etc. Note that we need to iterate
# two finite element spaces, hence we create a tuple of iterators.
elits = (FEIterator(Uh), FEIterator(Ph))
# These are the quadrature point iterators. We know that the elements are
# triangular. We choose the three-point rule, to capture the quadratic
# component in the velocity space. Quadrature-point iterators provide
# access to basis function values and gradients, the Jacobian matrix and
# the Jacobian determinant, the location of the quadrature point and so
# on. Note that we need to iterate the quadrature rules of
# two finite element spaces, hence we create a tuple of iterators.
qargs = (kind = :default, npts = 3,)
qpits = (QPIterator(Uh, qargs), QPIterator(Ph, qargs))
# The matrix will be assembled into this assembler, which is initialized
# with the total number of degrees of freedom (the dimension of the coefficient
# matrix before partitioning into unknowns and data degrees of freedom).
ass = SysmatAssemblerSparse(0.0)
start!(ass, tndof, tndof)
# The integration is carried out, and then...
integrate!(ass, elits, qpits, D)
# ...we materialize the sparse stiffness matrix and return it.
return finish!(ass)
end
# The linear algebraic system is solved by partitioning. The vector `U` is
# initially all zero, except in the degrees of freedom which are prescribed as
# nonzero. Therefore the product of the stiffness matrix and the vector `U`
# yields the loads due to the nonzero essential boundary conditions. The
# submatrix of the stiffness matrix corresponding to the free degrees of
# freedom (unknowns), `K[1:nu, 1:nu]`, is then used to solve for the unknowns
# `U[1:nu]`.
function solve!(U, K, F, nu)
KT = K * U
U[1:nu] = K[1:nu, 1:nu] \ (F[1:nu] - KT[1:nu])
end
# The function `evaluate_pressure_error` evaluates the true ``L^2`` error of
# the pressure. It does that by integrating the square of the difference
# between the approximate pressure and the true pressure, the true pressure
# being provided by the `truep` function.
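# In other words, the function below approximates, by quadrature over the
# pressure mesh,
# ```math
# E_p = \left( \int_{V} (p_h - p)^2 \; \mathrm{d} V \right)^{1/2} ,
# ```
# where ``p_h`` is the finite element pressure and ``p`` is the true pressure.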
function evaluate_pressure_error(Ph, truep)
function integrate!(elit, qpit, truep)
n_dp = ndofsperel(elit)
E = 0.0
for el in elit
dofvals = eldofvals(el)
for qp in qpit
Jac, J = jacjac(el, qp)
JxW = J * weight(qp)
Np = bfun(qp)
pt = truep(location(el, qp)...)
pa = 0.0
for j in 1:n_dp
pa += (dofvals[j] * Np[j])
end
E += (JxW) * (pa - pt)^2
end
end
return sqrt(E)
end
elit = FEIterator(Ph)
qargs = (kind = :default, npts = 3,)
qpit = QPIterator(Ph, qargs)
return integrate!(elit, qpit, truep)
end
# The function `evaluate_velocity_error` evaluates the true ``L^2`` error of
# the velocity. It does that by integrating the square of the difference
# between the approximate velocity and the true velocity, the true velocity
# being provided by the `trueux`, `trueuy` functions.
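# Analogously to the pressure error above, the function below approximates
# ```math
# E_u = \left( \int_{V} \| \underline{u}_h - \underline{u} \|^2 \; \mathrm{d} V \right)^{1/2} ,
# ```
# where ``\underline{u}_h`` is the finite element velocity and
# ``\underline{u}`` is the true velocity.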
function evaluate_velocity_error(Uh, trueux, trueuy)
function integrate!(elit, qpit, trueux, trueuy)
n_du = ndofsperel(elit)
uedofcomp = edofcompnt(Uh)
E = 0.0
for el in elit
udofvals = eldofvals(el)
for qp in qpit
Jac, J = jacjac(el, qp)
JxW = J * weight(qp)
Nu = bfun(qp)
uxt = trueux(location(el, qp)...)
uyt = trueuy(location(el, qp)...)
uxa = 0.0
uya = 0.0
for j in 1:n_du
(uedofcomp[j] == 1) && (uxa += (udofvals[j] * Nu[j]))
(uedofcomp[j] == 2) && (uya += (udofvals[j] * Nu[j]))
end
E += (JxW) * ((uxa - uxt)^2 + (uya - uyt)^2)
end
end
return sqrt(E)
end
elit = FEIterator(Uh)
qargs = (kind = :default, npts = 3,)
qpit = QPIterator(Uh, qargs)
return integrate!(elit, qpit, trueux, trueuy)
end
end
# To run the example, evaluate this file which will compile the module
# `.tut_stokes_ht_p2_p1_gen`.
using .tut_stokes_ht_p2_p1_gen
tut_stokes_ht_p2_p1_gen.run()
# # Solve the Stokes equation of colliding flow: Reddy formulation
# Synopsis: Compute the solution of the Stokes equation of two-dimensional
# incompressible viscous flow for a manufactured problem of colliding flow.
# Hood-Taylor triangular elements are used.
# The "manufactured" colliding flow example from Elman et al 2014. The
# Hood-Taylor formulation with quadratic triangles for the velocity and
# continuous pressure on linear triangles.
# The pressure is shown here with contours, and the velocities visualized with
# arrows at random points.
# 
# The formulation is the one derived in Reddy, Introduction to the finite
# element method, 1993. Page 486 ff.
# The complete code is in the file [`tut_stokes_ht_p2_p1_reddy.jl`](tut_stokes_ht_p2_p1_reddy.jl).
# The solution will be defined within a module in order to eliminate conflicts
# with data or functions defined elsewhere.
module tut_stokes_ht_p2_p1_reddy
# We'll need some functionality from linear algebra, static arrays, and the mesh
# libraries. Some plotting will be produced to visualize structure of the
# stiffness matrix. Finally we will need the `Elfel` functionality.
using LinearAlgebra
using StaticArrays
using MeshCore.Exports
using MeshSteward.Exports
using Elfel.Exports
using UnicodePlots
# The boundary value problem is expressed in this weak form
# ```math
# \int_{\Omega} \left(2\mu\frac{\partial{w_x}}{\partial{x}}\frac{\partial{u_x}}{\partial{x}}
# +\mu\frac{\partial{w_x}}{\partial{y}}\left(\frac{\partial{u_x}}{\partial{y}}+\frac{\partial{u_y}}{\partial{x}}\right) - \frac{\partial{w_x}}{\partial{x}} p \right) d\Omega = 0
# ```
# ```math
# \int_{\Omega} \left(2\mu\frac{\partial{w_y}}{\partial{y}}\frac{\partial{u_y}}{\partial{y}}
# +\mu\frac{\partial{w_y}}{\partial{x}}\left(\frac{\partial{u_x}}{\partial{y}}+\frac{\partial{u_y}}{\partial{x}}\right) - \frac{\partial{w_y}}{\partial{y}} p \right) d\Omega = 0
# ```
# ```math
# -\int_{\Omega} q \left(\frac{\partial{u_x}}{\partial{x}} + \frac{\partial{u_y}}{\partial{y}}\right)d\Omega = 0
# ```
# Here ``w_x, w_y`` are the test functions in the velocity space,
# and ``q`` is the pressure test function. Further ``u_x, u_y`` are the trial
# velocity components, and ``p`` is the trial pressure.
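# For orientation, here is a sketch of two of the element matrices that the
# functions `integrateApart!` and `integrateBBTparts!` below assemble from
# this weak form (``N_i`` denote velocity basis functions, ``N^p_j`` pressure
# basis functions; the names `kuxux` and `kuxp` follow the code):
# ```math
# k^{u_xu_x}_{ij} = \int_{\Omega} \mu\left(2\frac{\partial{N_i}}{\partial{x}}\frac{\partial{N_j}}{\partial{x}}
# + \frac{\partial{N_i}}{\partial{y}}\frac{\partial{N_j}}{\partial{y}}\right) d\Omega ,
# \qquad
# k^{u_xp}_{ij} = -\int_{\Omega} \frac{\partial{N_i}}{\partial{x}} N^p_j \; d\Omega
# ```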
function run()
mu = 1.0 # dynamic viscosity
A = 1.0 # half of the length of the side of the square
N = 100 # number of element edges per side of the square
# These three functions define the true velocity components and the true
# pressure.
trueux = (x, y) -> 20 * x * y ^ 3
trueuy = (x, y) -> 5 * x ^ 4 - 5 * y ^ 4
truep = (x, y) -> 60 * x ^ 2 * y - 20 * y ^ 3
# Construct the two meshes for the mixed method. They need to support the
# velocity and pressure spaces.
vmesh, pmesh = genmesh(A, N)
# Construct the velocity spaces: as an alternative to the previous treatment
# with a single vector space for the velocity, here we will use two vector
# spaces, one for each component of the velocity. The degrees of freedom
# are real numbers (`Float64`). The velocity mesh carries the finite
# elements of continuity class ``H^1``, i. e. both the function values and
# the derivatives are square integrable. Each node carries just one degree of
# freedom (1).
Uxh = FESpace(Float64, vmesh, FEH1_T6(), 1)
Uyh = FESpace(Float64, vmesh, FEH1_T6(), 1)
# Now we apply the boundary conditions at the nodes around the
# circumference.
locs = geometry(vmesh)
# We use searching based on the presence of the node within a box. The
# entire boundary will be captured within these four boxes, provided we
# inflate those boxes with a little tolerance, since we cannot rely on the
# nodes being located precisely at the given coordinates.
boxes = [[-A A -A -A], [-A -A -A A], [A A -A A], [-A A A A]]
inflate = A / N / 100
for box in boxes
vl = vselect(locs; box = box, inflate = inflate)
for i in vl
# Remember that all components of the velocity are known at the
# boundary.
setebc!(Uxh, 0, i, 1, trueux(locs[i]...))
setebc!(Uyh, 0, i, 1, trueuy(locs[i]...))
end
end
# Now we construct the pressure space. It is a continuous, piecewise linear
# space supported on a mesh of three-node triangles.
Ph = FESpace(Float64, pmesh, FEH1_T3(), 1)
# The pressure in this "enclosed" flow example is only known up to a constant.
# Setting the pressure degree of freedom at one node makes the solution
# unique.
atcenter = vselect(geometry(pmesh); nearestto = [0.0, 0.0])
setebc!(Ph, 0, atcenter[1], 1, 0.0)
# Number the degrees of freedom. First all the free degrees of freedom are
# numbered, both velocities and pressures. Next all the data degrees of
# freedom are numbered, again both for the velocities and for the
# pressures.
numberdofs!([Uxh, Uyh, Ph])
# The total number of degrees of freedom is now calculated.
tndof = ndofs(Uxh) + ndofs(Uyh) + ndofs(Ph)
# As is the total number of unknowns.
tnunk = nunknowns(Uxh) + nunknowns(Uyh) + nunknowns(Ph)
# Assemble the coefficient matrix.
K = assembleK(Uxh, Uyh, Ph, tndof, mu)
# Display the structure of the indefinite stiffness matrix. Note that this
# is the complete matrix, including rows and columns for all the degrees of
# freedom, unknown and known.
p = spy(K, canvas = DotCanvas)
display(p)
# Solve the linear algebraic system. First construct the system vector of all
# the degrees of freedom: the first `tnunk` rows correspond to the unknowns,
# and the subsequent rows hold the data degrees of freedom.
U = fill(0.0, tndof)
gathersysvec!(U, [Uxh, Uyh, Ph])
# Note that the vector `U` holds nonzero numbers only in the rows that
# correspond to the data degrees of freedom. Multiplying the stiffness matrix
# with this vector generates a load vector on the right-hand side. Otherwise
# there is no loading, hence the vector `F` consists of all zeros.
F = fill(0.0, tndof)
solve!(U, K, F, tnunk)
# Once we have solved the system of linear equations, we can distribute the
# solution from the vector `U` into the finite element spaces.
scattersysvec!([Uxh, Uyh, Ph], U)
# Given that the solution is manufactured, i. e. exactly known, we can
# calculate the true errors.
@show ep = evaluate_pressure_error(Ph, truep)
@show ev = evaluate_velocity_error(Uxh, Uyh, trueux, trueuy)
# Postprocessing. First we make attributes, scalar nodal attributes,
# associated with the meshes for the pressures and the velocity.
makeattribute(Ph, "p", 1)
makeattribute(Uxh, "ux", 1)
makeattribute(Uyh, "uy", 1)
# The pressure and the velocity components are then written out into two VTK
# files.
vtkwrite("tut_stokes_ht_p2_p1_reddy-p", baseincrel(pmesh), [(name = "p",), ])
vtkwrite("tut_stokes_ht_p2_p1_reddy-v", baseincrel(vmesh), [(name = "ux",), (name = "uy",)])
return true
end
function genmesh(A, N)
# Hood-Taylor pair of meshes is needed. The first mesh is for the
# velocities, composed of six-node triangles.
vmesh = attach!(Mesh(), T6block(2 * A, 2 * A, N, N), "velocity")
# Now translate so that the center of the square is at the origin of the
# coordinates.
ir = baseincrel(vmesh)
transform(ir, x -> x .- A)
# The second mesh is used for the pressures, and it is composed of
# three-node triangles such that the corner nodes are shared between the
# first and the second mesh.
pmesh = attach!(Mesh(), T6toT3(baseincrel(vmesh, "velocity")), "pressure")
# Return the pair of meshes
return vmesh, pmesh
end
function assembleK(Uxh, Uyh, Ph, tndof, mu)
# Here we demonstrate that the coefficient matrix, which is expected to have
# the structure
# ```math
# K = \left[
# \begin{array}{cc}
# A & B^T \\
# B & 0
# \end{array}\right]
# ```
# can be constructed in stages. Refer to the description below. The two
# functions below carry out the integration of two separate parts of the
# coefficient matrix.
function integrateApart!(ass, elits, qpits, mu)
uxnedof, uynedof, pnedof = ndofsperel.(elits)
kuxux = LocalMatrixAssembler(uxnedof, uxnedof, 0.0)
kuyuy = LocalMatrixAssembler(uynedof, uynedof, 0.0)
kuxuy = LocalMatrixAssembler(uxnedof, uynedof, 0.0)
for el in zip(elits...)
uxel, uyel, pel = el
init!(kuxux, eldofs(uxel), eldofs(uxel))
init!(kuyuy, eldofs(uyel), eldofs(uyel))
init!(kuxuy, eldofs(uxel), eldofs(uyel))
for qp in zip(qpits...)
# Step the quadrature point iterators in unison: this assumes that
# in fact there is the same number of quadrature points in all
# the quadrature rules.
uxqp, uyqp, pqp = qp
Jac, J = jacjac(pel, pqp)
JxW = J * weight(pqp)
# Note that the gradients of the basis functions are not
# necessarily the same in those two velocity spaces. Hence we
# must grab the gradient from the correct space.
gradNux = bfungrad(uxqp, Jac)
gradNuy = bfungrad(uyqp, Jac)
for j in 1:uxnedof, i in 1:uxnedof
kuxux[i, j] += (mu * JxW) * (2 * gradNux[i][1] * gradNux[j][1] + gradNux[i][2] * gradNux[j][2])
end
for j in 1:uynedof, i in 1:uynedof
kuyuy[i, j] += (mu * JxW) * (gradNuy[i][1] * gradNuy[j][1] + 2 * gradNuy[i][2] * gradNuy[j][2])
end
for j in 1:uynedof, i in 1:uxnedof
kuxuy[i, j] += (mu * JxW) * (gradNux[i][1] * gradNuy[j][2])
end
end
assemble!(ass, kuxux)
assemble!(ass, kuxuy) # off-diagonal matrix needs to be assembled twice
assemble!(ass, transpose(kuxuy))
assemble!(ass, kuyuy)
end
return ass
end
function integrateBBTparts!(ass, elits, qpits)
uxnedof, uynedof, pnedof = ndofsperel.(elits)
kuxp = LocalMatrixAssembler(uxnedof, pnedof, 0.0)
kuyp = LocalMatrixAssembler(uynedof, pnedof, 0.0)
for el in zip(elits...)
# The iterators of the finite elements are stepped in unison.
uxel, uyel, pel = el
# Initialize the local matrix assembler with the global degree of
# freedom numbers, both for the velocity spaces and for the
# pressure space.
init!(kuxp, eldofs(uxel), eldofs(pel))
init!(kuyp, eldofs(uyel), eldofs(pel))
for qp in zip(qpits...)
# Step the quadrature point iterators in unison: this assumes that
# in fact there is the same number of quadrature points in all
# the quadrature rules.
uxqp, uyqp, pqp = qp
Jac, J = jacjac(pel, pqp)
JxW = J * weight(pqp)
gradNux = bfungrad(uxqp, Jac)
gradNuy = bfungrad(uyqp, Jac)
Np = bfun(pqp)
for j in 1:pnedof, i in 1:uxnedof
kuxp[i, j] += (-JxW) * (gradNux[i][1] * Np[j])
end
for j in 1:pnedof, i in 1:uynedof
kuyp[i, j] += (-JxW) * (gradNuy[i][2] * Np[j])
end
end
assemble!(ass, kuxp)
assemble!(ass, transpose(kuxp))
assemble!(ass, kuyp)
assemble!(ass, transpose(kuyp))
end
return ass
end
# In the `assembleK` function we first create the element iterators. We
# can go through all the elements, both in the velocity finite element
# space and in the pressure finite element space, that define the domain of
# integration using this iterator. Each time a new element is accessed,
# some data are precomputed such as the element degrees of freedom,
# components of the degree of freedom, etc. Note that we need to iterate
# two finite element spaces, hence we create a tuple of iterators.
elits = (FEIterator(Uxh), FEIterator(Uyh), FEIterator(Ph))
# These are the quadrature point iterators. We know that the elements are
# triangular. We choose the three-point rule, to capture the quadratic
# component in the velocity space. Quadrature-point iterators provide
# access to basis function values and gradients, the Jacobian matrix and
# the Jacobian determinant, the location of the quadrature point and so on.
# Note that we need to iterate the quadrature rules of three finite element
# spaces, hence we create a tuple of iterators. All of these quadrature
# point iterators refer to the "same" quadrature rule: the same number of
# quadrature points, the same weights, and so on. However, the services
# these quadrature points provide do depend on the finite element space as
# well, for instance they would typically have different basis functions.
qargs = (kind = :default, npts = 3,)
qpits = (QPIterator(Uxh, qargs), QPIterator(Uyh, qargs), QPIterator(Ph, qargs))
# The matrix will be assembled into this assembler, which is initialized
# with the total number of degrees of freedom (the dimension of the coefficient
# matrix before partitioning into unknowns and data degrees of freedom).
ass = SysmatAssemblerSparse(0.0)
start!(ass, tndof, tndof)
# First we calculate the "A" part, using the function below. It
# is "assembled" into the assembler, which means that after this function
# finishes, the assembler represents this intermediate matrix
# ```math
# K = \left[
# \begin{array}{cc}
# A & 0 \\
# 0 & 0
# \end{array}\right]
# ```
integrateApart!(ass, elits, qpits, mu)
# Then the "B-transpose(B)" part using this function is added to the
# assembler. When the function below finishes, the assembler represents the
# entire ``K`` matrix.
integrateBBTparts!(ass, elits, qpits)
# Finally, we materialize the sparse stiffness matrix from the assembler and
# return it.
return finish!(ass)
end
# The linear algebraic system is solved by partitioning. The vector `U` is
# initially all zero, except in the degrees of freedom which are prescribed as
# nonzero. Therefore the product of the stiffness matrix and the vector `U`
# yields the loads due to the nonzero essential boundary conditions. The
# submatrix of the stiffness matrix corresponding to the free degrees of
# freedom (unknowns), `K[1:nu, 1:nu]`, is then used to solve for the unknowns
# `U[1:nu]`.
function solve!(U, K, F, nu)
KT = K * U
U[1:nu] = K[1:nu, 1:nu] \ (F[1:nu] - KT[1:nu])
end
# The function `evaluate_pressure_error` evaluates the true ``L^2`` error of
# the pressure. It does that by integrating the square of the difference
# between the approximate pressure and the true pressure, the true pressure
# being provided by the `truep` function.
function evaluate_pressure_error(Ph, truep)
function integrate!(elit, qpit, truep)
n_dp = ndofsperel(elit)
E = 0.0
for el in elit
dofvals = eldofvals(el)
for qp in qpit
Jac, J = jacjac(el, qp)
JxW = J * weight(qp)
Np = bfun(qp)
pt = truep(location(el, qp)...)
pa = 0.0
for j in 1:n_dp
pa += (dofvals[j] * Np[j])
end
E += (JxW) * (pa - pt)^2
end
end
return sqrt(E)
end
elit = FEIterator(Ph)
qargs = (kind = :default, npts = 3,)
qpit = QPIterator(Ph, qargs)
return integrate!(elit, qpit, truep)
end
# The function `evaluate_velocity_error` evaluates the true ``L^2`` error of
# the velocity. It does that by integrating the square of the difference
# between the approximate velocity and the true velocity, the true velocity
# being provided by the `trueux`, `trueuy` functions.
function evaluate_velocity_error(Uxh, Uyh, trueux, trueuy)
function integrate!(elits, qpits, trueux, trueuy)
n_du, _ = ndofsperel.(elits)
E = 0.0
for el in zip(elits...)
uxel, uyel = el
uxdofvals = eldofvals(uxel)
uydofvals = eldofvals(uyel)
for qp in zip(qpits...)
uxqp, uyqp = qp
Jac, J = jacjac(uxel, uxqp)
JxW = J * weight(uxqp)
Nu = bfun(uxqp)
uxt = trueux(location(uxel, uxqp)...)
uyt = trueuy(location(uxel, uxqp)...)
uxa = 0.0
uya = 0.0
for j in 1:n_du
uxa += (uxdofvals[j] * Nu[j])
uya += (uydofvals[j] * Nu[j])
end
E += (JxW) * ((uxa - uxt)^2 + (uya - uyt)^2)
end
end
return sqrt(E)
end
elits = (FEIterator(Uxh), FEIterator(Uyh),)
qargs = (kind = :default, npts = 3,)
qpits = (QPIterator(Uxh, qargs), QPIterator(Uyh, qargs),)
return integrate!(elits, qpits, trueux, trueuy)
end
end
# To run the example, evaluate this file which will compile the module
# `.tut_stokes_ht_p2_p1_reddy`.
using .tut_stokes_ht_p2_p1_reddy
tut_stokes_ht_p2_p1_reddy.run()
# # Solve the Stokes equation of colliding flow: vector Laplacian version
# Synopsis: Compute the solution of the Stokes equation of two-dimensional
# incompressible viscous flow for a manufactured problem of colliding flow.
# Hood-Taylor triangular elements are used.
# The "manufactured" colliding flow example from Elman et al 2014. The
# Hood-Taylor formulation with quadratic triangles for the velocity and
# continuous pressure on linear triangles.
# The pressure is shown here with contours, and the velocities visualized with
# arrows at random points.
# 
# The formulation is the one described in Donea and Huerta, Finite Element
# Methods for Flow Problems, and in Elman, et al., Finite elements
# and fast iterative solvers, p. 132. In brief, it is the vector
# Laplacian version.
# The complete code is in the file [`tut_stokes_ht_p2_p1_veclap.jl`](tut_stokes_ht_p2_p1_veclap.jl).
# The solution will be defined within a module in order to eliminate conflicts
# with data or functions defined elsewhere.
module tut_stokes_ht_p2_p1_veclap
# We'll need some functionality from linear algebra, static arrays, and the mesh
# libraries. Some plotting will be produced to visualize structure of the
# stiffness matrix. Finally we will need the `Elfel` functionality.
using LinearAlgebra
using StaticArrays
using MeshCore.Exports
using MeshSteward.Exports
using Elfel.Exports
using UnicodePlots
# The boundary value problem is expressed in this weak form
# ```math
# \int_{V}\mu {\underline{\nabla}}\;\underline{\delta v}\; :\;
# {\underline{\nabla}}\;\underline{u}\; \mathrm{d} V
# - \int_{V} \mathrm{div}(\underline{\delta v})\; p\; \mathrm{d} V = 0,\quad \forall \underline{\delta v}
# ```
# ```math
# - \int_{V} \delta q\; \mathrm{div}(\underline{u}) \; \mathrm{d} V = 0,\quad \forall \delta q
# ```
# Here ``\underline{\delta v}`` are the test functions in the velocity space,
# and ``\delta q`` are the pressure test functions. Further ``\underline
# {u}`` is the trial velocity, and ``p`` is the trial pressure. The operator
# ``:`` produces the componentwise scalar product of the gradients,
# ```math
# A\; :\;B = A_{ij}B_{ij} .
# ```
# Then the first term may be rewritten as
# ```math
# {\underline{\nabla}}\;\underline{\delta v}\; :\;
# {\underline{\nabla}}\;\underline{u} =
# (\underline{\nabla}\delta v_x)^T \underline{\nabla}u_x +
# (\underline{\nabla}\delta v_y)^T \underline{\nabla}u_y
# ```
function run()
mu = 1.0 # dynamic viscosity
A = 1.0 # half of the length of the side of the square
N = 100 # number of element edges per side of the square
# These three functions define the true velocity components and the true
# pressure.
trueux = (x, y) -> 20 * x * y ^ 3
trueuy = (x, y) -> 5 * x ^ 4 - 5 * y ^ 4
truep = (x, y) -> 60 * x ^ 2 * y - 20 * y ^ 3
# Construct the two meshes for the mixed method. They need to support the
# velocity and pressure spaces.
vmesh, pmesh = genmesh(A, N)
# Construct the velocity space: it is a vector space with two components. The
# degrees of freedom are real numbers (`Float64`). The velocity mesh
# carries finite elements of continuity class ``H^1``, i. e. both the
# function values and the derivatives are square integrable. Each node
# carries 2 degrees of freedom, hence there are two velocity components per
# node.
Uh = FESpace(Float64, vmesh, FEH1_T6(), 2)
# Now we apply the boundary conditions at the nodes around the
# circumference.
locs = geometry(vmesh)
# We use searching based on the presence of the node within a box. The
# entire boundary will be captured within these four boxes, provided we
# inflate those boxes with a little tolerance, since we cannot rely on the
# nodes being located precisely at the given coordinates.
boxes = [[-A A -A -A], [-A -A -A A], [A A -A A], [-A A A A]]
inflate = A / N / 100
for box in boxes
vl = vselect(locs; box = box, inflate = inflate)
for i in vl
# Remember that all components of the velocity are known at the
# boundary.
setebc!(Uh, 0, i, 1, trueux(locs[i]...))
setebc!(Uh, 0, i, 2, trueuy(locs[i]...))
end
end
# Now we construct the pressure space. It is a continuous, piecewise linear
# space supported on a mesh of three-node triangles.
Ph = FESpace(Float64, pmesh, FEH1_T3(), 1)
# The pressure in this "enclosed" flow example is only known up to a constant.
# Setting the pressure degree of freedom at one node makes the solution
# unique.
atcenter = vselect(geometry(pmesh); nearestto = [0.0, 0.0])
setebc!(Ph, 0, atcenter[1], 1, 0.0)
# Number the degrees of freedom. First all the free degrees of freedom are
# numbered, both velocities and pressures. Next all the data degrees of
# freedom are numbered, again both for the velocities and for the
# pressures.
numberdofs!([Uh, Ph])
# The total number of degrees of freedom is now calculated.
tndof = ndofs(Uh) + ndofs(Ph)
# As is the total number of unknowns.
tnunk = nunknowns(Uh) + nunknowns(Ph)
# Assemble the coefficient matrix.
K = assembleK(Uh, Ph, tndof, mu)
# Display the structure of the indefinite stiffness matrix. Note that this
# is the complete matrix, including rows and columns for all the degrees of
# freedom, unknown and known.
p = spy(K, canvas = DotCanvas)
display(p)
# Solve the linear algebraic system. First construct the system vector of all
# the degrees of freedom: the first `tnunk` rows correspond to the unknowns,
# and the subsequent rows hold the data degrees of freedom.
U = fill(0.0, tndof)
gathersysvec!(U, [Uh, Ph])
# Note that the vector `U` holds nonzero numbers only in the rows that
# correspond to the data degrees of freedom. Multiplying the stiffness matrix
# with this vector generates a load vector on the right-hand side. Otherwise
# there is no loading, hence the vector `F` consists of all zeros.
F = fill(0.0, tndof)
solve!(U, K, F, tnunk)
# Once we have solved the system of linear equations, we can distribute the
# solution from the vector `U` into the finite element spaces.
scattersysvec!([Uh, Ph], U)
# Given that the solution is manufactured, i. e. exactly known, we can
# calculate the true errors.
@show ep = evaluate_pressure_error(Ph, truep)
@show ev = evaluate_velocity_error(Uh, trueux, trueuy)
# Postprocessing. First we make attributes, scalar nodal attributes,
# associated with the meshes for the pressures and the velocity.
makeattribute(Ph, "p", 1)
makeattribute(Uh, "ux", 1)
makeattribute(Uh, "uy", 2)
# The pressure and the velocity components are then written out into two VTK
# files.
vtkwrite("tut_stokes_ht_p2_p1_veclap-p", baseincrel(pmesh), [(name = "p",), ])
vtkwrite("tut_stokes_ht_p2_p1_veclap-v", baseincrel(vmesh), [(name = "ux",), (name = "uy",)])
# The method converges very well, but, why not, here is the true pressure
# written out into a VTK file as well. We create a synthetic attribute by
# evaluating the true pressure at the locations of the nodes of the
# pressure mesh.
geom = geometry(Ph.mesh)
ir = baseincrel(Ph.mesh)
ir.right.attributes["pt"] = VecAttrib([truep(geom[i]...) for i in 1:length(geom)])
vtkwrite("tut_stokes_ht_p2_p1_veclap-pt", baseincrel(pmesh), [(name = "pt",), ])
return true
end
function genmesh(A, N)
# Hood-Taylor pair of meshes is needed. The first mesh is for the
# velocities, composed of six-node triangles.
vmesh = attach!(Mesh(), T6block(2 * A, 2 * A, N, N), "velocity")
# Now translate so that the center of the square is at the origin of the
# coordinates.
ir = baseincrel(vmesh)
transform(ir, x -> x .- A)
# The second mesh is used for the pressures, and it is composed of
# three-node triangles such that the corner nodes are shared between the
# first and the second mesh.
pmesh = attach!(Mesh(), T6toT3(baseincrel(vmesh, "velocity")), "pressure")
# Return the pair of meshes
return vmesh, pmesh
end
function assembleK(Uh, Ph, tndof, mu)
function integrate!(ass, elits, qpits, mu)
# This array defines the components for the element degrees of freedom:
# ``c(i)`` is 1 or 2.
c = edofcompnt(Uh)
# These are the totals of the velocity and pressure degrees of freedom
# per element.
n_du, n_dp = ndofsperel.((Uh, Ph))
# The local matrix assemblers are used as if they were ordinary
# elementwise dense matrices. Here they are defined.
kuu = LocalMatrixAssembler(n_du, n_du, 0.0)
kup = LocalMatrixAssembler(n_du, n_dp, 0.0)
for el in zip(elits...)
uel, pel = el
# The local matrix assemblers are initialized with zeros for the
# values, and with the element degree of freedom vectors to be used
# in the assembly. The assembler `kuu` is used for the velocity
# degrees of freedom, and the assembler `kup` collect the coupling
# coefficients between the velocity and the pressure. The function
# `eldofs` collects the global numbers of the degrees of freedom
# either for the velocity space, or for the pressure space
# (`eldofs(pel)`).
init!(kuu, eldofs(uel), eldofs(uel))
init!(kup, eldofs(uel), eldofs(pel))
for qp in zip(qpits...)
uqp, pqp = qp
# The integration is performed using the velocity quadrature points.
Jac, J = jacjac(uel, uqp)
JxW = J * weight(uqp)
gradNu = bfungrad(uqp, Jac) # gradients of the velocity basis functions
Np = bfun(pqp) # pressure basis functions
# This double loop corresponds precisely to the integrals of the
# weak form: dot product of the gradients of the basis
# functions. This is the matrix in the upper left corner.
for i in 1:n_du, j in 1:n_du
if c[i] == c[j]
kuu[j, i] += (mu * JxW) * (dot(gradNu[j], gradNu[i]))
end
end
# And this is the coupling matrix in the top right corner.
for i in 1:n_dp, j in 1:n_du
kup[j, i] += gradNu[j][c[j]] * (-JxW * Np[i])
end
end
# Assemble the matrices. The submatrix off the diagonal is assembled
# twice, once as itself, and once as its transpose.
assemble!(ass, kuu)
assemble!(ass, kup) # top right corner
assemble!(ass, transpose(kup)) # bottom left corner
end
return ass # return the updated assembler of the global matrix
end
# In the `assembleK` function we first create the element iterators. We
# can go through all the elements, both in the velocity finite element
# space and in the pressure finite element space, that define the domain of
# integration using this iterator. Each time a new element is accessed,
# some data are precomputed such as the element degrees of freedom,
# components of the degree of freedom, etc. Note that we need to iterate
# two finite element spaces, hence we create a tuple of iterators.
elits = (FEIterator(Uh), FEIterator(Ph))
# These are the quadrature point iterators. We know that the elements are
# triangular. We choose the three-point rule, to capture the quadratic
# component in the velocity space. Quadrature-point iterators provide
# access to basis function values and gradients, the Jacobian matrix and
# the Jacobian determinant, the location of the quadrature point and so
# on. Note that we need to iterate the quadrature rules of
# two finite element spaces, hence we create a tuple of iterators.
qargs = (kind = :default, npts = 3,)
qpits = (QPIterator(Uh, qargs), QPIterator(Ph, qargs))
# The matrix will be assembled into this assembler, which is initialized
# with the total number of degrees of freedom (the dimension of the coefficient
# matrix before partitioning into unknowns and data degrees of freedom).
ass = SysmatAssemblerSparse(0.0)
start!(ass, tndof, tndof)
# The integration is carried out, and then...
integrate!(ass, elits, qpits, mu)
# ...we materialize the sparse stiffness matrix and return it.
return finish!(ass)
end
# The linear algebraic system is solved by partitioning. The vector `U` is
# initially all zero, except in the degrees of freedom which are prescribed as
# nonzero. Therefore the product of the stiffness matrix and the vector `U`
# yields the loads due to the nonzero essential boundary conditions. The
# submatrix of the stiffness matrix corresponding to the free degrees of
# freedom (unknowns), `K[1:nu, 1:nu]`, is then used to solve for the unknowns
# `U[1:nu]`.
function solve!(U, K, F, nu)
KT = K * U
U[1:nu] = K[1:nu, 1:nu] \ (F[1:nu] - KT[1:nu])
end
# The function `evaluate_pressure_error` evaluates the true ``L^2`` error of
# the pressure. It does that by integrating the square of the difference
# between the approximate pressure and the true pressure, the true pressure
# being provided by the `truep` function.
function evaluate_pressure_error(Ph, truep)
function integrate!(elit, qpit, truep)
n_dp = ndofsperel(elit)
E = 0.0
for el in elit
dofvals = eldofvals(el)
for qp in qpit
Jac, J = jacjac(el, qp)
JxW = J * weight(qp)
Np = bfun(qp)
pt = truep(location(el, qp)...)
pa = 0.0
for j in 1:n_dp
pa += (dofvals[j] * Np[j])
end
E += (JxW) * (pa - pt)^2
end
end
return sqrt(E)
end
elit = FEIterator(Ph)
qargs = (kind = :default, npts = 3,)
qpit = QPIterator(Ph, qargs)
return integrate!(elit, qpit, truep)
end
# The function `evaluate_velocity_error` evaluates the true ``L^2`` error of
# the velocity. It does that by integrating the square of the difference
# between the approximate velocity and the true velocity, the true velocity
# being provided by the `trueux`, `trueuy` functions.
function evaluate_velocity_error(Uh, trueux, trueuy)
function integrate!(elit, qpit, trueux, trueuy)
n_du = ndofsperel(elit)
uedofcomp = edofcompnt(Uh)
E = 0.0
for el in elit
udofvals = eldofvals(el)
for qp in qpit
Jac, J = jacjac(el, qp)
JxW = J * weight(qp)
Nu = bfun(qp)
uxt = trueux(location(el, qp)...)
uyt = trueuy(location(el, qp)...)
uxa = 0.0
uya = 0.0
for j in 1:n_du
(uedofcomp[j] == 1) && (uxa += (udofvals[j] * Nu[j]))
(uedofcomp[j] == 2) && (uya += (udofvals[j] * Nu[j]))
end
E += (JxW) * ((uxa - uxt)^2 + (uya - uyt)^2)
end
end
return sqrt(E)
end
elit = FEIterator(Uh)
qargs = (kind = :default, npts = 3,)
qpit = QPIterator(Uh, qargs)
return integrate!(elit, qpit, trueux, trueuy)
end
end
# To run the example, evaluate this file which will compile the module
# `.tut_stokes_ht_p2_p1_veclap`.
using .tut_stokes_ht_p2_p1_veclap
tut_stokes_ht_p2_p1_veclap.run()