licenses sequencelengths 1-3 | version stringclasses 677 values | tree_hash stringlengths 40-40 | path stringclasses 1 value | type stringclasses 2 values | size stringlengths 2-8 | text stringlengths 25-67.1M | package_name stringlengths 2-41 | repo stringlengths 33-86 |
---|---|---|---|---|---|---|---|---|
[
"MIT"
] | 0.1.1 | 98fea05b931563024ba7559a3d9e3cc498cf814a | code | 1253 | using SQLDataFrameTools
using Test
using DataFrames
using Dates
df = DataFrame([[1, 2, 3, 4, 5, 6]], ["a"])
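# Stub query function: ignores its arguments and returns the fixed DataFrame above, standing in for a real SQL fetch.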
sql_fn_stub(args...;kwargs...) = df
@testset "SQLDataFrameTools.jl" begin
@test SQLDataFrameTools.expired(SQLDataFrameTools.QueryCache("SELECT 1", ()->true, ".", :jdf), SQLDataFrameTools.Dates.now())
@test SQLDataFrameTools.expired(SQLDataFrameTools.QueryCache("SELECT 2", ()->true, ".", :jdf), SQLDataFrameTools.Dates.Day(1))
@test endswith(SQLDataFrameTools.cachepath("a", "hash", :jdf), "hash.jdf")
@test isa(SQLDataFrameTools.QueryCache("SELECT 3", ()->true, ".", :jdf), SQLDataFrameTools.QueryCache)
@test isa(SQLDataFrameTools.QueryCache("SELECT 4", ()->true, ".", :jdf, dictencode=false), SQLDataFrameTools.QueryCache)
@test isa(SQLDataFrameTools.QueryCache("SELECT 5", ()->true, ".", :jdf, subformat=:zip), SQLDataFrameTools.QueryCache)
@test isa(SQLDataFrameTools.QueryCache("SELECT 6", ()->true, ".", :jdf, dictencode=false, subformat=:zip), SQLDataFrameTools.QueryCache)
@test SQLDataFrameTools.df_cached(SQLDataFrameTools.QueryCache("UNUSED", sql_fn_stub, ".", :arrow), now()) == df
@test SQLDataFrameTools.df_cached(SQLDataFrameTools.QueryCache("UNUSED", sql_fn_stub, ".", :arrow), Day(30)) == df
end
| SQLDataFrameTools | https://github.com/lawless-m/SQLDataFrameTools.jl.git |
|
[
"MIT"
] | 0.1.1 | 98fea05b931563024ba7559a3d9e3cc498cf814a | docs | 394 | # SQLDataFrameTools.jl
Wrapper around DataFrameTools to cache SQL result sets, with expiration
[](https://github.com/lawless-m/SQLDataFrameTools.jl/actions/workflows/CI.yml?query=branch%3Amain)
[Documentation](https://lawless-m.github.io/SQLDataFrameTools.jl/dev/)
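A minimal usage sketch (inferred from the package's test suite; `my_select` is a placeholder for any function that returns a `DataFrame`):
```julia
using SQLDataFrameTools
using DataFrames, Dates

# Placeholder query function; a real one would run SQL against a database.
my_select(args...; kwargs...) = DataFrame(a = 1:3)

q = SQLDataFrameTools.QueryCache("SELECT * FROM t", my_select, ".", :arrow)
df = SQLDataFrameTools.df_cached(q, Day(1))  # refetches if the cached copy has expired
```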
registering
| SQLDataFrameTools | https://github.com/lawless-m/SQLDataFrameTools.jl.git |
|
[
"MIT"
] | 0.1.1 | 98fea05b931563024ba7559a3d9e3cc498cf814a | docs | 223 | # SQLDataFrameTools.jl
Documentation for SQLDataFrameTools.jl
```@docs
QueryCache
```
```@docs
df_cached
```
```@docs
expired
```
```@docs
select_fn
```
```@docs
fetch_and_combine
```
```@docs
Dfetch_and_combine
```
| SQLDataFrameTools | https://github.com/lawless-m/SQLDataFrameTools.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 1569 | # Usage:
# julia benchmark/run_benchmarks.jl
using BenchmarkTools
using ImageContrastAdjustment
using ImageTransformations
using TestImages
using ImageCore
on_CI = haskey(ENV, "GITHUB_ACTIONS")
img = testimage("mandril_gray")
tst_sizes = on_CI ? (64, ) : (64, 128)
tst_types = (N0f8, Float32, Gray{N0f8}, Gray{Float32}, RGB{N0f8}, RGB{Float32})
const SUITE = BenchmarkGroup()
# TODO: add MidwayEqualization
alg_list = (( "LinearStretching", LinearStretching()),
( "AdaptiveEqualization", AdaptiveEqualization()),
( "ContrastStretching", ContrastStretching()),
( "Equalization", Equalization()),
( "GammaCorrection", GammaCorrection()),
( "Matching", Matching))
function add_algorithm_benchmark!(suite, img, alg_name, alg;
tst_sizes, tst_types)
haskey(suite, alg_name) || (suite[alg_name] = BenchmarkGroup())
for T in tst_types
haskey(suite[alg_name], T) || (suite[alg_name][T] = BenchmarkGroup())
for sz in tst_sizes
tst_img = imresize(T.(img), (sz, sz))
if alg === Matching
tst_alg = alg(targetimg = tst_img)
else
tst_alg = alg
end
suite[alg_name][T]["$sz×$sz"] = @benchmarkable adjust_histogram($tst_img, $tst_alg)
end
end
end
for (alg_name, alg) in alg_list
add_algorithm_benchmark!(SUITE, img, alg_name, alg;
tst_sizes=tst_sizes,
tst_types=tst_types)
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 207 | # To run it locally, BenchmarkCI should be added to root project
using BenchmarkCI
on_CI = haskey(ENV, "GITHUB_ACTIONS")
BenchmarkCI.judge()
on_CI ? BenchmarkCI.postjudge() : BenchmarkCI.displayjudgement()
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 238 | using Documenter, ImageContrastAdjustment
makedocs(sitename="Documentation",
format = Documenter.HTML(prettyurls = get(ENV, "CI", nothing) == "true"))
deploydocs(repo = "github.com/JuliaImages/ImageContrastAdjustment.jl.git")
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 1296 | module ImageContrastAdjustment
using ImageCore
using ImageBase
using ImageTransformations: imresize
# Where possible we avoid a direct dependency to reduce the number of [compat] bounds
using ImageCore.MappedArrays
using Parameters: @with_kw # Same as Base.@kwdef but works on Julia 1.0
# TODO Relax this to all image color types
const GenericGrayImage = AbstractArray{<:Union{Number, AbstractGray}}
# TODO: port HistogramAdjustmentAPI to ImagesAPI
include("HistogramAdjustmentAPI/HistogramAdjustmentAPI.jl")
import .HistogramAdjustmentAPI: AbstractHistogramAdjustmentAlgorithm,
adjust_histogram, adjust_histogram!
include("build_histogram.jl")
include("algorithms/common.jl")
include("algorithms/adaptive_equalization.jl")
include("algorithms/equalization.jl")
include("algorithms/linear_stretching.jl")
include("algorithms/contrast_stretching.jl")
include("algorithms/gamma_correction.jl")
include("algorithms/matching.jl")
include("algorithms/midway_equalization.jl")
include("compat.jl")
export
# main types and functions
AdaptiveEqualization,
Equalization,
MidwayEqualization,
Matching,
GammaCorrection,
LinearStretching,
ContrastStretching,
build_histogram,
adjust_histogram,
adjust_histogram!
end # module
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 7743 | function partition_interval(nbins::Integer, minval::Real, maxval::Real)
return range(minval, step=(maxval - minval) / nbins, length=nbins)
end
function partition_interval(nbins::Integer, minval::AbstractGray, maxval::AbstractGray)
return partition_interval(nbins, gray(minval), gray(maxval))
end
"""
edges, counts = rebin(edges0, counts0, nbins, minval, maxval)
Re-bin a histogram (represented by `edges0` and `counts0`) to have `nbins` spanning
`minval` to `maxval`.
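For example (a hedged sketch; `img` is any 8-bit grayscale image):
```julia
edges0, counts0 = build_histogram(img)            # fine-grained 256-bin histogram
edges, counts = rebin(edges0, counts0, 32, 0, 1)  # coarsen to 32 bins spanning [0, 1]
```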
"""
function rebin(edges0, counts0, nbins, minval, maxval)
edges = partition_interval(nbins, minval, maxval)
counts = fill(0, 0:nbins)
o = Base.Order.Forward
for (i, e) in enumerate(edges0)
index = searchsortedlast(edges, e, o)
counts[index] += counts0[i]
end
counts[0] += counts0[0]
return edges, counts
end
"""
```
edges, count = build_histogram(img) # For 8-bit images only
edges, count = build_histogram(img, nbins)
edges, count = build_histogram(img, nbins; minval, maxval)
edges, count = build_histogram(img, edges)
```
Generates a histogram for the image over `nbins` bins spread over the interval `[minval, maxval]`.
Color images are automatically converted to grayscale.
# Output
Returns `edges`, an `AbstractRange` that specifies how the interval
`[minval, maxval]` is divided into bins, and an array `count` which records the
concomitant bin frequencies. In particular, `count` has the following
properties:
* `count[0]` is the number of values `x` satisfying `x < edges[1]`
* `count[i]` is the number of values `x` that satisfy `edges[i] <= x < edges[i+1]`
* `count[end]` is the number of values `x` satisfying `x >= edges[end]`.
* `length(count) == length(edges)+1`.
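For instance, an illustrative sketch of these indexing conventions (not part of the original docstring; the actual counts depend on the data):
```julia
edges, count = build_histogram(rand(8, 8), 4, minval = 0, maxval = 1)
count[0]    # number of values below edges[1]
count[end]  # number of values at or above edges[end]
```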
# Details
One can consider a histogram as a piecewise-constant model of a probability
density function ``f`` [1]. Suppose that ``f`` has support on some interval ``I =
[a,b]``. Let ``m`` be an integer and ``a = a_1 < a_2 < \\ldots < a_m < a_{m+1} =
b`` a sequence of real numbers. Construct a sequence of intervals
```math
I_1 = [a_1,a_2], I_2 = (a_2, a_3], \\ldots, I_{m} = (a_m,a_{m+1}]
```
which partition ``I`` into subsets ``I_j`` ``(j = 1, \\ldots, m)`` on which
``f`` is constant. These subsets satisfy ``I_i \\cap I_j = \\emptyset, \\forall
i \\neq j``, and are commonly referred to as *bins*. Together they encompass the
entire range of data values such that ``\\sum_j |I_j | = | I |``. Each bin has
width ``w_j = |I_j| = a_{j+1} - a_j`` and height ``h_j`` which is the constant
probability density over the region of the bin. Integrating the constant
probability density over the width of the bin ``w_j`` yields a probability mass
of ``\\pi_j = h_j w_j`` for the bin.
For a sample ``x_1, x_2, \\ldots, x_N``, let
```math
n_j = \\sum_{n = 1}^{N}\\mathbf{1}_{(I_j)}(x_n),
\\quad \\text{where} \\quad
\\mathbf{1}_{(I_j)}(x) =
\\begin{cases}
1 & \\text{if} \\; x \\in I_j,\\\\
0 & \\text{otherwise},
\\end{cases},
```
represent the number of samples falling into the interval ``I_j``. An estimate
for the probability mass of the ``j``th bin is given by the relative frequency
``\\hat{\\pi}_j = \\frac{n_j}{N}``, and the histogram estimator of the probability
density function is defined as
```math
\\begin{aligned}
\\hat{f}_n(x) & = \\sum_{j = 1}^{m}\\frac{n_j}{Nw_j} \\mathbf{1}_{(I_j)}(x) \\\\
& = \\sum_{j = 1}^{m}\\frac{\\hat{\\pi}_j}{w_j} \\mathbf{1}_{(I_j)}(x) \\\\
& = \\sum_{j = 1}^{m}\\hat{h}_j \\mathbf{1}_{(I_j)}(x).
\\end{aligned}
```
The function ``\\hat{f}_n(x)`` is a genuine density estimator because ``\\hat{f}_n(x) \\ge 0`` and
```math
\\begin{aligned}
\\int_{-\\infty}^{\\infty}\\hat{f}_n(x) \\operatorname{d}x & = \\sum_{j=1}^{m} \\frac{n_j}{Nw_j} w_j \\\\
& = 1.
\\end{aligned}
```
# Options
Various options for the parameters of this function are described in more detail
below.
## Choices for `nbins`
You can specify the number of discrete bins for the histogram. When specifying
the number of bins consider the maximum number of graylevels that your image
type supports. For example, with an image of type `N0f8` there is a maximum
of 256 possible graylevels. Hence, if you request more than 256 bins for
that type of image you should expect to obtain zero counts for numerous bins.
## Choices for `minval`
You have the option to specify the lower bound of the interval over which the
histogram will be computed. If `minval` is not specified then the minimum
value present in the image is taken as the lower bound.
## Choices for `maxval`
You have the option to specify the upper bound of the interval over which the
histogram will be computed. If `maxval` is not specified then the maximum
value present in the image is taken as the upper bound.
## Choices for `edges`
If you do not designate the number of bins, nor the lower or upper bound of the
interval, then you have the option to directly stipulate how the intervals will
be divided by specifying an `AbstractRange` type.
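For example (a hedged sketch), the edges can be passed directly as a range:
```julia
edges, counts = build_histogram(img, 0.0:0.1:0.9)
```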
# Example
Compute the histogram of a grayscale image.
```julia
using TestImages, FileIO, ImageView
img = testimage("mandril_gray");
edges, counts = build_histogram(img, 256, minval = 0, maxval = 1)
```
Given a color image, compute the histogram of the red channel.
```julia
img = testimage("mandrill")
r = red.(img)
edges, counts = build_histogram(r, 256, minval = 0, maxval = 1)
```
# References
[1] E. Herrholz, "Parsimonious Histograms," Ph.D. dissertation, Inst. of Math. and Comp. Sci., University of Greifswald, Greifswald, Germany, 2011.
"""
function build_histogram(img::GenericGrayImage, nbins::Integer = 256;
minval::Union{Real,AbstractGray}=minimum_finite(img),
maxval::Union{Real,AbstractGray}=maximum_finite(img))
edges = partition_interval(nbins, minval, maxval)
build_histogram(img, edges)
end
function build_histogram(img::AbstractArray{C}, nbins::Integer = 256; kwargs...) where C<:Color
build_histogram(mappedarray(Gray{eltype(C)}, img), nbins; kwargs...)
end
# Performance optimizations for build_histogram(img, nbins)
function build_histogram(img::AbstractArray{T}) where T<:Union{N0f8, AbstractGray{N0f8}}
edges = range(N0f8(0), step=eps(N0f8), length=256)
counts = fill(0, 0:256)
@simd for v in img
@inbounds counts[reinterpret(gray(v))+1] += 1
end
return edges, counts
end
build_histogram(img::AbstractArray{C}) where C<:Color{N0f8} =
build_histogram(mappedarray(Gray{N0f8}, img))
function build_histogram(img::AbstractArray{T}, nbins::Integer;
minval::Union{Real,AbstractGray}=minimum_finite(img),
maxval::Union{Real,AbstractGray}=maximum_finite(img)) where T<:Union{N0f8, AbstractGray{N0f8}}
edgesraw, countsraw = build_histogram(img)
return rebin(edgesraw, countsraw, nbins, minval, maxval)
end
# build_histogram(img, edges)
function build_histogram(img::AbstractArray{C}, edges::AbstractRange) where C<:Color
build_histogram(mappedarray(Gray{eltype(C)}, img), edges)
end
function build_histogram(img::GenericGrayImage, edges::AbstractRange)
Base.has_offset_axes(edges) && throw(ArgumentError("edges must be indexed starting with 1"))
lb = first(axes(edges,1))-1
ub = last(axes(edges,1))
first_edge, last_edge = first(edges), last(edges)
inv_step_size = 1/step(edges)
counts = fill(0, lb:ub)
@inbounds for val in img
if isnan(val)
continue
else
if val >= last_edge
counts[ub] += 1
elseif val < first_edge
counts[lb] += 1
else
index = floor(Int, gray((val-first_edge)*inv_step_size)) + 1
counts[index] += 1
end
end
end
edges, counts
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 143 | # https://github.com/JuliaLang/julia/pull/29679
if VERSION < v"1.1.0-DEV.472"
isnothing(::Any) = false
isnothing(::Nothing) = true
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 789 | # This is a temporary module to validate `AbstractImageFilter` idea
# proposed in https://github.com/JuliaImages/ImagesAPI.jl/pull/3
module HistogramAdjustmentAPI
using ImageCore # ColorTypes is sufficient
# TODO Relax this to all image color types
using ..ImageContrastAdjustment: GenericGrayImage
"""
AbstractImageAlgorithm
The root of image algorithms type system
"""
abstract type AbstractImageAlgorithm end
"""
AbstractImageFilter <: AbstractImageAlgorithm
Filters are image algorithms whose input and output are both images
"""
abstract type AbstractImageFilter <: AbstractImageAlgorithm end
include("histogram_adjustment.jl")
# we do not export any symbols since we don't require
# package developers to implement all the APIs
end # module HistogramAdjustmentAPI
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 5428 | # usage example for package developer:
#
# import HistogramAdjustmentAPI: AbstractHistogramAdjustmentAlgorithm,
# adjust_histogram, adjust_histogram!
"""
AbstractHistogramAdjustmentAlgorithm <: AbstractImageFilter
The root type of the `ImageContrastAdjustment` package.
Any concrete histogram adjustment algorithm shall subtype it to support
[`adjust_histogram`](@ref) and [`adjust_histogram!`](@ref) APIs.
# Examples
All histogram adjustment algorithms in ImageContrastAdjustment are called in the
following pattern:
```julia
# first generate an algorithm instance
f = LinearStretching()
# then pass the algorithm to `adjust_histogram`
img_adjusted = adjust_histogram(img, f)
# or use in-place version `adjust_histogram!`
img_adjusted = similar(img)
adjust_histogram!(img_adjusted, img, f)
```
Some algorithms also receive additional information as an argument,
e.g., `nbins` of `Equalization`.
```julia
# you can explicitly specify the parameters
f = Equalization(nbins = 32)
```
For more examples, please check [`adjust_histogram`](@ref),
[`adjust_histogram!`](@ref) and concrete algorithms.
"""
abstract type AbstractHistogramAdjustmentAlgorithm <: AbstractImageFilter end
adjust_histogram!(out::Union{GenericGrayImage, AbstractArray{<:Color3}},
img,
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) =
f(out, img, args...; kwargs...)
# TODO: Relax this to all color types
function adjust_histogram!(img::Union{GenericGrayImage, AbstractArray{<:Color3}},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...)
tmp = copy(img)
f(img, tmp, args...; kwargs...)
return img
end
function adjust_histogram(::Type{T},
img,
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T
out = similar(Array{T}, axes(img))
adjust_histogram!(out, img, f, args...; kwargs...)
return out
end
adjust_histogram(img::AbstractArray{T},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Colorant =
adjust_histogram(T, img, f, args...; kwargs...)
# Do not promote Number to Gray{<:Number}
adjust_histogram(img::AbstractArray{T},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Number =
adjust_histogram(T, img, f, args...; kwargs...)
# Handle instance where the input is a sequence of images.
adjust_histogram!(out_sequence::Vector{T},
img_sequence,
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Union{GenericGrayImage, AbstractArray{<:Color3}} =
f(out_sequence, img_sequence, args...; kwargs...)
# TODO: Relax this to all color types
function adjust_histogram!(img_sequence::Vector{T},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Union{GenericGrayImage, AbstractArray{<:Color3}}
tmp = copy(img_sequence)
f(img_sequence, tmp, args...; kwargs...)
return img_sequence
end
function adjust_histogram(::Type{T},
img_sequence::Vector{<:AbstractArray},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T
N = length(img_sequence)
out_sequence = [similar(Array{T}, axes(img_sequence[n])) for n = 1:N]
adjust_histogram!(out_sequence, img_sequence, f, args...; kwargs...)
return out_sequence
end
adjust_histogram(img_sequence::Vector{<:AbstractArray{T}},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Colorant =
adjust_histogram(T, img_sequence, f, args...; kwargs...)
# Do not promote Number to Gray{<:Number}
adjust_histogram(img_sequence::Vector{<:AbstractArray{T}},
f::AbstractHistogramAdjustmentAlgorithm,
args...; kwargs...) where T <: Number =
adjust_histogram(T, img_sequence, f, args...; kwargs...)
### Docstrings
"""
adjust_histogram!([out,] img, f::AbstractHistogramAdjustmentAlgorithm, args...; kwargs...)
Adjust histogram of `img` using algorithm `f`.
# Output
If `out` is specified, it will be changed in place. Otherwise `img` will be changed in place.
# Examples
Simply pass an algorithm to `adjust_histogram!`:
```julia
img_adjusted = similar(img)
adjust_histogram!(img_adjusted, img, f)
```
For cases where you just want to change `img` in place, you don't need to manually
allocate `img_adjusted`; just use the convenient method:
```julia
adjust_histogram!(img, f)
```
See also: [`adjust_histogram`](@ref)
"""
adjust_histogram!
"""
adjust_histogram([T::Type,] img, f::AbstractHistogramAdjustmentAlgorithm, args...; kwargs...)
Adjust histogram of `img` using algorithm `f`.
# Output
The return image `img_adjusted` is an `Array{T}`.
If `T` is not specified, then it's inferred.
# Examples
Simply pass the input image and algorithm to `adjust_histogram`:
```julia
img_adjusted = adjust_histogram(img, f)
```
This reads as "`adjust_histogram` of image `img` using algorithm `f`".
You can also explicitly specify the return type:
```julia
img_adjusted_float32 = adjust_histogram(Gray{Float32}, img, f)
```
See also [`adjust_histogram!`](@ref) for in-place histogram adjustment.
"""
adjust_histogram
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 23327 | """
```
AdaptiveEqualization <: AbstractHistogramAdjustmentAlgorithm
AdaptiveEqualization(; nbins = 256, minval = 0, maxval = 1, rblocks = 8, cblocks = 8, clip = 0.1)
adjust_histogram([T,] img, f::AdaptiveEqualization)
adjust_histogram!([out,] img, f::AdaptiveEqualization)
```
Performs Contrast Limited Adaptive Histogram Equalisation (CLAHE) on the input
image. It differs from ordinary histogram equalization in the respect that the
adaptive method computes several histograms, each corresponding to a distinct
section of the image, and uses them to redistribute the lightness values of the
image. It is therefore suitable for improving the local contrast and enhancing
the definitions of edges in each region of an image.
# Details
Histogram equalisation was initially conceived to improve the contrast in a
single-channel grayscale image. The method transforms the distribution of the
intensities in an image so that they are as uniform as possible [1]. The natural
justification for uniformity is that the image has better contrast if the
intensity levels of an image span a wide range on the intensity scale. As it
turns out, the necessary transformation is a mapping based on the cumulative
histogram---see [Equalization](@ref) for more details.
A natural extension of histogram equalisation is to apply the contrast
enhancement locally rather than globally [2]. Conceptually, one can imagine that
the process involves partitioning the image into a grid of rectangular regions
and applying histogram equalisation based on the local CDF of each contextual
region. However, to smooth the transition of the pixels from one contextual
region to another, the mapping of a pixel is not necessarily done solely based
on the local CDF of its contextual region. Rather, the mapping of a pixel may be
interpolated based on the CDF of its contextual region, and the CDFs of the
immediate neighbouring regions.
In adaptive histogram equalisation the image ``\\mathbf{G}`` is partitioned into
``P \\times Q`` equisized submatrices,
```math
\\mathbf{G} = \\begin{bmatrix}
\\mathbf{G}_{11} & \\mathbf{G}_{12} & \\ldots & \\mathbf{G}_{1C} \\\\
\\mathbf{G}_{21} & \\mathbf{G}_{22} & \\ldots & \\mathbf{G}_{2C} \\\\
\\vdots & \\vdots & \\ldots & \\vdots \\\\
\\mathbf{G}_{R1} & \\mathbf{G}_{R2} & \\ldots & \\mathbf{G}_{RC} \\\\
\\end{bmatrix}.
```
For each submatrix ``\\mathbf{G}_{rc}``, one computes a concomitant CDF, which we
shall denote by ``T_{rc}(G_{i,j})``. In the most general case, we will require
four CDFs
```math
\\begin{aligned}
T_1(v) & \\triangleq T_{rc}(G_{i,j}) \\\\
T_2(v) & \\triangleq T_{(r+1)c}(G_{i,j}) \\\\
T_3(v) & \\triangleq T_{(r+1)(c+1)}(G_{i,j}) \\\\
T_4(v) & \\triangleq T_{r(c+1)}(G_{i,j}).
\\end{aligned}
```
In order to determine which particular CDFs will be
used in the interpolation step, it is useful to (i) introduce the function
```math
\\Phi(\\mathbf{G}_{rc}) = \\left( \\phi_{rc}, \\phi'_{rc}\\right) \\triangleq \\left(rP - \\frac{P}{2}, cQ - \\frac{Q}{2} \\right),
```
(ii) form the sequences ``\\left(\\phi_{11}, \\phi_{21}, \\ldots, \\phi_{R1} \\right)``
and ``\\left(\\phi'_{11}, \\phi'_{12}, \\ldots, \\phi'_{1C} \\right)``, and (iii) define
```math
\\begin{aligned}
t & \\triangleq \\frac{i - \\phi_{r1}}{\\phi_{(r+1)1} - \\phi_{r1} } \\\\
u & \\triangleq \\frac{j - \\phi'_{1c}}{\\phi'_{1(c+1)} - \\phi'_{1c} }.
\\end{aligned}
```
#### Case I (Interior)
For a pixel ``G_{i,j}`` in the range
```math
P - \\frac{P}{2} \\le i \\le RP - \\frac{P}{2} \\quad \\text{and} \\quad Q - \\frac{Q}{2} \\le j \\le CQ - \\frac{Q}{2}.
```
values of ``r`` and ``c`` are implicitly defined by the solution to the inequalities
```math
\\phi_{r1} \\le i < \\phi_{(r+1)1} \\quad \\text{and} \\quad \\phi'_{1c} \\le j < \\phi'_{1(c+1)}.
```
The *bilinearly interpolated* transformation that maps an intensity ``v`` at location ``(i,j)`` in the image
to an intensity ``v'`` is given by [3]
```math
v' \\triangleq \\bar{T}(v) = (1-t) (1-u)T_1(G_{i,j}) + t(1-u)T_2(G_{i,j}) + tuT_3(G_{i,j}) +(1-t)uT_4(G_{i,j}).
```
#### Case II (Vertical Border)
For a pixel ``G_{i,j}`` in the range
```math
P - \\frac{P}{2} \\le i \\le RP - \\frac{P}{2} \\quad \\text{and} \\quad 1 \\le j < Q - \\frac{Q}{2} \\; \\cup \\; CQ - \\frac{Q}{2} < j \\le CQ,
```
``r`` is implicitly defined by the solution to the inequality ``\\phi_{r1} \\le i < \\phi_{(r+1)1}``, while
```math
c = \\begin{cases}
1, & \\text{if } \\quad 1 \\le j < Q - \\frac{Q}{2} \\\\
C, & \\text{if } \\quad CQ - \\frac{Q}{2} < j \\le CQ.
\\end{cases}
```
The *linearly interpolated* transformation that maps an intensity ``v`` at location ``(i,j)`` in the image
to an intensity ``v'`` is given by
```math
v' \\triangleq \\bar{T}(v) = (1-t)T_1(G_{i,j}) + tT_2(G_{i,j}).
```
#### Case III (Horizontal Border)
For a pixel ``G_{i,j}`` in the range
```math
1 \\le i < P - \\frac{P}{2} \\;\\cup \\; RP - \\frac{P}{2} < i \\le RP \\quad \\text{and} \\quad Q - \\frac{Q}{2} \\le j \\le CQ - \\frac{Q}{2},
```
``c`` is implicitly defined by the solution to the inequality ``\\phi'_{1c} \\le j < \\phi'_{1(c+1)}``, while
```math
r = \\begin{cases}
1, & \\text{if } \\quad 1 \\le i < P - \\frac{P}{2} \\\\
R, & \\text{if } \\quad RP - \\frac{P}{2} < i \\le RP .
\\end{cases}
```
The *linearly interpolated* transformation that maps an intensity ``v`` at location ``(i,j)`` in the image
to an intensity ``v'`` is given by
```math
v' \\triangleq \\bar{T}(v) = (1-u)T_1(G_{i,j}) + uT_4(G_{i,j}).
```
#### Case IV (Corners)
For a pixel ``G_{i,j}`` in the range
```math
1 \\le i < \\frac{P}{2} \\;\\cup \\; RP - \\frac{P}{2} < i \\le RP \\quad \\text{and} \\quad 1 \\le j < CQ - \\frac{Q}{2} \\; \\cup \\; CQ - \\frac{Q}{2} < j \\le CQ ,
```
we have
```math
r = \\begin{cases}
1, & \\text{if } \\quad 1 \\le i < P - \\frac{P}{2} \\\\
R, & \\text{if } \\quad RP - \\frac{P}{2} < i \\le RP
\\end{cases}
\\quad \\text{and} \\quad
c = \\begin{cases}
1, & \\text{if } \\quad 1 \\le j < Q - \\frac{Q}{2} \\\\
C, & \\text{if } \\quad CQ - \\frac{Q}{2} < j \\le CQ.
\\end{cases}
```
The transformation that maps an intensity ``v`` at location ``(i,j)`` in the image
to an intensity ``v'`` is given by
```math
v' \\triangleq \\bar{T}(v) = T_1(G_{i,j}).
```
## Limiting Contrast
An unfortunate side-effect of contrast enhancement is that it has a tendency to
amplify the level of noise in an image, especially when the magnitude of the
contrast enhancement is very high. The magnitude of contrast enhancement is
associated with the gradient of ``T(\\cdot)``, because the gradient determines the
extent to which consecutive input intensities are stretched across the
grey-level spectrum. One can diminish the level of noise amplification by
limiting the magnitude of the contrast enhancement, that is, by limiting the
magnitude of the gradient.
Since the derivative of ``T(\\cdot)`` is the empirical density ``\\hat{f}_{G}``,
the slope of the mapping function at any input intensity is proportional to the
height of the histogram ``\\hat{f}_{G}`` at that intensity. Therefore,
limiting the slope of the local mapping function is equivalent to clipping the
height of the histogram. A detailed description of the implementation details
of the clipping process can be found in [2].
# Options
Various options for the parameters of this function are described in more detail
below.
## Choices for `img`
The function can handle a variety of input types. The returned image
depends on the input type.
For coloured images, the input is converted to
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the Y channel is equalised.
This is then combined with the I and Q channels and the resulting image converted
to the same type as the input.
## Choices for `nbins` in `AdaptiveEqualization`
You can specify the total number of bins in the histogram of each local region.
## Choices for `rblocks` and `cblocks` in `AdaptiveEqualization`
The `rblocks` and `cblocks` specify the number of blocks to divide the input
image into in each direction. By default both values are set to eight.
## Choices for `clip` in `AdaptiveEqualization`
The `clip` parameter must be a value between 0 and 1. It defines an implicit
threshold at which a histogram is clipped. Counts that exceed the threshold are
redistributed as equally as possible so that no bin exceeds the threshold limit.
A value of zero means no clipping, whereas a value of one sets the threshold at
the smallest feasible bin limit. A bin limit is feasible if all bin counts can be
redistributed such that no bin count exceeds the limit. In practice, a `clip` value
of zero corresponds to maximal contrast enhancement, whereas a `clip` value of
one corresponds to minimal contrast enhancement. The default value is `0.1`.
## Choices for `minval` and `maxval` in `AdaptiveEqualization`
If `minval` and `maxval` are specified then intensities are equalized to the range
[`minval`, `maxval`]. The default values are 0 and 1.
# Example
```julia
using TestImages, FileIO, ImageView
img = testimage("mandril_gray")
imgeq = adjust_histogram(img, AdaptiveEqualization(nbins = 256, rblocks = 4, cblocks = 4, clip = 0.2))
imshow(img)
imshow(imgeq)
```
# References
1. R. C. Gonzalez and R. E. Woods. *Digital Image Processing (3rd Edition)*. Upper Saddle River, NJ, USA: Prentice-Hall, 2006.
2. S. M. Pizer, E. P. Amburn, J. D. Austin, R. Cromartie, A. Geselowitz, T. Greer, B. ter Haar Romeny, J. B. Zimmerman and K. Zuiderveld, "Adaptive histogram equalization and its variations," *Computer Vision, Graphics, and Image Processing*, vol. 38, no. 1, p. 99, Apr. 1987. [10.1016/S0734-189X(87)80186-X](https://doi.org/10.1016/s0734-189x(87)80156-1)
3. W. H. Press, S. A. Teukolsky, W. T. Vetterling, and B. P. Flannery. *Numerical Recipes: The Art of Scientific Computing (3rd Edition)*. New York, NY, USA: Cambridge University Press, 2007.
"""
@with_kw struct AdaptiveEqualization{T₁ <: Union{Real,AbstractGray},
T₂ <: Union{Real,AbstractGray},
T₃ <: Real} <: AbstractHistogramAdjustmentAlgorithm
nbins::Int = 256
minval::T₁ = 0.0
maxval::T₂ = 1.0
rblocks::Int = 8
cblocks::Int = 8
clip::T₃ = 0.1
end
function (f::AdaptiveEqualization)(out::GenericGrayImage, img::GenericGrayImage)
validate_parameters(f)
height, width = length.(axes(img))
# If necessary, resize the image so that the requested number of blocks fit exactly.
resized_height = ceil(Int, height / (2 * f.rblocks)) * 2 * f.rblocks
resized_width = ceil(Int, width / (2 * f.cblocks)) * 2 * f.cblocks
must_resize = (resized_height != height) || (resized_width != width)
if must_resize
img_tmp = imresize(img, (resized_height, resized_width))
out_tmp = copy(img_tmp)
else
img_tmp = img
out_tmp = out
end
# Determine the relevant pixel coordinates for each block.
block_height = resized_height ÷ f.rblocks
block_width = resized_width ÷ f.cblocks
r_intervals = [StepRange((r - 1) * block_height + 1, 1, r * block_height) for r = 1:f.rblocks]
c_intervals = [StepRange((c - 1) * block_width + 1, 1, c * block_width) for c = 1:f.cblocks]
function construct_hist(roi::AbstractArray)
edges, histogram = build_histogram(roi, f.nbins, minval = f.minval, maxval = f.maxval)
end
function construct_cdf(edges::AbstractArray, histogram::AbstractArray)
lb = first(axes(histogram, 1))
ub = last(axes(histogram, 1))
csum = cumsum(histogram[lb:ub])
cdf = csum / csum[end]
return edges, cdf
end
# Construct a histogram for each block in the image.
block_hist = [construct_hist(view(img_tmp, r_intervals[r], c_intervals[c], :)) for r = 1:f.rblocks, c = 1:f.cblocks]
# Redistribute histogram counts in accordance with the clip weight.
map!(x-> (x[1], clip_histogram!(x[2], f.clip)), block_hist, block_hist)
# Construct a CDF for each block in the image.
block_cdf = [construct_cdf(block_hist[r,c]...) for r = 1:f.rblocks, c = 1:f.cblocks]
block_centroid_r = [r * block_height - block_height ÷ 2 for r = 1:f.rblocks]
block_centroid_c = [c * block_width - block_width ÷ 2 for c = 1:f.cblocks]
intensity_range = (f.minval, f.maxval)
# Transform pixels using linear and bilinear interpolation of the block CDFs.
transform_image!(out_tmp, img_tmp, block_centroid_r, block_centroid_c,
block_width, block_height, intensity_range,
f.cblocks, f.rblocks, block_cdf)
out .= must_resize ? imresize(out_tmp, (height, width)) : out_tmp
return out
end
function validate_parameters(f::AdaptiveEqualization)
!(0 <= f.clip <= 1) && throw(ArgumentError("The parameter `clip` must be in the range [0..1]."))
!(1 <= f.rblocks && 1 <= f.cblocks) && throw(ArgumentError("The parameters `rblocks` and `cblocks` must be greater than 0."))
end
function (f::AdaptiveEqualization)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(img)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
#=
TODO: Understand the cause and solution of this error.
When I pass a view I run into this error on Julia 1.1.
ERROR: ArgumentError: an array of type `Base.ReinterpretArray` shares memory with another argument and must
make a preventative copy of itself in order to maintain consistent semantics,
but `copy(A)` returns a new array of type `Array{Float64,3}`. To fix, implement:
`Base.unaliascopy(A::Base.ReinterpretArray)::typeof(A)`
=#
#adjust_histogram!(view(yiq_view,1,:,:), f)
y = comp1.(yiq)
adjust_histogram!(y, f)
yiq_view[1, :, :] .= y
out .= convert.(T, yiq)
end
(f::AdaptiveEqualization)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
function clip_histogram!(histogram::AbstractArray, clip_weight::Number)
limit, unfilled_bin_count = determine_threshold(histogram, clip_weight)
initial_excess = determine_excess!(histogram, limit)
remaining_excess = perform_initial_redistribution!(histogram, limit, initial_excess, unfilled_bin_count)
perform_iterative_redistribution!(histogram, limit, remaining_excess)
return histogram
end
function determine_excess!(histogram::AbstractArray, limit::Number)
excess = zero(limit)
for n in eachindex(histogram)
val = histogram[n]
if val > limit
excess = excess + (val - limit)
histogram[n] = limit
end
end
return excess
end
function determine_threshold(histogram::AbstractArray, clip_weight::Number)
sorted_hist = sort(collect(histogram), rev = true)
required_capacity = zeros(axes(sorted_hist))
available_capacity = zeros(axes(sorted_hist))
# If we pick an empirical bin count as the actual limit, determine how many
# counts we will need to move (required_capacity) and how much space we
# have in the bins (available_capacity).
for i = 1:length(sorted_hist)
required_capacity[i] = sum(sorted_hist[1:i] .- sorted_hist[i])
available_capacity[i] = sum(sorted_hist[i] .- sorted_hist[i:end])
end
# Find the smallest feasible bin limit.
largest_feasible_index = 1
for i in eachindex(required_capacity)
if available_capacity[i] < required_capacity[i]
largest_feasible_index = i - 1
break
end
end
# The target bin limit is a convex combination of the highest bin count
# and the smallest feasible bin count.
smallest_feasible_limit = sorted_hist[largest_feasible_index]
target_limit = (1 - clip_weight) * first(sorted_hist) + (clip_weight) * smallest_feasible_limit
# Set the limit to be the empirical count that is close to the target limit.
chosen_index = searchsortedlast(sorted_hist, round(Int,target_limit); rev = true)
threshold = sorted_hist[chosen_index]
# Knowing how many bins have spare capacity will help us distribute the
# excess bin counts more efficiently in the perform_initial_redistribution!
# function.
unfilled_bin_count = length(sorted_hist) - chosen_index
return threshold, unfilled_bin_count
end
function perform_initial_redistribution!(histogram::AbstractArray, limit::Number, excess::Number, N::Integer)
m = excess ÷ N
for n in eachindex(histogram)
val = histogram[n]
if excess > 0
if val < limit - m
histogram[n] = histogram[n] + m
excess = excess - m
elseif val < limit
excess = excess - (limit - val)
histogram[n] = limit
end
end
end
return excess
end
function perform_iterative_redistribution!(histogram::AbstractArray, limit::Number, excess::Number)
while excess > 0
for n in eachindex(histogram)
val = histogram[n]
if excess > 0
if val < limit
excess = excess - 1
histogram[n] = histogram[n] + 1
end
end
end
end
end
function apply_cdf_transform(val::Union{Real,AbstractGray}, minval::Union{Real,AbstractGray}, maxval::Union{Real,AbstractGray}, edges::AbstractArray, cdf::AbstractArray)
val, minval, maxval = gray(val), gray(minval), gray(maxval)
first_edge = first(edges)
inv_step_size = 1 / step(edges)
scale = (maxval - minval) / (cdf[end] - first(cdf))
if val >= edges[end]
newval = cdf[end]
elseif val < first_edge
newval = first(cdf)
else
index = floor(Int, (val - first_edge) * inv_step_size) + 1
newval = cdf[index]
end
# Scale the new intensity value so that it lies in the range [minval, maxval].
newval = minval + (newval - first(cdf)) * scale
end
function transform_image!(out, img, block_centroid_r, block_centroid_c, block_width, block_height, intensity_range, cblocks, rblocks, block_cdf)
height, width = length.(axes(out))
r₀ = first(block_centroid_r) + 1
r₁ = last(block_centroid_r) - 1
c₀ = first(block_centroid_c) + 1
c₁ = last(block_centroid_c) - 1
block_dimensions = (block_width, block_height)
# Interior
bounds = (r₀:r₁, c₀:c₁)
block_centroids = (block_centroid_r, block_centroid_c)
transform_interior!(out, img, bounds, block_centroids, block_dimensions,
intensity_range, block_cdf)
# West
bounds = (r₀:r₁, 1:(c₀-1))
block_c_idx = 1
transform_vertical_strip!(out, img, bounds, block_centroid_r, block_c_idx,
block_height, intensity_range, block_cdf)
# East
bounds = (r₀:r₁, (c₁+1):width)
block_c_idx = cblocks
transform_vertical_strip!(out, img, bounds, block_centroid_r, block_c_idx,
block_height, intensity_range, block_cdf)
# North
bounds = (1:r₀-1, c₀:c₁)
block_r_idx = 1
transform_horizontal_strip!(out, img, bounds, block_centroid_c, block_r_idx,
block_width, intensity_range, block_cdf)
# South
bounds = (r₁+1:height, c₀:c₁)
block_r_idx = rblocks
transform_horizontal_strip!(out, img, bounds, block_centroid_c, block_r_idx,
block_width, intensity_range, block_cdf)
# North-West
bounds = (1:(r₀ - 1), 1:(c₀ - 1))
block_r_idx = 1
block_c_idx = 1
transform_corner!(out, img, bounds, block_r_idx, block_c_idx,
intensity_range, block_cdf)
# North-East
bounds = (1:(r₀ - 1), (c₁ + 1):width)
block_r_idx = 1
block_c_idx = cblocks
transform_corner!(out, img, bounds, block_r_idx, block_c_idx,
intensity_range, block_cdf)
# South-West
bounds = ((r₁ + 1):height, 1:(c₀ - 1))
block_r_idx = rblocks
block_c_idx = 1
transform_corner!(out, img, bounds, block_r_idx , block_c_idx,
intensity_range, block_cdf)
# South-East
bounds = ((r₁ + 1):height, (c₁ + 1):width)
block_r_idx = rblocks
block_c_idx = cblocks
transform_corner!(out, img, bounds, block_r_idx, block_c_idx,
intensity_range, block_cdf)
end
function transform_interior!(out, img, bounds, block_centroids, block_dimensions, intensity_range, block_cdf)
rows, cols = bounds
block_centroid_r, block_centroid_c = block_centroids
block_width, block_height = block_dimensions
minval, maxval = intensity_range
inv_block_height = 1 / block_height
inv_block_width = 1 / block_width
for r in rows
for c in cols
rᵢ = round(Int, r * inv_block_height)
cᵢ = round(Int, c * inv_block_width)
t = (r - block_centroid_r[rᵢ]) / block_height
u = (c - block_centroid_c[cᵢ]) / block_width
T₁ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ]...)
T₂ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ + 1, cᵢ]...)
T₃ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ + 1, cᵢ + 1]...)
T₄ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ + 1]...)
interpolated_val = (1 - t)*(1 - u)*T₁ + t*(1 - u)*T₂ + t*u*T₃ + (1 - t)*u*T₄
out[r,c] = eltype(img) <: Integer ? ceil(interpolated_val) : interpolated_val
end
end
end
function transform_vertical_strip!(out, img, bounds, block_centroid_r, cᵢ, block_height, intensity_range, block_cdf)
rows, cols = bounds
minval, maxval = intensity_range
inv_block_height = 1 / block_height
for r in rows
for c in cols
rᵢ = round(Int, r * inv_block_height)
t = (r - block_centroid_r[rᵢ]) / block_height
T₁ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ]...)
T₂ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ + 1, cᵢ]...)
interpolated_val = (1 - t)*T₁ + t*T₂
out[r,c] = eltype(img) <: Integer ? ceil(interpolated_val) : interpolated_val
end
end
end
function transform_horizontal_strip!(out, img, bounds, block_centroid_c, rᵢ, block_width, intensity_range, block_cdf)
rows, cols = bounds
minval, maxval = intensity_range
inv_block_width = 1 / block_width
for r in rows
for c in cols
cᵢ = round(Int, c * inv_block_width)
u = (c - block_centroid_c[cᵢ]) / block_width
T₁ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ]...)
T₂ = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ + 1]...)
interpolated_val = (1 - u)*T₁ + u*T₂
out[r,c] = eltype(img) <: Integer ? ceil(interpolated_val) : interpolated_val
end
end
end
function transform_corner!(out, img, bounds, rᵢ, cᵢ, intensity_range, block_cdf)
rows, cols = bounds
minval, maxval = intensity_range
for r in rows
for c in cols
val = apply_cdf_transform(img[r,c], minval, maxval, block_cdf[rᵢ, cᵢ]...)
out[r,c] = eltype(img) <: Integer ? ceil(val) : val
end
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 3148 | function transform_density!(out::GenericGrayImage, img::GenericGrayImage, edges::AbstractRange, newvals::AbstractVector)
first_edge, last_edge = first(edges), last(edges)
first_newval, last_newval = first(newvals), last(newvals)
inv_step_size = 1/step(edges)
function transform(val)
val = gray(val)
if val >= last_edge
return last_newval
elseif val < first_edge
return first_newval
else
index = floor(Int, (val-first_edge)*inv_step_size) + 1
@inbounds newval = newvals[index]
return newval
end
end
map!(transform, out, img)
end
function transform_density!(out::GenericGrayImage, img::GenericGrayImage{T}, edges::AbstractRange, newvals::AbstractVector) where T<:Union{N0f8,AbstractGray{N0f8}}
# When dealing with 8-bit images, we can improve computational performance by precomputing the lookup table
# for how the intensities transform (there are only 256 calculations of intensities rather than `length(img)`
# calculations of intensities).
lookup = Vector{eltype(newvals)}(undef, 256)
invoke(transform_density!, Tuple{GenericGrayImage,GenericGrayImage,AbstractRange,AbstractVector},
lookup, zero(T):eps(T):oneunit(T), edges, newvals)
map!(out, img) do val
lookup[uint8val(val)+1]
end
end
uint8val(x::N0f8) = reinterpret(x)
uint8val(x::AbstractGray) = uint8val(gray(x))
function build_lookup(cdf, minval::T, maxval::T) where T
first_cdf = first(cdf)
# Scale the new intensity value so that it lies in the range [minval, maxval].
scale = (maxval - minval) / (cdf[end] - first_cdf)
if T <: Integer
return T[ceil(minval + (x - first_cdf) * scale) for x in cdf]
end
return T[minval + (x - first_cdf) * scale for x in cdf]
end
function construct_pdfs(img::GenericGrayImage, targetimg::AbstractArray, edges::AbstractRange)
_, histogram = build_histogram(img, edges)
_, target_histogram = build_histogram(targetimg, edges)
return edges, histogram / sum(histogram), target_histogram / sum(target_histogram)
end
function construct_pdfs(img::GenericGrayImage, targetimg::AbstractArray, nbins::Integer = 256)
if eltype(img) <: AbstractGray
imin, imax = 0, 1
else
imin, imax = min(minimum_finite(img), minimum_finite(targetimg)), max(maximum_finite(img), maximum_finite(targetimg))
end
edges, histogram = build_histogram(img, nbins, minval = imin, maxval = imax)
_, target_histogram = build_histogram(targetimg, edges)
return edges, histogram / sum(histogram), target_histogram / sum(target_histogram)
end
function lookup_icdf(cdf::AbstractArray, targetcdf::AbstractArray)
lookup_table = zeros(Int, length(cdf))
i = 1
for j = 1:length(cdf)
p = cdf[j]
while i < length(targetcdf) && targetcdf[i+1] <= p
i += 1
end
lookup_table[j] = i
end
lookup_table
end
function cdf2pdf!(pdf::AbstractArray, cdf::AbstractArray)
pdf[1] = cdf[1]
for i = length(cdf)-1:-1:2
pdf[i] = cdf[i] - cdf[i-1]
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 3347 |
"""
```
ContrastStretching <: AbstractHistogramAdjustmentAlgorithm
ContrastStretching(; t = 0.5, slope = 1.0)
adjust_histogram([T,] img, f::ContrastStretching)
adjust_histogram!([out,] img, f::ContrastStretching)
```
Returns an image where intensities below `t` are compressed into a narrower
range of dark intensities, and values above `t` are compressed into a narrower
band of light intensities.
# Details
Contrast stretching is a transformation that enhances or reduces (for `slope` >
1 or < 1, respectively) the contrast near saturation (0 and 1).
It is given by the relation
```math
f(x) = \\frac{1}{1 + \\left(\\frac{t}{x} \\right)^s}, \\; s \\in \\mathbb{R},
```
where ``s`` represents the `slope` argument.
# Options
Various options for the parameters of the `adjust_histogram` and
`ContrastStretching` type are described in more detail below.
## Choices for `img`
The function can handle a variety of input types. The returned
image depends on the input type.
For colored images, the input is converted to the
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the intensities of the Y
channel are stretched to the specified range. The modified Y channel is then
combined with the I and Q channels and the resulting image converted to the same
type as the input.
## Choice for `t`
The value of `t` needs to be in the unit interval. If left unspecified a
default value of 0.5 is utilized.
## Choice for `slope`
The value of `slope` can be any real number. If left unspecified a
default value of 1.0 is utilized.
# Example
```julia
using ImageContrastAdjustment, ImageView, TestImages
img = testimage("mandril_gray")
ret = adjust_histogram(img, ContrastStretching(t = 0.6, slope = 3))
```
# References
1. Gonzalez, R. C., Woods, R. E., & Eddins, S. L. (2004). *Digital image processing using MATLAB* (Vol. 624). Upper Saddle River, New Jersey: Pearson-Prentice-Hall.
"""
@with_kw struct ContrastStretching{T₁ <: Union{Real,AbstractGray},
T₂ <: Union{Real,AbstractGray}} <: AbstractHistogramAdjustmentAlgorithm
t::T₁ = 0.5
slope::T₂ = 1.0
ϵ::Union{T₁,Nothing} = nothing
end
ContrastStretching(t::T₁, slope::T₂, ϵ::Union{Real,AbstractGray}) where {T₁ <: Union{Real,AbstractGray},
T₂ <: Union{Real,AbstractGray}} =
ContrastStretching{T₁,T₂}(t, slope, T₁(ϵ))
function (f::ContrastStretching)(out::GenericGrayImage, img::GenericGrayImage)
T = eltype(out)
ϵ = f.ϵ === nothing ? eps(T) : f.ϵ
out .= img
map!(out,out) do val
if isnan(val)
return val
else
newval = contrast_stretch(val, f.t, f.slope, ϵ)
return T <: Integer ? round(Int, newval ) : newval
end
end
end
function (f::ContrastStretching)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(img)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
adjust_histogram!(view(yiq_view,1,:,:), f)
out .= convert.(T, yiq)
end
(f::ContrastStretching)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
function contrast_stretch(x, t, s, ϵ)
return 1 / (1 + (t / (x+ϵ))^s)
end
contrast_stretch(x::Union{FixedPoint,AbstractGray{<:FixedPoint}}, t, s, ϵ) = contrast_stretch(float(x), t, s, ϵ)
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 4738 | """
```
Equalization <: AbstractHistogramAdjustmentAlgorithm
Equalization(; nbins = 256, minval = 0, maxval = 1)
adjust_histogram([T,] img, f::Equalization)
adjust_histogram!([out,] img, f::Equalization)
```
Returns a histogram equalized image with a granularity of `nbins` number of bins.
# Details
Histogram equalization was initially conceived to improve the contrast in a
single-channel grayscale image. The method transforms the
distribution of the intensities in an image so that they are as uniform as
possible [1]. The natural justification for uniformity
is that the image has better contrast if the intensity levels of an image span
a wide range on the intensity scale. As it turns out, the necessary
transformation is a mapping based on the cumulative histogram.
One can consider an ``L``-bit single-channel ``I \\times J`` image with gray
values in the set ``\\{0,1,\\ldots,L-1 \\}``, as a collection of independent and
identically distributed random variables. Specifically, let the sample space
``\\Omega`` be the set of all ``IJ``-tuples ``\\omega
=(\\omega_{11},\\omega_{12},\\ldots,\\omega_{1J},\\omega_{21},\\omega_{22},\\ldots,\\omega_{2J},\\omega_{I1},\\omega_{I2},\\ldots,\\omega_{IJ})``,
where each ``\\omega_{ij} \\in \\{0,1,\\ldots, L-1 \\}``. Furthermore, impose a
probability measure on ``\\Omega`` such that the functions ``\\Omega \\ni
\\omega \\to \\omega_{ij} \\in \\{0,1,\\ldots,L-1\\}`` are independent and
identically distributed.
One can then regard an image as a matrix of random variables ``\\mathbf{G} =
[G_{i,j}(\\omega)]``, where each function ``G_{i,j}: \\Omega \\to \\mathbb{R}``
is defined by
```math
G_{i,j}(\\omega) = \\frac{\\omega_{ij}}{L-1},
```
and each ``G_{i,j}`` is distributed according to some unknown density ``f_{G}``.
While ``f_{G}`` is unknown, one can approximate it with a normalized histogram
of gray levels,
```math
\\hat{f}_{G}(v)= \\frac{n_v}{IJ},
```
where
```math
n_v = \\left | \\left\\{(i,j)\\, |\\, G_{i,j}(\\omega) = v \\right \\} \\right |
```
represents the number of times a gray level with intensity ``v`` occurs in
``\\mathbf{G}``. To transform the distribution of the intensities so that
they are as uniform as possible one needs to find a mapping ``T(\\cdot)`` such
that ``T(G_{i,j}) \\thicksim U ``. The required mapping turns out to be the
cumulative distribution function (CDF) of the empirical density
``\\hat{f}_{G}``,
```math
T(G_{i,j}) = \\int_0^{G_{i,j}}\\hat{f}_{G}(w)\\mathrm{d} w.
```
# Options
Various options for the parameters of the `adjust_histogram` function and
`Equalization` type are described in more detail below.
## Choices for `img`
The `adjust_histogram` function can handle a variety of
input types. By default, the type of the returned image matches the input type.
For colored images, the input is converted to
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the Y channel is equalized.
This is then combined with the I and Q channels and the resulting image converted
to the same type as the input.
## Choices for `nbins` in `Equalization`
You can specify the total number of bins in the histogram.
## Choices for `minval` and `maxval` in `Equalization`
If `minval` and `maxval` are specified then intensities are equalized to the range
[`minval`, `maxval`]. The default values are 0 and 1.
# Example
```julia
using TestImages, FileIO, ImageView
img = testimage("mandril_gray")
imgeq = adjust_histogram(img, Equalization(nbins = 256, minval = 0, maxval = 1))
imshow(img)
imshow(imgeq)
```
# References
1. R. C. Gonzalez and R. E. Woods. *Digital Image Processing (3rd Edition)*. Upper Saddle River, NJ, USA: Prentice-Hall, 2006.
"""
@with_kw struct Equalization{T₁ <: Union{Real,AbstractGray},
T₂ <: Union{Real,AbstractGray}} <: AbstractHistogramAdjustmentAlgorithm
nbins::Int = 256
minval::T₁ = 0.0
maxval::T₂ = 1.0
end
function (f::Equalization)(out::GenericGrayImage, img::GenericGrayImage)
minval, maxval = convert(eltype(out), f.minval), convert(eltype(out), f.maxval)
edges, histogram = build_histogram(img, f.nbins, minval = minval, maxval = maxval)
lb = first(axes(histogram,1))
ub = last(axes(histogram,1))
N = length(img)
cdf = cumsum(histogram[lb:ub]/N)
newvals = build_lookup(cdf, minval, maxval)
transform_density!(out, img, edges, newvals)
end
function (f::Equalization)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(img)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
adjust_histogram!(view(yiq_view,1,:,:), f)
out .= convert.(T, yiq)
end
(f::Equalization)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 4022 |
"""
```
GammaCorrection <: AbstractHistogramAdjustmentAlgorithm
GammaCorrection(; gamma = 1)
adjust_histogram([T,] img, f::GammaCorrection)
adjust_histogram!([out,] img, f::GammaCorrection)
```
Returns a gamma corrected image.
# Details
Gamma correction is a non-linear transformation given by the relation
```math
f(x) = x^\\gamma \\quad \\text{for} \\; x \\in \\mathbb{R}, \\gamma > 0.
```
It is called a *power law* transformation because one quantity varies as a power
of another quantity.
Gamma correction has historically been used to preprocess
an image to compensate for the fact that the intensity of light generated by a
physical device is not usually a linear function of the applied signal but
instead follows a power law [1]. For example, for many Cathode Ray Tubes (CRTs) the
emitted light intensity on the display is approximately equal to the voltage
raised to the power of γ, where γ ∈ [1.8, 2.8]. Hence preprocessing a raw image with
an exponent of 1/γ would have ensured a linear response to brightness.
Research in psychophysics has also established an [empirical power law
](https://en.wikipedia.org/wiki/Stevens%27s_power_law) between light intensity and perceptual
brightness. Hence, gamma correction often serves as a useful image enhancement
tool.
# Options
Various options for the parameters of the `adjust_histogram` function and the
`Gamma` type are described in more detail below.
## Choices for `img`
The function can handle a variety of input types. The returned
image depends on the input type.
For colored images, the input is converted to YIQ type and the Y channel is
gamma corrected. This is then combined with the I and Q channels and the
resulting image converted to the same type as the input.
## Choice for `gamma`
The `gamma` value must be a non-zero positive number. A `gamma` value less than
one will yield a brighter image whereas a value greater than one will produce a
darker image. If left unspecified a default value of one is assumed.
# Example
```julia
using ImageContrastAdjustment, ImageView
# Create an example image consisting of a linear ramp of intensities.
n = 32
intensities = 0.0:(1.0/n):1.0
img = repeat(intensities, inner=(20,20))'
# Brighten the dark tones.
imgadj = adjust_histogram( img, GammaCorrection(gamma = 1/2))
# Display the original and adjusted image.
imshow(img)
imshow(imgadj)
```
# References
1. W. Burger and M. J. Burge. *Digital Image Processing*. Texts in Computer Science, 2016. [doi:10.1007/978-1-4471-6684-9](https://doi.org/10.1007/978-1-4471-6684-9)
"""
@with_kw struct GammaCorrection{T <: Real} <: AbstractHistogramAdjustmentAlgorithm
gamma::T = 1.0
end
function (f::GammaCorrection)(out::GenericGrayImage, img::GenericGrayImage)
out .= img
correct_gamma!(out, f.gamma)
return out
end
# TODO Expand this to more generic gray color types.
function correct_gamma!(img::AbstractArray{Gray{T}}, gamma::Real) where T <: FixedPointNumbers.Normed
γ = Float64(gamma)
# Create a lookup-table for the gamma transformation of the grayvalues.
raw_type = FixedPointNumbers.rawtype(T)
table = zeros(T, typemax(raw_type) + 1)
for i in zero(raw_type):typemax(raw_type)
table[i + 1] = T((i / typemax(raw_type))^γ)
end
# Map the pixels to their new grayvalues.
map!(img,img) do p
table[p.val.i + 1]
end
end
function correct_gamma!(img::GenericGrayImage, gamma::Real)
γ = Float64(gamma)
T = eltype(img)
map!(img,img) do val
if isnan(val)
return val
else
return T <: Integer ? round(Int, val^γ) : val^γ
end
end
end
function (f::GammaCorrection)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(out)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
adjust_histogram!(view(yiq_view,1,:,:), f)
out .= convert.(T, yiq)
end
(f::GammaCorrection)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 9583 |
"""
```
LinearStretching <: AbstractHistogramAdjustmentAlgorithm
LinearStretching(; [src_minval], [src_maxval],
dst_minval=0, dst_maxval=1,
no_clamp=false)
LinearStretching((src_minval, src_maxval) => (dst_minval, dst_maxval))
LinearStretching((src_minval, src_maxval) => nothing)
LinearStretching(nothing => (dst_minval, dst_maxval))
adjust_histogram([T,] img, f::LinearStretching)
adjust_histogram!([out,] img, f::LinearStretching)
```
Returns an image where the range of the intensities spans the interval [`dst_minval`, `dst_maxval`].
# Details
Linear stretching (also called *normalization*) is a contrast enhancing
transformation that is used to modify the dynamic range of the image. In
particular, suppose that the input image has gray values in the range [A,B] and
one wishes to change the dynamic range to [a,b] using a linear mapping, then the
necessary transformation is given by the relation
```math
f(x) = (x-A) \\frac{b-a}{B-A} + a.
```
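For example, stretching a hypothetical source range [A, B] = [0.2, 0.6] onto the
destination range [a, b] = [0, 1] maps the intensity x = 0.4 to
(0.4 - 0.2) * (1 - 0) / (0.6 - 0.2) + 0 = 0.5.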
# Options
Various options for the parameters of the `adjust_histogram` function and the
`LinearStretching` type are described in more detail below.
## Choices for `img`
The function can handle a variety of input types. The returned
image depends on the input type.
For colored images, the input is converted to the
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the intensities of the Y
channel are stretched to the specified range. The modified Y channel is then
combined with the I and Q channels and the resulting image converted to the same
type as the input.
## Choices for `dst_minval` and `dst_maxval`
If destination value range `dst_minval` and `dst_maxval` are specified then intensities are
mapped to the range [`dst_minval`, `dst_maxval`]. The default values are 0 and 1.
## Choices for `src_minval` and `src_maxval`
The source value range `src_minval` and `src_maxval` specifies the intensity range of the input
image. By default, these are the finite minimum and maximum of `img`. If custom values are provided,
output intensities that fall outside the range `[dst_minval, dst_maxval]` are clamped to that range.
## `no_clamp`
Set `no_clamp=true` to disable the automatic clamping even if the output intensity value
exceeds the range `[dst_minval, dst_maxval]`. Note that a clamp is still applied for types
that have a limited value range; for example, if the input eltype is `N0f8`, then the output will
be clamped to `[0.0N0f8, 1.0N0f8]` even if `no_clamp==true`.
# Example
```julia
using ImageContrastAdjustment, TestImages
img = testimage("mandril_gray")
# Stretches the contrast in `img` so that it spans the unit interval.
imgo = adjust_histogram(img, LinearStretching(dst_minval = 0, dst_maxval = 1))
```
For convenience, constructing a `LinearStretching` object using a `Pair` is also supported:
```julia
# these two constructors are equivalent
LinearStretching(src_minval=0.1, src_maxval=0.9, dst_minval=0.05, dst_maxval=0.95)
LinearStretching((0.1, 0.9) => (0.05, 0.95))
# replace the part with `nothing` to use default values, e.g.,
# specify only destination value range
LinearStretching(nothing => (0.05, 0.95))
# specify only source value range and use default destination value range, i.e., (0, 1)
LinearStretching((0.1, 0.9) => nothing)
```
# References
1. W. Burger and M. J. Burge. *Digital Image Processing*. Texts in Computer Science, 2016. [doi:10.1007/978-1-4471-6684-9](https://doi.org/10.1007/978-1-4471-6684-9)
"""
@with_kw struct LinearStretching{T} <: AbstractHistogramAdjustmentAlgorithm
src_minval::T = nothing
src_maxval::T = nothing
dst_minval::T = 0.0f0
dst_maxval::T = 1.0f0
minval::T = nothing
maxval::T = nothing
no_clamp::Bool = false
function LinearStretching(src_minval::T1,
src_maxval::T2,
dst_minval::T3,
dst_maxval::T4,
minval::T5=nothing,
maxval::T6=nothing,
no_clamp::Bool=false) where {T1 <: Union{Nothing,Real,AbstractGray},
T2 <: Union{Nothing,Real,AbstractGray},
T3 <: Union{Nothing,Real,AbstractGray},
T4 <: Union{Nothing,Real,AbstractGray},
T5 <: Union{Nothing,Real,AbstractGray},
T6 <: Union{Nothing,Real,AbstractGray}}
# in order to deprecate old fields we have to introduce new fields if we still want to use @with_kw
# https://github.com/JuliaImages/ImageContrastAdjustment.jl/pull/28#discussion_r395751301
if !isnothing(minval)
dst_minval = minval
Base.depwarn("deprecated: use `dst_minval` for keyword `minval`", :LinearStretching)
end
if !isnothing(maxval)
dst_maxval = maxval
Base.depwarn("deprecated: use `dst_maxval` for keyword `maxval`", :LinearStretching)
end
dst_minval <= dst_maxval || throw(ArgumentError("dst_minval $dst_minval should be less than dst_maxval $dst_maxval"))
if !(isnothing(src_minval) || isnothing(src_maxval))
src_minval <= src_maxval || throw(ArgumentError("src_minval $src_minval should be less than src_maxval $src_maxval"))
end
T = promote_type(T1, T2, T3, T4, T5, T6)
new{T}(convert(T, src_minval), convert(T, src_maxval),
convert(T, dst_minval), convert(T, dst_maxval),
convert(T, dst_minval), convert(T, dst_maxval),
no_clamp)
end
end
function LinearStretching(rangemap::Pair{Tuple{T1, T2}, Tuple{T3, T4}}; no_clamp=false) where {T1, T2, T3, T4}
LinearStretching(rangemap.first..., rangemap.second..., nothing, nothing, no_clamp)
end
function LinearStretching(rangemap::Pair{Nothing, Tuple{T3, T4}}; no_clamp=false) where {T3, T4}
LinearStretching(nothing, nothing, rangemap.second..., nothing, nothing, no_clamp)
end
function LinearStretching(rangemap::Pair{Tuple{T1, T2}, Nothing}; no_clamp=false) where {T1, T2}
LinearStretching(src_minval=rangemap.first[1], src_maxval=rangemap.first[2], no_clamp=no_clamp)
end
function (f::LinearStretching)(out::GenericGrayImage, img::GenericGrayImage)
T = eltype(out)
FT = eltype(floattype(T))
img_min, img_max = minimum_finite(img), maximum_finite(img)
# explicit annotation is needed because the ?: line mixes three value types:
# Nothing, T, and typeof(f.src_minval)
src_minval::FT = isnothing(f.src_minval) ? img_min : f.src_minval
src_maxval::FT = isnothing(f.src_maxval) ? img_max : f.src_maxval
dst_minval::FT = f.dst_minval
dst_maxval::FT = f.dst_maxval
# the kernel operation `r * x - o` is equivalent to `(x-A) * ((b-a)/(B-A)) + a`
# precalculate these so that the inner loop contains only multiplication and addition
# to get better performance
r = (dst_maxval - dst_minval) / (src_maxval - src_minval)
o = (src_minval*dst_maxval - src_maxval*dst_minval) / (src_maxval - src_minval)
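# (identity: (x - A)*(b - a)/(B - A) + a == x*(b - a)/(B - A) - (A*b - B*a)/(B - A) == r*x - o,
#  with A = src_minval, B = src_maxval, a = dst_minval, b = dst_maxval)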
if 1 ≈ r && 0 ≈ o
# when image intensity is already adjusted, there's no need to do it again
# it's a trivial but common case in practice
out === img || (out .= img)
return out
end
# In most cases, we don't need to clamp the output
# this is only needed when the user specifies custom parameters
out_minval = r * img_min - o
out_maxval = r * img_max - o
do_clamp = f.no_clamp ? false : (out_minval < dst_minval) || (out_maxval > dst_maxval)
# re-adjust clamp option to avoid ArgumentError
if !do_clamp && (out_minval < typemin(T) || out_maxval > typemax(T))
do_clamp = true
dst_minval = convert(typeof(dst_minval), typemin(eltype(T)))
dst_maxval = convert(typeof(dst_maxval), typemax(eltype(T)))
else
do_clamp = do_clamp || out_minval < typemin(T) || out_maxval > typemax(T)
dst_minval = max(dst_minval, convert(typeof(dst_minval), typemin(eltype(T))))
dst_maxval = min(dst_maxval, convert(typeof(dst_maxval), typemax(eltype(T))))
end
# although slightly faster, clamping before linear stretching has rounding issues, e.g.,
# it can produce results like -9.313226f-10, which cannot be directly converted to N0f8
# thus the line below is commented out and not used
# do_clamp && (img = clamp.(img, src_minval, src_maxval))
# tweak the performance of FixedPoint by fusing operations into one broadcast
# for Float32 the fallback implementation is faster
if eltype(T) <: FixedPoint
# ?: is faster than if-else
@. out = do_clamp ? clamp(r * img - o, dst_minval, dst_maxval) : r * img - o
return out
end
# fallback implementation
@inbounds @simd for p in eachindex(img)
val = img[p]
if isnan(val)
out[p] = val
else
newval = r * val - o
do_clamp && (newval = clamp(newval, dst_minval, dst_maxval))
out[p] = T <: Integer ? round(Int, newval) : newval
end
end
out
end
function (f::LinearStretching)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(out)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
adjust_histogram!(view(yiq_view,1,:,:), f)
out .= convert.(T, yiq)
end
(f::LinearStretching)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 6168 |
"""
```
Matching <: AbstractHistogramAdjustmentAlgorithm
Matching(targetimg; nbins = 256, edges = nothing)
adjust_histogram([T,] img, f::Matching)
adjust_histogram!([out,] img, f::Matching)
```
Returns a histogram-matched image with a granularity of `nbins` bins.
The first argument `img` is the image to be matched, whereas the argument
`targetimg` in `Matching()` is the image having the desired histogram to be
matched to.
# Details
The purpose of histogram matching is to transform the intensities in a source
image so that the intensities distribute according to the histogram of a
specified target image. If one interprets histograms as piecewise-constant
models of probability density functions (see [`build_histogram`](@ref
build_histogram(::AbstractArray, ::Integer, ::Union{Real,AbstractGray},
::Union{Real,AbstractGray}))), then the histogram matching task can be modelled
as the problem of transforming one probability distribution into another [1].
It turns out that the solution to this transformation problem involves the
cumulative and inverse cumulative distribution functions of the source and
target probability density functions.
In particular, let the random variables ``x \\thicksim p_{x} `` and ``z
\\thicksim p_{z}`` represent an intensity in the source and target image
respectively, and let
```math
S(x) = \\int_0^{x}p_{x}(w)\\mathrm{d} w \\quad \\text{and} \\quad
T(z) = \\int_0^{z}p_{z}(w)\\mathrm{d} w
```
represent their concomitant cumulative distribution functions. Then the
sought-after mapping ``Q(\\cdot)`` such that ``Q(x) \\thicksim p_{z} `` is given
by
```math
Q(x) = T^{-1}\\left( S(x) \\right),
```
where ``T^{-1}(y) = \\operatorname{min} \\{ x \\in \\mathbb{R} : y \\leq T(x)
\\}`` is the inverse cumulative distribution function of ``T(x)``.
The mapping suggests that one can conceptualize histogram matching as performing
histogram equalization on the source and target image and relating the two
equalized histograms. Refer to [`adjust_histogram`](@ref
adjust_histogram(::Equalization, ::AbstractArray, ::Integer,
::Union{Real,AbstractGray}, ::Union{Real,AbstractGray})) for more details on
histogram equalization.
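In discrete form the mapping can be sketched as follows; this is only an illustration with
made-up three-bin histograms, not the implementation used by this package:
```julia
S = cumsum([0.2, 0.3, 0.5])               # source CDF over three bins
T = cumsum([0.5, 0.3, 0.2])               # target CDF over the same bins
Q = [searchsortedfirst(T, s) for s in S]  # target bin matched to each source bin
```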
# Options
Various options for the parameters of the `adjust_histogram` function and the `Matching`
type are described in more detail below.
## Choices for `img` and `targetimg`
The `adjust_histogram(img, Matching())` function can handle a variety of input
types. The type of the returned image matches the input type.
For colored images, the inputs are converted to
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the distributions of the Y
channels are matched. The modified Y channel is then combined with the I and Q
channels and the resulting image converted to the same type as the input.
## Choices for `nbins`
You can specify the total number of bins in the histogram. If you do not
specify the number of bins then a default value of 256 bins is utilized.
## Choices for `edges`
If you do not designate the number of bins, then you have the option to directly
stipulate how the intervals will be divided by specifying an `AbstractRange`
type.
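For example, the edges returned by `build_histogram` can be reused directly. The sketch
below uses hypothetical one-dimensional source and target images purely for illustration:
```julia
using ImageContrastAdjustment
img = [i <= 64 ? i / 255 : 0.0 for i = 0:255]
targetimg = [64 < i < 128 ? i / 255 : 0.0 for i = 0:255]
edges, _ = build_histogram(img, 256, minval = 0, maxval = 1)
imgmatched = adjust_histogram(img, Matching(targetimg = targetimg, edges = edges))
```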
# Example
```julia
using Images, TestImages, ImageView
img_source = testimage("mandril_gray")
img_target = adjust_histogram(img_source, GammaCorrection(gamma = 0.5))
img_transformed = adjust_histogram(img_source, Matching(targetimg = img_target))
#=
A visual inspection confirms that img_transformed resembles img_target
much more closely than img_source.
=#
imshow(img_source)
imshow(img_target)
imshow(img_transformed)
```
# References
1. W. Burger and M. J. Burge. *Digital Image Processing*. Texts in Computer Science, 2016. [doi:10.1007/978-1-4471-6684-9](https://doi.org/10.1007/978-1-4471-6684-9)
"""
@with_kw struct Matching{T₁ <: AbstractArray,
                         T₂ <: Union{Integer, Nothing},
                         T₃ <: Union{AbstractRange, Nothing}} <: AbstractHistogramAdjustmentAlgorithm
targetimg::T₁
nbins::T₂ = 256
edges::T₃ = nothing
end
function (f::Matching)(out::GenericGrayImage, img::GenericGrayImage)
#TODO Throw error/warning if user specifies both edges and nbins simultaneously.
out .= img
edges, pdf, target_pdf = isnothing(f.edges) ? construct_pdfs(out, f.targetimg, f.nbins) : construct_pdfs(out, f.targetimg, f.edges)
match_pdf!(out, edges, pdf, target_pdf)
return out
end
function (f::Matching)(out::AbstractArray{<:Color3}, img::AbstractArray{<:Color3})
T = eltype(img)
yiq = convert.(YIQ, img)
yiq_view = channelview(yiq)
targetimg = f.targetimg
yiq_targetimg = convert.(YIQ, targetimg)
yiq_targetimg_view = channelview(yiq_targetimg)
adjust_histogram!(view(yiq_view,1,:,:), Matching(targetimg = view(yiq_targetimg_view,1,:,:),
edges = f.edges,
nbins = f.nbins))
out .= convert.(T, yiq)
end
(f::Matching)(out::GenericGrayImage, img::AbstractArray{<:Color3}) =
f(out, of_eltype(Gray, img))
function match_pdf!(img::GenericGrayImage, edges::AbstractArray, pdf::AbstractArray, target_pdf::AbstractArray)
cdf = parent(cumsum(pdf))
target_cdf = parent(cumsum(target_pdf))
# Precompute the inverse cumulative distribution function of target_cdf.
lookup_table = lookup_icdf(cdf, target_cdf)
# Transform the intensities in img so that they are distributed according
# to the distribution of the target_histogram.
T = eltype(img)
step_size = step(edges)
first_edge = first(edges)
last_edge = last(edges)
map!(img, img) do val
val = gray(val)
if isnan(val)
return val
else
if val >= last_edge
newval = edges[last(lookup_table)-1] + step_size
elseif val < first_edge
newval = edges[first(lookup_table)]
else
index = Int(Base.div(val-first_edge,step_size)) + 1
newval = edges[lookup_table[index]]
end
return T <: Integer ? ceil(newval) : newval
end
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 7028 |
"""
```
MidwayEqualization <: AbstractHistogramAdjustmentAlgorithm
MidwayEqualization(; nbins = 256, edges = nothing)
adjust_histogram([T,] img_sequence, f::MidwayEqualization(nbins = 256, edges = nothing))
adjust_histogram!([out_sequence,] img_sequence, f::MidwayEqualization(nbins = 256, edges = nothing))
```
Gives a pair of images the same histogram whilst maintaining as
much as possible their previous grey level dynamics.
# Details
The purpose of midway histogram equalization is to transform the intensities in
a pair of images so that the intensities distribute according to a common
"midway" distribution. The histogram representing the common distribution is
chosen so that the original gray level dynamics of the images are preserved as
much as possible. If one interprets histograms as piecewise-constant models of
probability density functions (see [`build_histogram`](@ref
build_histogram(::AbstractArray, ::Integer, ::Union{Real,AbstractGray},
::Union{Real,AbstractGray}))), then the midway histogram equalization task can
be modeled as the problem of transforming one probability distribution into
another (see [`adjust_histogram`](@ref adjust_histogram(::Matching,::AbstractArray, ::AbstractArray, ::Integer))).
It turns out that the solution to this transformation problem involves the
cumulative and inverse cumulative distribution functions of the source and
"midway" probability density functions. In particular, let the random variables ``X_i \\thicksim p_{x_i} \\; (i = 1,2)``,
and ``Z \\thicksim p_{z}`` represent an intensity in the first, second and
"midway" image respectively, and let
```math
S_{X_i}(x) = \\int_0^{x}p_{x_i}(w)\\mathrm{d} w \\; \\quad \\text{and} \\quad
T_{Z}(x) = \\frac{2}{\\frac{1}{S_{X_1}(x)} + \\frac{1}{S_{X_2}(x)}}
```
represent the cumulative distribution functions of the two input images, and
their *harmonic mean*, respectively. Then the sought-after mapping
``Q_{X_i}(\\cdot)`` ``(i = 1,2)`` such that ``Q_{X_i}(x) \\thicksim p_{z} `` is
given by
```math
Q_{X_i}(x) = T_{Z}^{-1}\\left( S_{X_i}(x) \\right),
```
where ``T_{Z}^{-1}(y) = \\operatorname{min} \\{ x \\in \\mathbb{R} : y \\leq
T_{Z}(x) \\}`` is the inverse cumulative distribution function of ``T_{Z}(x)``.
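For instance, if at some intensity the source distributions give ``S_{X_1}(x) = 0.25`` and
``S_{X_2}(x) = 0.75``, then ``T_{Z}(x) = 2 / (1/0.25 + 1/0.75) = 0.375``, which lies between
the two values and is biased towards the smaller one.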
# Options
Various options for the parameters of the `adjust_histogram` function and
the `MidwayEqualization` type are described in more detail below.
## Choices for `img_sequence`
The function `adjust_histogram` expects a length-2 `Vector` of images (the pair
of images) and returns a length-2 `Vector` of modified images. The function
can handle a variety of input types. The type of the returned image matches the
input type.
For colored images, the inputs are converted to
[YIQ](https://en.wikipedia.org/wiki/YIQ) type and the distributions of the Y
channels are transformed according to a "midway" distribution. The modified Y
channel is then combined with the I and Q channels and the resulting image
converted to the same type as the input.
## Choices for `nbins`
You can specify the total number of bins in the histogram. If you do not
specify the number of bins then a default value of 256 bins is utilized.
## Choices for `edges`
If you do not designate the number of bins, then you have the option to directly
stipulate how the intervals will be divided by specifying an `AbstractRange`
type.
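For example (an illustrative sketch in which `img1` and `img2` denote the image pair, as in
the example below), precomputed bin edges can be supplied directly:
```julia
edges, _ = build_histogram(img1, 256, minval = 0, maxval = 1)
img1o, img2o = adjust_histogram([img1, img2], MidwayEqualization(edges = edges))
```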
# Example
```julia
using Images, TestImages, ImageView, ImageContrastAdjustment
img = testimage("mandril_gray")
# The same image but with different intensity distributions
img1 = adjust_histogram(img, GammaCorrection(gamma = 2))
img2 = adjust_histogram(img, GammaCorrection(gamma = 1.2))
# Midway histogram equalization will transform these two images so that their
# intensity distributions are almost identical.
img_sequence = adjust_histogram([img1, img2], MidwayEqualization(nbins = 256))
img1o = first(img_sequence)
img2o = last(img_sequence)
```
# References
1. T. Guillemot and J. Delon, “*Implementation of the Midway Image Equalization*,” Image Processing On Line, vol. 5, pp. 114–129, Jun. 2016. [doi:10.5201/ipol.2016.140](https://doi.org/10.5201/ipol.2016.140)
"""
@with_kw struct MidwayEqualization{T₁ <: Union{Integer, Nothing},
                                   T₂ <: Union{AbstractRange, Nothing}} <: AbstractHistogramAdjustmentAlgorithm
nbins::T₁ = 256
edges::T₂ = nothing
end
function (f::MidwayEqualization)(out_sequence::Vector{<:GenericGrayImage}, in_sequence::Vector{<:GenericGrayImage})
length(out_sequence) == 2 || error("Please supply a length-2 output vector to store the pair of images.")
length(in_sequence) == 2 || error("Please supply a length-2 input vector storing the image pair.")
out1 = first(out_sequence)
out2 = last(out_sequence)
in1 = first(in_sequence)
in2 = last(in_sequence)
out1 .= in1
out2 .= in2
edges, pdf1, pdf2 = isnothing(f.edges) ? construct_pdfs(out1, out2, f.nbins) : construct_pdfs(out1, out2, f.edges)
midway_pdf = zero(pdf1)
cdf1 = cumsum(pdf1)
cdf2 = cumsum(pdf2)
# midway_cdf is the harmonic mean between cdf1 and cdf2.
midway_cdf = similar(cdf1)
for i in eachindex(cdf1)
if cdf1[i] == 0 || cdf2[i] == 0
midway_cdf[i] = 0
else
midway_cdf[i] = 2 / (1/cdf1[i] + 1/cdf2[i])
end
end
cdf2pdf!(midway_pdf, midway_cdf)
index₁ = firstindex(out_sequence)
index₂ = lastindex(out_sequence)
out_sequence[index₁] = match_pdf!(out1, edges, pdf1, midway_pdf)
out_sequence[index₂] = match_pdf!(out2, edges, pdf2, midway_pdf)
end
function (f::MidwayEqualization)(out_sequence::Vector{<:AbstractArray{<:Color3}}, in_sequence::Vector{<:AbstractArray{<:Color3}})
length(out_sequence) == 2 || error("Please supply a length-2 output vector to store the pair of images.")
length(in_sequence) == 2 || error("Please supply a length-2 input vector storing the image pair.")
out1 = first(out_sequence)
out2 = last(out_sequence)
in1 = first(in_sequence)
in2 = last(in_sequence)
T₁ = eltype(out1)
T₂ = eltype(out2)
out1 .= in1
out2 .= in2
in_yiq1 = convert.(YIQ, in1)
in_yiq1_view = channelview(in_yiq1)
in_yiq2 = convert.(YIQ, in2)
in_yiq2_view = channelview(in_yiq2)
in_yiq_view_sequence = [view(in_yiq1_view,1,:,:), view(in_yiq2_view,1,:,:)]
out_yiq1 = convert.(YIQ, out1)
out_yiq1_view = channelview(out_yiq1)
out_yiq2 = convert.(YIQ, out2)
out_yiq2_view = channelview(out_yiq2)
out_yiq_view_sequence = [view(out_yiq1_view,1,:,:), view(out_yiq2_view,1,:,:)]
adjust_histogram!(out_yiq_view_sequence, in_yiq_view_sequence, f)
index₁ = firstindex(out_sequence)
index₂ = lastindex(out_sequence)
out_sequence[index₁] .= convert.(T₁, out_yiq1)
out_sequence[index₂] .= convert.(T₂, out_yiq2)
end
(f::MidwayEqualization)(out_sequence::Vector{<:GenericGrayImage}, img_sequence::Vector{<:AbstractArray{<:Color3}}) =
f(out_sequence, map(img -> of_eltype(Gray, img), img_sequence))
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 5976 | @testset "Contrast Limited Adaptive Histogram Equalisation" begin
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
# Create a graylevel ramp
img = Gray{Float32}.([i/255.0 for i = 64:127, j = 1:64])
img = T.(img)
for rblocks in 2:4
for cblocks in 2:4
# There should be no change in intensity along a row.
algo = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1,
rblocks = rblocks, cblocks = cblocks,
clip = 0.0)
imgeq = adjust_histogram(img, algo)
target = repeat(imgeq[:,1], inner=(1, 1), outer=(1, 64))
@test all(imgeq .β target)
# There should be no change in intensity along a column
imgeq = adjust_histogram(transpose(img), algo)
target = transpose(repeat(imgeq[1, :], inner=(1, 1), outer=(1, 64)))
@test all(imgeq .β target)
end
end
end
#=
When rblocks and cblocks equal one and clip is zero, then the method boils
down to canonical histogram equalization. Hence, we repeat the same tests
here that we have in histogram_equalization.jl.
=#
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
#=
Create an image that spans a narrow graylevel range. Then quantize
the 256 bins down to 32 and determine how many bins have non-zero
counts.
=#
img = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = T.(img)
_, counts_before = build_histogram(img, 32, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Equalize the image histogram. Then quantize the 256 bins down to 32
and verify that all 32 bins have non-zero counts. This will confirm
that the dynamic range of the original image has been increased.
=#
algo = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1,
rblocks = 1, cblocks = 1, clip = 0.0)
imgeq = adjust_histogram(img, algo)
edges, counts_after = build_histogram(imgeq, 32, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 32
end
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
#=
Create a color image that spans a narrow graylevel range. Then
quantize the 256 bins down to 32 and determine how many bins have
non-zero counts.
=#
imgg = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = colorview(RGB,imgg,imgg,imgg)
img = T.(img)
_, counts_before = build_histogram(img, 32, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Equalize the histogram. Then quantize the 256 bins down to 32 and
verify that all 32 bins have non-zero counts. This will confirm that
the dynamic range of the original image has been increased.
=#
algo = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1,
rblocks = 1, cblocks = 1, clip = 0.0)
imgeq = adjust_histogram(img, algo)
edges, counts_after = build_histogram(imgeq, 32, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 32
end
# Verify that the minimum and maximum values of the equalised image match the
# specified minimum and maximum values, i.e. that the intensities of the equalised
# image are in the interval [minvalue, maxvalue].
algo = AdaptiveEqualization(nbins = 256, minval = 64, maxval = 128,
rblocks = 1, cblocks = 1, clip = 0.0)
imgeq = adjust_histogram(hcat(collect(0:1:255), collect(0:1:255)), algo)
@test all(imgeq[1:65, :] .== 64)
@test all(imgeq[128+1:end, :] .== 128)
algo = AdaptiveEqualization(nbins = 256, minval = 64/255, maxval = 128/255,
rblocks = 1, cblocks = 1, clip = 0.0)
imgeq = adjust_histogram(hcat(collect(0:1/255:1), collect(0:1/255:1)), algo)
@test all(imgeq[1:65, :] .== 64/255)
@test all(imgeq[128+1:end, :] .== 128/255)
# Verify that increasing the clip value reduces the strength of the contrast
# adjustment
img = Gray{Float32}.(testimage("mandril_gray"))
# As we increase the clip factor the contrast adjustment is diminished and we
# should approach the original image.
algo₁ = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 0.0)
algo₂ = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 0.25)
algo₃ = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 0.5)
algo₄ = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 0.75)
algo₅ = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 1.0)
algos = [algo₁, algo₂, algo₃, algo₄, algo₅]
differences = [norm(adjust_histogram(img, algos[i]) .- img) for i = 1:5]
@test all([differences[i] > differences[i+1] for i = 1:4])
@test last(differences) < 2.8
# Verify that automatically converting a color image and storing the histogram
# adjusted result "inplace" works correctly.
algo = AdaptiveEqualization(nbins = 256, minval = 0, maxval = 1, rblocks = 1, cblocks = 1, clip = 0)
imgg₁ = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
imgg₂ = copy(imgg₁)
img = colorview(RGB,imgg₁,imgg₁,imgg₁)
imgeq₁ = adjust_histogram!(imgg₂,img, algo)
imgeq₂ = adjust_histogram(imgg₁, algo)
@test norm(imgeq₁ .- imgeq₂) ≈ 0.0
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 3805 | using ImageCore
using ImageContrastAdjustment
using Test
@testset "Contrast Stretching" begin
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
#=
Create an image that spans a narrow graylevel range. Then
quantize the 256 bins down to 16 and determine how many bins have
non-zero counts.
=#
img = T.([i/255.0 for i = 64:128, j = 1:10])
_, counts_before = build_histogram(img,16, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Stretch the histogram. Then quantize the 256 bins down to 16 and
verify that all 16 bins have non-zero counts. This will confirm that
the dynamic range of the original image has been increased.
=#
ret = adjust_histogram(img, ContrastStretching(t = 0.4, slope = 17))
edges, counts_after = build_histogram(ret,16, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 16
@test eltype(img) == eltype(ret)
if eltype(T) <: FixedPoint
@test ret β T.(adjust_histogram(float.(img), ContrastStretching(t = 0.4, slope = 17)))
end
#=
Verify that the function can cope with a NaN value.
=#
if T <: Gray{Float32} || T <: Gray{Float64}
img[1] = NaN
ret = adjust_histogram(img, ContrastStretching(t = 0.4, slope = 17))
edges, counts_after = build_histogram(ret,16, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 16
@test eltype(img) == eltype(ret)
end
#=
Verify that when the slope is set to a very large value the contrast
stretching behaves like a thresholding function.
=#
ret = adjust_histogram(img, ContrastStretching(t = 0.37, slope = 1000))
edges, counts_after = build_histogram(ret,16, minval = 0, maxval = 1)
@test sum(counts_after .!= 0) == 2
end
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
#=
Create a color image that spans a narrow graylevel range. Then
quantize the 256 bins down to 16 and determine how many bins have
non-zero counts.
=#
imgg = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = colorview(RGB,imgg,imgg,imgg)
img = T.(img)
_, counts_before = build_histogram(img, 16, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Stretch the histogram. Then quantize the 256 bins down to 16 and
verify that all 16 bins have non-zero counts. This will confirm that
the dynamic range of the original image has been increased.
=#
ret = adjust_histogram(img, ContrastStretching(t = 0.4, slope = 17))
edges, counts_after = build_histogram(ret, 16, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 16
@test eltype(img) == eltype(ret)
#=
Verify that when the slope is set to a very large value the contrast
stretching behaves like a thresholding function.
=#
ret = adjust_histogram(img, ContrastStretching(t = 0.37, slope = 1000))
edges, counts_after = build_histogram(ret, 16, minval = 0, maxval = 1)
@test sum(counts_after .!= 0) == 2
end
# Issue #58
img = Gray{N0f8}.([1 0; 0 1])
@test adjust_histogram(img, ContrastStretching(t=0.3, slope=0.4)) ==
Gray{N0f8}.(adjust_histogram(float.(img), ContrastStretching(t=0.3, slope=0.4, ϵ=eps(N0f8))))
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 1033 | @testset "Core" begin
A = [NaN 1 2 3;
NaN 6 5 4]
@test ImageContrastAdjustment.minimum_finite(A) == 1
@test ImageContrastAdjustment.maximum_finite(A) == 6
A = rand(10:20, 5, 5)
@test ImageContrastAdjustment.minimum_finite(A) == minimum(A)
@test ImageContrastAdjustment.maximum_finite(A) == maximum(A)
A = reinterpret(N0f8, rand(0x00:0xff, 5, 5))
@test ImageContrastAdjustment.minimum_finite(A) == minimum(A)
@test ImageContrastAdjustment.maximum_finite(A) == maximum(A)
A = rand(Float32,3,5,5)
img = colorview(RGB, A)
dc = ImageContrastAdjustment.minimum_finite(img)-RGB{Float32}(minimum(A, dims=(2,3))...)
@test norm(dc) < 1e-6
dc = ImageContrastAdjustment.maximum_finite(img)-RGB{Float32}(maximum(A, dims=(2,3))...)
@test norm(dc) < 1e-6
@test ImageContrastAdjustment.minimum_finite(x->x^2,[NaN,10,2]) == 4
@test ImageContrastAdjustment.maximum_finite(x->x^2,[NaN,10,2]) == 100
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 1954 | @testset "Gamma Correction" begin
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
img = fill(oneunit(T),10,10)
ret = adjust_histogram(img, GammaCorrection(gamma = 1))
@test img == ret
@test eltype(ret) == eltype(img)
imgp = padarray(img, Fill(zero(T), (2,2)))
retp = adjust_histogram(imgp, GammaCorrection(gamma = 1))
@test imgp == retp
@test eltype(retp) == eltype(imgp)
end
# ERROR: MethodError: no method matching ^(::AGray{Normed{UInt8,8}}, ::Float64)
img = fill(oneunit(AGray{N0f8}),10,10)
@test_broken adjust_histogram(img, GammaCorrection(gamma = 0.5))
# ERROR: MethodError: no method matching ^(::AGray{Normed{UInt8,8}}, ::Float64)
img = fill(oneunit(ARGB{N0f8}),10,10)
@test_broken adjust_histogram(img, GammaCorrection(gamma = 0.5))
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float64})
img = fill(oneunit(T),10,10)
ret = adjust_histogram(img, GammaCorrection(gamma = 1))
@test all(map((i, r) -> isapprox(i, r), img, ret))
@test eltype(ret) == eltype(img)
imgp = padarray(img, Fill(zero(T), (2,2)))
retp = adjust_histogram(imgp, GammaCorrection(gamma = 1))
@test all(map((i, r) -> isapprox(i, r), imgp, retp))
@test eltype(retp) == eltype(imgp)
end
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
img = T.(collect(reshape(1/100:1/100:1, 10, 10)))
for i = 0.5:0.27:2
ret = adjust_histogram(img, GammaCorrection(gamma = i))
@test ret == T.(img .^ i)
end
end
# Since the function returns the same output type as the input type
# there is an implicit rounding operation when dealing with integer values.
img = reshape(1:1:100, 10, 10)
for i = 0.5:0.28:2
ret = adjust_histogram(img, GammaCorrection(gamma = i))
@test ret == round.(Int, img .^ i)
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 5669 | @testset "Histogram Construction" begin
# Consider an image where each intensity occurs only once and vary the number
# of bins used in the histogram in powers of two. With the exception of the
# first bin (with index 0), all other bins should have equal counts.
expected_counts = [2^i for i = 0:7]
bins = [2^i for i = 8:-1:1]
for i = 1:length(bins)
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
edges, counts = build_histogram(T.(collect(0:1/255:1)),bins[i], minval = 0, maxval = 1)
@test length(edges) == length(counts) - 1
@test all(counts[1:end] .== expected_counts[i]) && counts[0] == 0
@test axes(counts) == (0:length(edges),)
end
# Verify that the function can also take a color image as an input.
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
imgg = collect(0:1/255:1)
img = colorview(RGB,imgg,imgg,imgg)
edges, counts = build_histogram(T.(img),bins[i], minval = 0, maxval = 1)
@test length(edges) == length(counts) - 1
@test all(counts[1:end] .== expected_counts[i]) && counts[0] == 0
@test axes(counts) == (0:length(edges),)
end
# Consider also integer-valued images.
edges, counts = build_histogram(0:1:255,bins[i], minval = 0, maxval = 255)
@test length(edges) == length(counts) - 1
@test all(counts[1:end] .== expected_counts[i]) && counts[0] == 0
@test axes(counts) == (0:length(edges),)
end
# Consider truncated intervals.
for T in (Int, Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
if T == Int
edges, counts = build_histogram(0:1:255,4, minval = 128, maxval = 192)
@test length(edges) == length(counts) - 1
@test collect(counts) == [128; 16; 16; 16; 80]
@test axes(counts) == (0:length(edges),)
else
img = collect(0:1/255:1)
edges, counts = build_histogram(T.(img),4, minval = 128/255, maxval = 192/255)
@test length(edges) == length(counts) - 1
@test collect(counts) == [128; 16; 16; 16; 80]
@test axes(counts) == (0:length(edges),)
end
if T == Int
edges, counts = build_histogram(0:1:255,4, minval = 120,maxval = 140)
@test length(edges) == length(counts) - 1
@test collect(counts) == [120, 5, 5, 5, 121]
@test axes(counts) == (0:length(edges),)
else
img = collect(0:1/255:1)
edges, counts = build_histogram(T.(img),4,minval = 120/255, maxval = 140/255)
@test length(edges) == length(counts) - 1
@test axes(counts) == (0:length(edges),)
# Due to roundoff errors the bins are not the same as in the
# integer case above.
@test all([120, 4, 4, 4, 121] .<= collect(counts) .<= [120, 6, 6, 6, 121])
@test sum(counts) == length(img)
end
end
# Consider the case where the minimum and maximum values are not the start and
# end points of the dynamic range. Because of numerical precision, the
# results will be slightly different depending on the image type.
for T in (Int, Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
if T == Int
edges, counts = build_histogram(200:1:240,4, minval = 200, maxval = 240)
@test length(edges) == length(counts) - 1
@test collect(counts) == [0, 10, 10, 10, 11]
@test axes(counts) == (0:length(edges),)
edges, counts = build_histogram(200:1:240,4)
@test length(edges) == length(counts) - 1
@test collect(counts) == [0, 10, 10, 10, 11]
@test axes(counts) == (0:length(edges),)
else
img = 200/255:1/255:240/255
edges, counts = build_histogram(T.(img),4, minval = 200/255, maxval = 240/255)
@test length(edges) == length(counts) - 1
@test all([0, 9, 9, 9, 11] .<= collect(counts) .<= [0, 11, 11, 11, 11])
@test sum(counts) == length(img)
@test axes(counts) == (0:length(edges),)
edges, counts = build_histogram(T.(img),4)
@test axes(counts) == (0:length(edges),)
@test length(edges) == length(counts) - 1
@test all([0, 9, 9, 9, 10] .<= collect(counts) .<= [0, 11, 11, 11, 11])
@test sum(counts) == length(img)
end
end
# Consider the effect of NaN on the histogram counts.
for j = 2:255
for T in (Gray{Float32}, Gray{Float64})
img = collect(0:1/255:1)
img[j] = NaN
edges, counts = build_histogram(T.(img),256, minval = 0, maxval = 1)
target = [1 for k = 1:length(counts)-1]
target[j] -= 1
@test length(edges) == length(counts) - 1
@test counts[1:end] == target && counts[0] == 0
@test axes(counts) == (0:length(edges),)
# Verify that the minimum value (0) and maximum value (1) are
# determined automatically even when there are NaN values
# between these two bounds.
img = collect(0:1/255:1)
img[j] = NaN
edges, counts = build_histogram(T.(img),256)
target = [1 for k = 1:length(counts)-1]
target[j] -= 1 # Because of the NaN value in the image
@test length(edges) == length(counts) - 1
@test counts[1:end] == target && counts[0] == 0
@test axes(counts) == (0:length(edges),)
end
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 2730 | @testset "Histogram Equalisation" begin
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
#=
Create an image that spans a narrow graylevel range. Then quantize
the 256 bins down to 32 and determine how many bins have non-zero
counts.
=#
img = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = T.(img)
_, counts_before = build_histogram(img, 32, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Equalize the image histogram. Then quantize the 256 bins down to 32
and verify that all 32 bins have non-zero counts. This will confirm
that the dynamic range of the original image has been increased.
=#
imgeq = adjust_histogram(img, Equalization(nbins = 256, minval = 0, maxval = 1))
edges, counts_after = build_histogram(imgeq, 32, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 32
end
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
#=
Create a color image that spans a narrow graylevel range. Then
quantize the 256 bins down to 32 and determine how many bins have
non-zero counts.
=#
imgg = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = colorview(RGB,imgg,imgg,imgg)
img = T.(img)
_, counts_before = build_histogram(img, 32, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Equalize the histogram. Then quantize the 256 bins down to 32 and
verify that all 32 bins have non-zero counts. This will confirm that
the dynamic range of the original image has been increased.
=#
imgeq = adjust_histogram(img,Equalization(nbins = 256, minval = 0, maxval = 1))
edges, counts_after = build_histogram(imgeq, 32, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 32
end
# Verify that the minimum and maximum values of the equalised image match the
# specified minimum and maximum values, i.e. that the intensities of the equalised
# image are in the interval [minvalue, maxvalue].
imgeq = adjust_histogram(collect(0:1:255), Equalization(nbins = 256, minval = 64, maxval = 128))
@test all(imgeq[1:65] .== 64)
@test all(imgeq[128+1:end] .== 128)
imgeq = adjust_histogram(collect(0:1/255:1), Equalization(nbins = 256, minval = 64/255, maxval = 128/255))
@test all(imgeq[1:65] .== 64/255)
@test all(imgeq[128+1:end] .== 128/255)
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 4989 | @testset "Histogram Matching" begin
# Construct a target image by adjusting the gamma of a standard
# test image. Then transform the distribution of the test image
# so that the intensity distribution matches the gamma-adjusted target
# image. With Float32 and Float64 types the distributions match
# exactly. For N0f8 and N0f16 types there are some round-off errors and
# some of the bins of the transformed distributions are shifted
# by one bin when compared to the target distribution. Hence, we
# omit N0f8 and N0f16 from this particular test.
for T in (Gray{Float32}, Gray{Float64})
img = T.(testimage("mandril_gray"))
imgo = T.(adjust_histogram(img, GammaCorrection(gamma = 0.5)))
result = adjust_histogram(img, Matching(targetimg = imgo, nbins = 256))
edges_target, counts_target = build_histogram(imgo, 256, minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
# Check that we get the same result if we explicitly pass the
# bin edges.
edges, _ = build_histogram(img,256, minval = 0, maxval = 1)
result = adjust_histogram(img, Matching(targetimg = imgo, edges = edges))
edges_target, counts_target = build_histogram(imgo, 256,minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
end
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float16}, Gray{Float32})
img = T.([i < 64 ? i/255 : 0 for i = 0:255])
imgo = T.([i > 64 && i < 128 ? i/255 : 0 for i = 0:255])
result = adjust_histogram(img, Matching(targetimg = imgo, nbins = 256))
edges_source, counts_source = build_histogram(img, 256, minval = 0, maxval = 1)
edges_target, counts_target = build_histogram(imgo,256, minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result,256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
end
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
imgg = Gray{N0f8}.([i < 64 ? i/255 : 0 for i = 0:255])
img = T.(imgg,imgg,imgg)
imggo = Gray{N0f8}.([i > 64 && i < 128 ? i/255 : 0 for i = 0:255])
imgo = T.(imggo,imggo,imggo)
result = adjust_histogram(img, Matching(targetimg = imgo, nbins = 256))
edges_source, counts_source = build_histogram(img, 256, minval = 0, maxval = 1)
edges_target, counts_target = build_histogram(imgo, 256, minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
# Check that we get the same result if we explicitly pass the
# bin edges.
edges = 0.0:0.0019454658031463623:0.4960937798023224
result = adjust_histogram(img, Matching(targetimg = imgo, edges = edges))
edges_source, counts_source = build_histogram(img, 256, minval = 0, maxval = 1)
edges_target, counts_target = build_histogram(imgo, 256, minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
end
# Modification of Tim's example:
# https://github.com/JuliaImages/Images.jl/pull/752
img = Gray.([0.4 0.45; 0.5 0.55])
imgo = Gray.([0.1 0.3; 0.7 0.9])
result = adjust_histogram(img, Matching(targetimg = imgo))
edges_target, counts_target = build_histogram(imgo, 256, minval = 0, maxval = 1)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 1)
@test all(counts_target .== counts_result)
# Verify idempotency.
img = collect(0:1:255)
result = adjust_histogram(img, Matching(targetimg = img, nbins = 256))
edges_target, counts_target = build_histogram(img, 256, minval = 0, maxval = 255)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 255)
@test all(counts_target .== counts_result)
# Verify that the algorithm works on integers.
img = ([i <= 64 ? i : 0 for i = 0:255])
imgo = ([i > 255-64 && i <= 255 ? i : 0 for i = 0:255])
result = adjust_histogram(img, Matching(targetimg = imgo, nbins = 256))
edges_source, counts_source = build_histogram(img, 256, minval = 0, maxval = 255)
edges_target, counts_target = build_histogram(imgo, 256, minval = 0, maxval = 255)
edges_result, counts_result = build_histogram(result, 256, minval = 0, maxval = 255)
@test all(counts_target .== counts_result)
# Verify that the algorithm can cope with NaN.
img_nan = Float64.(collect(0:1/255:1))
img_nan[1:128] .= NaN
reference_nan = adjust_histogram(img_nan, Matching(targetimg = img_nan))
@test all(isapprox.(img_nan[129:end], reference_nan[129:end]; atol = 1e-1))
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 3169 | @testset "Midway Histogram Equalization" begin
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
# Define two images consisting of luminance gradients with different
# (non-overlapping) intensities. The "midway" equalization should produce
# two new images for which the cummulative distribution function of the
# image intensities is identical.
img1 = T.(zeros(256,10))
for i = 32:96
for j = 1:10
img1[i,j] = i/255.0
end
end
img2 = T.(zeros(256,10))
for i = 128:192
for j = 1:10
img2[i,j] = i/255.0
end
end
img1o, img2o = adjust_histogram([img1, img2], MidwayEqualization(nbins = 256))
edges1, counts1 = build_histogram(img1o, 256, minval = 0, maxval = 1)
edges2, counts2 = build_histogram(img2o, 256, minval = 0, maxval = 1)
@test sum(cumsum(counts2) - cumsum(counts1)) == 0
edges, _ = build_histogram(img1, 256, minval = 0, maxval = 1)
img1o, img2o = adjust_histogram([img1, img2], MidwayEqualization(edges = edges))
edges1, counts1 = build_histogram(img1o, 256, minval = 0, maxval = 1)
edges2, counts2 = build_histogram(img2o, 256, minval = 0, maxval = 1)
@test sum(cumsum(counts2) - cumsum(counts1)) == 0
end
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
imgg1 = zeros(Gray{Float32},256,10)
for i = 32:96
for j = 1:10
imgg1[i,j] = i/255.0
end
end
img1 = colorview(RGB,imgg1,imgg1,imgg1)
img1 = T.(img1)
imgg2 = zeros(Gray{Float32},256,10)
for i = 128:192
for j = 1:10
imgg2[i,j] = i/255.0
end
end
img2 = colorview(RGB,imgg2,imgg2,imgg2)
img2 = T.(img2)
img1o, img2o = ImageContrastAdjustment.adjust_histogram([img1, img2], ImageContrastAdjustment.MidwayEqualization(nbins = 256))
edges1, counts1 = ImageContrastAdjustment.build_histogram(img1o, 256, minval = 0, maxval = 1)
edges2, counts2 = ImageContrastAdjustment.build_histogram(img2o, 256, minval = 0, maxval = 1)
# The algorithm equalizes the Y channels from the YIQ decomposition of the RGB images and then
# constructs new RGB images by combining the equalised Y channels with the IQ components.
# The build_histogram function then implicitly converts the "midway" RGB images to Gray
# in order to construct the histogram. After this process the cumulative distribution functions
# of these luminance gradients are no longer identical but still close.
@test abs(sum(cumsum(counts2) - cumsum(counts1))) <= 20
edges, _ = build_histogram(img1, 256, minval = 0, maxval = 1)
img1o, img2o = adjust_histogram([img1, img2], MidwayEqualization(edges = edges))
edges1, counts1 = build_histogram(img1o, 256, minval = 0, maxval = 1)
edges2, counts2 = build_histogram(img2o, 256, minval = 0, maxval = 1)
@test sum(cumsum(counts2) - cumsum(counts1)) == 0
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 6503 | @testset "Linear Stretching" begin
@testset "constructors" begin
@test LinearStretching() === LinearStretching(nothing, nothing, 0.0f0, 1.0f0, nothing, nothing, false)
@test LinearStretching(src_minval=0.1f0, src_maxval=0.9f0, dst_minval=0.05f0, dst_maxval=0.95f0, no_clamp=true) ===
LinearStretching(0.1f0, 0.9f0, 0.05f0, 0.95f0, nothing, nothing, true)
@test LinearStretching((0.1f0, 0.9f0)=>(0.2f0, 0.8f0)) === LinearStretching(0.1f0, 0.9f0, 0.2f0, 0.8f0)
@test LinearStretching((0.1f0, 0.9f0)=>(0.2f0, 0.8f0), no_clamp=true) ===
LinearStretching(0.1f0, 0.9f0, 0.2f0, 0.8f0, nothing, nothing, true)
@test LinearStretching(nothing=>(0.2f0, 0.8f0)) === LinearStretching((nothing, nothing)=>(0.2f0, 0.8f0))
@test LinearStretching(nothing=>(0.2f0, 0.8f0), no_clamp=true) ===
LinearStretching((nothing, nothing)=>(0.2f0, 0.8f0), no_clamp=true)
@test LinearStretching((0.1f0, 0.9f0)=>nothing, no_clamp=true) ===
LinearStretching(0.1f0, 0.9f0, 0.0f0, 1.0f0, nothing, nothing, true)
@test_throws MethodError LinearStretching(0.1f0, 0.9f0)
@test_throws MethodError LinearStretching((0.1f0, 0.9f0), (0.0f0, 1.0f0))
@test_throws ArgumentError LinearStretching((0.9f0, 0.1f0)=>nothing)
@test_throws ArgumentError LinearStretching(nothing=>(0.9f0, 0.1f0))
@test_throws ArgumentError LinearStretching((0.9f0, 0.1f0)=>(0.9f0, 0.1f0))
end
for T in (Gray{N0f8}, Gray{N0f16}, Gray{Float32}, Gray{Float64})
#=
Stretching an image consisting of a linear ramp should not change the image
if the specified minimum and maximum values match the minimum and maximum of
the image.
=#
img = T.(collect(reshape(1/100:1/100:1, 10, 10)))
minval = minimum(img)
maxval = maximum(img)
ret = adjust_histogram(img, LinearStretching(nothing=>(minval, maxval)))
if T <: Gray{Float32} || T <: Gray{Float64}
@test all(map((i, r) -> isapprox(i, r), img, ret))
else
@test ret == img
end
# Verify that NaN is also handled correctly.
if T <: Gray{Float32} || T <: Gray{Float64}
img[10] = NaN
ret = adjust_histogram(img, LinearStretching(nothing=>(minval, maxval)))
@test isapprox(first(img), first(ret))
@test isapprox(last(img), last(ret))
@test isnan(ret[10])
end
# Verify that the smallest and largest values match the specified minval and maxval.
img = T.(collect(reshape(1/100:1/100:1, 10, 10)))
minval = minimum(img)
maxval = maximum(img)
ret = adjust_histogram(img, LinearStretching(nothing=>(0, 1)))
@test isapprox(0, first(ret))
@test isapprox(1, last(ret))
@test isapprox(0, minimum(ret[.!isnan.(ret)]))
@test isapprox(1, maximum(ret[.!isnan.(ret)]))
# Verify that the return type matches the input type.
img = T.(testimage("mandril_gray"))
ret = adjust_histogram(img, LinearStretching(nothing=>(0, 1)))
@test eltype(ret) == eltype(img)
@test isapprox(0, minimum(ret))
@test isapprox(1, maximum(ret))
ret = adjust_histogram(img, LinearStretching(nothing=>(0.2, 0.8)))
@test eltype(ret) == eltype(img)
@test isapprox(0.2, minimum(ret))
@test isapprox(0.8, maximum(ret))
# Verify that results are correctly clamped to [0.2, 0.9] if it exceeds the range
ret = adjust_histogram(img, LinearStretching((0.1, 0.8)=>(0.2, 0.9)))
@test eltype(ret) == eltype(img)
@test isapprox(T(0.2), minimum(ret))
@test isapprox(T(0.9), maximum(ret), atol=1e-2)
end
# Verify that no_clamp option handles different input types correctly without ArgumentError
img = Float32.(testimage("mandril_gray"))
ret_clamp = adjust_histogram(img, LinearStretching((0.3, 0.8)=>(0.1, 1.1), no_clamp=true))
@test eltype(ret_clamp) == eltype(img)
@test isapprox(-0.5f0, minimum(ret_clamp))
@test isapprox(1.272549f0, maximum(ret_clamp))
ret_noclamp = adjust_histogram(img, LinearStretching((0.3, 0.8)=>(0.1, 1.1), no_clamp=false))
@test eltype(ret_noclamp) == eltype(img)
@test isapprox(0.1f0, minimum(ret_noclamp))
@test isapprox(1.1f0, maximum(ret_noclamp))
# when no_clamp==true, the output is still clamped by (typemin(T), typemax(T))
img = N0f8.(testimage("mandril_gray"))
ret_clamp = adjust_histogram(img, LinearStretching((0.3, 0.8)=>(0.1, 1.1), no_clamp=true))
@test eltype(ret_clamp) == eltype(img)
@test isapprox(0.0N0f8, minimum(ret_clamp))
@test isapprox(1.0N0f8, maximum(ret_clamp))
ret_noclamp = adjust_histogram(img, LinearStretching((0.3, 0.8)=>(0.1, 1.1), no_clamp=false))
@test eltype(ret_noclamp) == eltype(img)
@test isapprox(0.1N0f8, minimum(ret_noclamp))
@test isapprox(1.0N0f8, maximum(ret_noclamp))
@test ret_clamp != ret_noclamp
for T in (RGB{N0f8}, RGB{N0f16}, RGB{Float32}, RGB{Float64})
#=
Create a color image that spans a narrow graylevel range. Then
quantize the 256 bins down to 32 and determine how many bins have
non-zero counts.
=#
imgg = Gray{Float32}.([i/255.0 for i = 64:128, j = 1:10])
img = colorview(RGB,imgg,imgg,imgg)
img = T.(img)
_, counts_before = build_histogram(img,32, minval = 0, maxval = 1)
nonzero_before = sum(counts_before .!= 0)
#=
Stretch the histogram. Then quantize the 256 bins down to 32 and
verify that all 32 bins have non-zero counts. This will confirm that
the dynamic range of the original image has been increased.
=#
ret = adjust_histogram(img, LinearStretching(nothing=>(0, 1)))
edges, counts_after = build_histogram(ret, 32, minval = 0, maxval = 1)
nonzero_after = sum(counts_after .!= 0)
@test nonzero_before < nonzero_after
@test nonzero_after == 32
@test eltype(img) == eltype(ret)
end
@testset "deprecations" begin
@info "four depwarns are expected"
@test LinearStretching(minval = 0.1) === LinearStretching(dst_minval = 0.1)
@test LinearStretching(maxval = 0.9) === LinearStretching(dst_maxval = 0.9)
@test LinearStretching(minval = 0.1, maxval = 0.9) === LinearStretching(dst_minval = 0.1, dst_maxval = 0.9)
end
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | code | 664 | using ImageContrastAdjustment
using Test, ImageCore, ImageFiltering, TestImages, LinearAlgebra
using Aqua
if Base.VERSION >= v"1.6"
@testset "Aqua" begin
# TODO: fix the ambiguity test
Aqua.test_all(ImageContrastAdjustment; ambiguities=false)
end
end
@testset "ImageContrastAdjustment.jl" begin
include("core.jl")
include("adaptive_equalization.jl")
include("histogram_construction.jl")
include("histogram_matching.jl")
include("histogram_equalization.jl")
include("histogram_midway_equalization.jl")
include("gamma_adjustment.jl")
include("linear_stretching.jl")
include("contrast_stretching.jl")
end
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | docs | 877 | # ImageContrastAdjustment
A Julia package for enhancing and manipulating image contrast.
[](https://travis-ci.com/JuliaImages/ImageContrastAdjustment.jl)
[](https://codecov.io/gh/JuliaImages/ImageContrastAdjustment.jl)
[](https://juliaimages.org/ImageContrastAdjustment.jl/stable)
[](https://juliaimages.org/ImageContrastAdjustment.jl/dev)
[![PkgEval][pkgeval-img]][pkgeval-url]
[pkgeval-img]: https://juliaci.github.io/NanosoldierReports/pkgeval_badges/I/ImageContrastAdjustment.svg
[pkgeval-url]: https://juliaci.github.io/NanosoldierReports/pkgeval_badges/report.html
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | docs | 5854 | # ImageContrastAdjustment.jl Documentation
A Julia package for enhancing and manipulating image contrast.
```@contents
Depth = 3
```
## Getting started
This package is part of a wider Julia-based image processing
[ecosystem](https://github.com/JuliaImages). If you are starting out, then you
may benefit from [reading](https://juliaimages.org/latest/quickstart/) about
some fundamental conventions that the ecosystem utilizes that are markedly
different from how images are typically represented in OpenCV, MATLAB, ImageJ or
Python.
The usage examples in the `ImageContrastAdjustment.jl` package assume that you have
already installed some key packages. Notably, the examples assume that you are
able to load and display an image. Loading an image is facilitated through the
[FileIO.jl](https://github.com/JuliaIO/FileIO.jl) package, which uses
[QuartzImageIO.jl](https://github.com/JuliaIO/QuartzImageIO.jl) if you are on
`MacOS`, and [ImageMagick.jl](https://github.com/JuliaIO/ImageMagick.jl)
otherwise. Depending on your particular system configuration, you might
encounter problems installing the image loading packages, in which case you can
refer to the [troubleshooting
guide](https://juliaimages.org/latest/troubleshooting/#Installation-troubleshooting-1).
Image display is typically handled by the
[ImageView.jl](https://github.com/JuliaImages/ImageView.jl) package.
Alternatives include the various plotting packages, including
[Makie.jl](https://github.com/JuliaPlots/Makie.jl). There is
also the [ImageShow.jl](https://github.com/JuliaImages/ImageShow.jl) package
which facilitates displaying images in `Jupyter` notebooks via
[IJulia.jl](https://github.com/JuliaLang/IJulia.jl).
Finally, one can also obtain a useful preview of an image in the REPL using the
[ImageInTerminal.jl](https://github.com/JuliaImages/ImageInTerminal.jl) package.
However, this package assumes that the terminal uses a monospace font, and tends
not to produce adequate results in a Windows environment.
Another package that is used to illustrate the functionality in
`ImageContrastAdjustment.jl` is
[TestImages.jl](https://github.com/JuliaImages/TestImages.jl), which serves as a
repository of many standard image processing test images.
## Basic usage
Each contrast manipulation algorithm in `ImageContrastAdjustment.jl` is an
[`AbstractHistogramAdjustmentAlgorithm`](@ref
ImageContrastAdjustment.HistogramAdjustmentAPI.AbstractHistogramAdjustmentAlgorithm).
Suppose one wants to enhance the contrast of an image. This can be achieved by
simply choosing an appropriate algorithm and calling [`adjust_histogram`](@ref)
or [`adjust_histogram!`](@ref) on the image. The contrast will be automatically
enhanced.
Let's see a simple demo:
```@example
using TestImages, ImageContrastAdjustment
using FileIO # hide
img = testimage("cameraman")
alg = Equalization(nbins = 256)
img_adjusted = adjust_histogram(img, alg)
save("images/demo.jpg", hcat(img, img_adjusted)) # hide
```
```@raw html
<img src="images/demo.jpg" width="400px" alt="demo image" />
```
This usage reads as "`adjust_histogram` of the image `img` with algorithm `alg`".
For more advanced usage, please check the [function reference](@ref function_reference) page.
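As a further (non-executable) sketch, the other algorithms follow the same calling
convention; the keyword names shown below are assumptions to be checked against the
reference page:

```julia
using TestImages, ImageContrastAdjustment

img = testimage("cameraman")

# in-place variant: overwrites `img` with the adjusted result
adjust_histogram!(img, Equalization(nbins = 256))

# linear stretching (normalization) with default settings
img_stretched = adjust_histogram(img, LinearStretching())

# match the histogram of `img` against a second (reference) image
reference = testimage("mandril_gray")
img_matched = adjust_histogram(img, Matching(targetimg = reference))
```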
## Examples
Below are some examples of contrast adjustment operations that this package facilitates.
```@raw html
<table width="500" border="0" cellpadding="5">
<tr>
<td align="center" valign="center">
<img src="images/contrast_stretching.gif" width="100px" alt="Contrast Stretching" />
<br />
Contrast Stretching
</td>
<td align="center" valign="center">
<img src="images/contrast_stretching_col.gif" width="100px" alt="Contrast Stretching" />
<br />
Contrast Stretching
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/linear_stretching.gif" width="100px" alt="Linear Stretching (Normalization)" />
<br />
Linear Stretching/Normalization
</td>
<td align="center" valign="center">
<img src="images/linear_stretching_col.gif" width="100px" alt="Linear Stretching (Normalization)" />
<br />
Linear Stretching/Normalization
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/gamma_correction.gif" width="100px" alt="Gamma Correction" />
<br />
Gamma Correction
</td>
<td align="center" valign="center">
<img src="images/gamma_correction_col.gif" width="100px" alt="Gamma Correction" />
<br />
Gamma Correction
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/equalization.gif" width="100px" alt="Histogram Equalization" />
<br />
Histogram Equalization
</td>
<td align="center" valign="center">
<img src="images/equalization_col.gif" width="100px" alt="Histogram Equalization" />
<br />
Histogram Equalization
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/adaptive_equalization.gif" width="100px" alt="Contrast Limited Adaptive Histogram Equalization" />
<br />
Contrast Limited Adaptive Histogram Equalization
</td>
<td align="center" valign="center">
<img src="images/adaptive_equalization_col.gif" width="100px" alt="Contrast Limited Adaptive Histogram Equalization" />
<br />
Contrast Limited Adaptive Histogram Equalization
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/midway_equalization.gif" width="100px" alt="Midway Histogram Equalization" />
<br />
Midway Histogram Equalization
</td>
<td align="center" valign="center">
<img src="images/midway_equalization_col.gif" width="100px" alt="Midway Histogram Equalization" />
<br />
Midway Histogram Equalization
</td>
</tr>
<tr>
<td align="center" valign="center">
<img src="images/matching.gif" width="100px" alt="Histogram Matching" />
<br />
Histogram Matching
</td>
<td align="center" valign="center">
<img src="images/matching_col.gif" width="100px" alt="Histogram Matching" />
<br />
Histogram Matching
</td>
</tr>
</table>
```
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.3.12 | eb3d4365a10e3f3ecb3b115e9d12db131d28a386 | docs | 604 | # [Function References](@id function_reference)
```@contents
Pages = ["reference.md"]
Depth = 3
```
## General function
```@docs
adjust_histogram
adjust_histogram!
build_histogram
```
## Algorithms
```@docs
ImageContrastAdjustment.HistogramAdjustmentAPI.AbstractHistogramAdjustmentAlgorithm
```
### AdaptiveEqualization
```@docs
AdaptiveEqualization
```
### ContrastStretching
```@docs
ContrastStretching
```
### Equalization
```@docs
Equalization
```
### LinearStretching
```@docs
LinearStretching
```
### Matching
```@docs
Matching
```
### MidwayEqualization
```@docs
MidwayEqualization
```
| ImageContrastAdjustment | https://github.com/JuliaImages/ImageContrastAdjustment.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 14059 | __precompile__()
module VerTeX
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2018 Michael Reed
export dict2toml, tex2dict, tex2toml, dict2tex, toml2tex, toml2dict
export zathura, latexmk, pdf, texedit
using Pkg, UUIDs, Dates, REPL, Requires
using Pkg.TOML, REPL.TerminalMenus
VERSION < v"1.4" && (using Pkg.Pkg2)
function drawgraph end
global AUTHOR = "anonymous"
checkhome(path::String) = occursin(r"^~/",path) ? joinpath(homedir(),path[3:end]) : path
function relhome(path::String,home::String=homedir())
reg = Regex("(?<=^$(replace(home,'/'=>"\\/")))\\/?\\X+")
return occursin(reg,path) ? '~'*match(reg,path).match : path
end
function preamble(path::String="default.tex",repo::String="julia")
depos = getdepot()
!haskey(depos,repo) && throw(error("did not load preamble, $repo depot not found"))
load = ""
try
open(joinpath(checkhome(depos[repo]),path), "r") do f
load = read(f,String)
end
catch
open(joinpath("..","vtx",path), "r") do f
load = read(f,String)
end
end
dep = repo β "julia" ? "$repo:~:" : ""
return load*"%vtx:$dep$(relhome(path))"
end
function article(str::String,pre::String=preamble()*"\n")
return pre*"\n\\begin{document}\n"*str*"\n\\end{document}"
end
function dict2toml(data::Dict)
io = IOBuffer()
TOML.print(io,data)
return String(take!(io))
end
toml2dict(toml::String) = TOML.parse(toml)
const regtextag = "[[:alnum:] !\"#\$%&'()*+,\\-.\\/:;<=>?@\\[\\]^_`|~]+"
function textagdel(tag::Symbol,tex::String)
join(split(tex,Regex("\n?(\\\\$tag{)"*regtextag*"(})")))
end
function texlocate(tag::Symbol,tex::String,neg::String="none")
out = collect((m.match for m = eachmatch(Regex("(?<=\\\\$tag{)"*regtextag*"(?=})"), tex)))
return isempty(out) ? [neg] : join.(out)
end
function lbllocate(tex::String,neg::String="none")
out = collect((m.match for m = eachmatch(Regex("(?<=\\\\lbl{)"*regtextag*"(}{)"*regtextag*"(?=})"), tex)))
return isempty(out) ? [neg] : replace.(join.(out), Ref("}{"=>":"))
end
tomlocate(tag::String,data::Dict,neg::String="none") = haskey(data,tag) ? data[tag] : neg
setval!(d::Dict,key,val) = haskey(d,key) ? (d[key] = val) : push!(d,key=>val)
addval!(d::Dict,key,val) = haskey(d,key) ? (val β d[key] && push!(d[key],val)) : push!(d,key=>Any[val])
addkey!(d::Dict,key,val,pair) = !haskey(d[key],val) && push!(d[key],val=>pair)
preview_vertex(title,author,date,doc) = @info "$title by $author ($date)\n$doc"
preview_vertex(data::Dict) = preview_vertex(data["title"],data["author"],data["date"],data["tex"])
function checkmerge(a::DateTime,data,title,author,date,doc,msg="Merge conflict detected, proceed?")
val = 2
errors = ["",""]
if a β DateTime(data["revised"])
errors[1] = "VerTeX: unable to merge into \"$(data["dir"])\" ($(data["revised"]))"
errors[2] = "BUFFER: invalid date stamp $a"
end
if errors β ["",""]
@warn "$(errors[1])"
preview_vertex(data)
@warn "$(errors[2])"
preview_vertex(title,author,date,doc)
val = request(msg,RadioMenu(["skip / discard","merge / replace"]))
end
return val
end
checkmerge(a::String,data,title,author,date,doc,msg="Merge conflict detected, proceed?") = checkmerge(DateTime(a),data,title,author,date,doc,msg)
function tex2dict(tex::String,data=nothing,disp=false,sav::Array=[])
tim = Dates.unix2datetime(time())
uid = UUIDs.uuid1()
pd = String.(split(split(tex,"\n\\end{document}")[1],"\n\\begin{document}\n"))
    (pre,doc) = length(pd) == 1 ? ["default",pd[1]] : pd
## locate identifying info
unk = "unknown"
author = texlocate(:author,pre,data β nothing ? data["author"] : unk)[1]
date = texlocate(:date,pre,data β nothing ? data["date"] : unk)[1]
title = texlocate(:title,pre,data β nothing ? data["title"] : unk)[1]
for item β [:author,:date,:title]
pre = textagdel(item,pre)
end
prereg = "%vtx:"*regtextag*"\n?"
occursin(Regex(prereg),pre) && (pre = match(Regex("(?:"*prereg*")\\X+"),pre).match)
pre = replace(pre,r"\n+$"=>"")
## date check
docre = isempty(doc) ? [SubString(doc)] : rsplit(doc,"%rev:";limit=2)
if (length(docre) == 1)
docdc = tim
elseif occursin(r"%vtx:",docre[2])
docdc = tim
else
doc = docre[1]
docdc = DateTime(match(Regex(regtextag*"(?<=\n)?"),docre[2]).match)
end
out = deepcopy(data)
if (data β nothing)
cmv = checkmerge(docdc,data,title,author,date,doc)
if cmv == 0
throw(error("VerTeX unable to proceed due to merge failure"))
elseif cmv < 2
doc = preload(data,true,false)
author = data["author"]
date = data["date"]
title = data["title"]
pre = data["pre"]
end
end
## deconstruct VerTeX
cp = split(doc,r"%extend:((true)|(false))\n?";limit=2)
choice = match(r"(?<=%extend:)((true)|(false))(?=\n)?",doc)
extend = choice β nothing ? Meta.parse(choice.match) : true
compact = !occursin(r"%vtx:",cp[1]) && !((length(cp) > 1) ? extend : true)
remdoc = ""
if compact
doc = cp[1]
elseif choice β nothing
remdoc = "$(cp[2])\n"
if occursin(r"%vtx:",cp[1])
doc = split(cp[1],r"%vtx:")[1]
else
doc = cp[1]
end
else
remdoc = "$doc\n"
doc = split(doc,Regex(prereg);limit=2)[1]
end
occursin(r"%vtx:",cp[1]) && (remdoc = match(Regex(prereg*"\\X+"),docre[1]).match)
doc = join(chomp(doc))
if |(([author,date,title] .== unk)...)
@info "Missing VerTeX metadata for $uid"
println(disp ? (data β nothing ? data["tex"] : doc) : doc)
println("%rev:",disp ? (data β nothing ? data["revised"] : tim) : tim)
print("title: ")
title == unk ? (title = readline()) : println(title)
print("author: ")
author == unk ? (author = readline()) : println(author)
print("date: ")
date == unk ? (date = readline()) : println(date)
end
if out == nothing
out = Dict(
"editor" => AUTHOR,
"author" => author,
"pre" => pre,
"tex" => doc,
"date" => date,
"title" => title,
"created" => "$tim",
"revised" => "$tim",
"uuid" => "$uid",
"version" => ["VerTeX", "v\"0.1.0\""],
"compact" => "$compact") #twins
else
setval!(out,"editor",AUTHOR)
setval!(out,"author",author)
setval!(out,"pre",pre)
setval!(out,"tex",doc)
setval!(out,"date",date)
setval!(out,"title",title)
setval!(out,"edit","$tim")
setval!(out,"compact","$compact")
end
## parse additional vertices
extra = []
comments = []
(data β nothing) && haskey(data,"save") && (sav = data["save"]) #push!(sav,data["save"])
haskey(out,"show") && pop!(out,"show")
haskey(out,"comments") && pop!(out,"comments")
if !compact
while occursin(Regex(prereg),remdoc)
ms = join.(collect((m.match for m = eachmatch(Regex(prereg),remdoc))))[1]
sp = split(remdoc,ms;limit=3)
push!(comments,compact ? "" : join(chomp(sp[1])))
re = rsplit(sp[2],"%rev:";limit=2)
dc = length(re) == 1 ? tim : DateTime(match(Regex(regtextag*"(?<=\n)?"),re[2]).match)
# try to open it, to see if update
df = split(join(match(Regex("(?<=%vtx:)"*regtextag*"(?<=\n)?"),ms).match),":~:";limit=2)
file = join(df[end])
depo = length(df) > 1 ? join(df[1]) : "julia"
ods = nothing
add2q = true
try
for s β sav
if haskey(s,"depot") && haskey(s,"dir") &&
s["depot"] == depo && s["dir"] == file
ods = s
break
end
end
ods == nothing && (ods = load(file,depo))
add2q = DateTime(ods["revised"]) == dc && ods["tex"] β join(chomp(re[1]))
# check date and compare if opened
                # terminal menu if date is not a match
catch
end
ds = tex2dict(pre*"\n\\begin{document}\n"*sp[2]*"\n\\end{document}",ods,!add2q,sav)
push!(extra,repr(!Meta.parse(ds["compact"])))
addval!(out,"show",[ds["uuid"],depo,file])
#addkey!(out,"ids",ds["uuid"],[ds["uuid"], depo, file])
# add to save queue, for when actual save happens
!haskey(ds,"dir") && setval!(ds,"dir",file)
!haskey(ds,"depot") && setval!(ds,"depot",depo)
ins = 0
haskey(out,"save") && for k β 1:length(out["save"])
out["save"][k]["uuid"] == ds["uuid"] && (ins = k; break)
end
add2q && (ins > 0 ? (out["save"][ins] = ds) : addval!(out,"save",ds))
remdoc = join(sp[3])
end
push!(comments,join(chomp(remdoc)))
choice == nothing && popfirst!(comments)
setval!(out,"comments",comments)
setval!(out,"extend",extra)
else
for item β ["show","comments","extend"]
haskey(out,item) && pop!(out,item)
end
end
## double check reference nodes
bins = ["ref","used","deps"]
items = [bins...,"cite","label"] # "refby", "citeby", "depsby"
# update referral lists
for item β items
temp = [(item == "label" ? lbllocate(doc) : []); texlocate(Symbol(item),doc)]
if temp β ["none"]
setval!(out,item,temp)
else
haskey(out,item) && pop!(out,item)
end
end
return out::Dict
end
function preload(data::Dict,extend::Bool,rev::Bool=true)
doc = tomlocate("tex",data)*"\n"
if extend && haskey(data,"comments") && length(data["comments"]) > 0
shift = 0
ls = haskey(data,"show") ? length(data["show"]) : 0
if length(data["comments"]) β ls
if data["comments"][1] β ""
doc *= "%extend:true\n$(data["comments"][1])\n"
else
end
shift += 1
end
if ls > 0
for k β 1:ls
key = data["show"][k]
ta = ""
try
da = nothing
haskey(data,"save") && for s β data["save"]
s["uuid"] == key[1] && (da = s; break)
end
da == nothing && (da = load(key[3],key[2]))
ta = preload(da,Meta.parse(data["extend"][k]))
catch
@warn "could not load $(key[3]) from $(key[2])"
end
dep = data["depot"] β "julia" ? "$(data["depot"]):~:" : ""
vtx = "%vtx:$dep$(key[3])\n"
doc *= join([vtx,ta,"\n",vtx,data["comments"][k+shift]])
end
end
end
return rev ? "$doc%rev:$(data["revised"])" : join(chomp(doc))
end
function dict2tex(data::Dict)
unk = "unknown"
pre = tomlocate("pre",data)
author = tomlocate("author",data,unk)
date = tomlocate("date",data,unk)
title = tomlocate("title",data,unk)
dep = (haskey(data,"depot") && (data["depot"] β "julia")) ? "$(data["depot"]):~:" : ""
reg = "^%vtx:"*regtextag
if occursin(Regex(reg*"\n?"),pre)
prereg = "(?<=%vtx:)"*regtextag*"(?<=\n)?"
df = split(join(match(Regex(prereg),pre).match),":~:";limit=2)
file = join(df[end])
depo = length(df) > 1 ? join(df[1]) : "julia"
pre = preamble(file,depo)*replace(pre,Regex(reg)=>"")
end
tex = pre*"\n"
author β unk && (tex *= "\n\\author{$author}")
date β unk && (tex *= "\n\\date{$date}")
title β unk && (tex *= "\n\\title{$title}")
return article(join(chomp(preload(data,true))),tex)
end
toml2tex(toml::String) = dict2tex(TOML.parse(toml))
tex2toml(tex::String) = dict2toml(tex2dict(tex))
include("depot.jl")
include("search.jl")
include("repl.jl")
function __init__()
haskey(ENV,"AUTHOR") && (global AUTHOR = ENV["AUTHOR"])
# load manifest and dictionary at ini
readmanifest()
readdictionary()
# save manifest and dictionary at end
atexit(() -> (writemanifest(); writedictionary()))
if isdefined(Base, :active_repl)
REPLMode.repl_init(Base.active_repl)
else
atreplinit() do repl
if isinteractive() && repl isa REPL.LineEditREPL
isdefined(repl, :interface) || (repl.interface = REPL.setup_interface(repl))
REPLMode.repl_init(repl)
end
end
end
@require LightGraphs="093fc24a-ae57-5d10-9952-331d41423f4d" begin
import LightGraphs
function makegraph(manifest=manifest,dictionary=dictionary,index=collect(keys(dictionary)))
readmanifest()
readdictionary()
g = LightGraphs.SimpleDiGraph(length(index))
for i β 1:length(index)
key = collect(values(dictionary[index[i]]))[1]
more = manifest[key[2]][key[1]]
if haskey(more,"deps")
for dep β more["deps"]
f = findall(index .== split(dep,':')[1])
!isempty(f) && LightGraphs.add_edge!(g,f[1],i)
end
end
end
return g
end
end
#@require GraphPlot="a2cc645c-3eea-5389-862e-a155d0052231"
#=@require Compose="a81c6b42-2e10-5240-aca2-a61377ecd94b" begin
import LightGraphs, Cairo, GraphPlot
function drawgraph(name="/tmp/vtx-data.pdf",manifest=manifest,dictionary=dictionary,index=collect(keys(dictionary)))
Compose.draw(Compose.PDF(name,32Compose.cm,32Compose.cm),GraphPlot.gplot(makegraph(manifest,dictionary,index),nodelabel=index,layout=GraphPlot.circular_layout))
end
end=#
end
end # module
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 2711 |
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2019 Michael Reed
import ..load, ..save, ..checkmerge, ..readtex, ..writetex, ..tex2dict, ..article, ..manifest, ..readmanifest, ..getdepot, ..readdictionary, ..dictionary, ..preview_vertex, ..searchvtx, ..cdpkg, ..drawgraph
using TerminalMenus
export zathura, latexmk, pdf, texedit
zathura(f::String,o=stdout) = run(`zathura $f`,(devnull,o,stderr),wait=false)
latexmk(f::String,o=stdout) = run(`latexmk -silent -pdf -cd $f`)
function showpdf(str::String,o=stdout)
try
latexmk(str,o)
catch
end
zathura(replace(str,r".tex$"=>".pdf"),o)
end
function pdf(str::String,file::String="doc")
open("/tmp/$file.tex", "w") do f
write(f, article(str))
end
showpdf("/tmp/$file.tex")
end
pdf(data::Dict,o=stdout) = showpdf(writetex(data),o)
function texedit(data::Dict,file::String="/tmp/doc.tex")
haskey(data,"dir") && (file == "/tmp/doc.tex") && (file = data["dir"])
depot = haskey(data,"depot") ? data["depot"] : "julia"
try
old = load(file,depot)
if (old β nothing)
cmv = checkmerge(data["revised"],old,data["title"],data["author"],data["date"],data["tex"],"Memory buffer out of sync with vertex, proceed?")
if cmv == 0
vtxerror("VerTeX unable to proceed due to merge failure")
elseif cmv < 2
@warn "merged into buffer from $path"
data = old
end
end
catch err
throw(err)
end
try
        # use a local name distinct from the imported `load` function called above
        ld = writetex(data,file)
        run(`vim --servername julia $ld`)
        try
            ret = tex2dict(readtex(ld),data)
            return ld == file ? ret : save(ret,file)
catch
return save(data,file)
end
writedictionary()
writemanifest(depot)
catch err
throw(err)
end
end
function texedit(file::String="/tmp/doc.tex")
v = nothing
try
v = load(file)
catch
r = request("$file not found, create?",RadioMenu(["cancel","save"]))
r == 1 && (return nothing)
v = save(tex2dict(article("")),file)
end
return texedit(v,file)
end
function display_manifest(repo)
readmanifest()
    for x β manifest[repo]
data = x[2]
@info "$(data["dir"])"
end
end
function display_manifest()
readmanifest()
g = getdepot()
for key β keys(g)
@info "$key β $(g[key])"
end
end
function display_dictionary()
readdictionary()
for key β keys(dictionary)
x = dictionary[key]
@info "$key => $(join(["$(x[k][3]) β $(x[k][2])" for k β keys(x)],", "))"
end
end
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 3257 |
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2019 Michael Reed
# This file is adapted from Julia. License is MIT: https://julialang.org/license
command_declarations = [
"package" => CommandDeclaration[
[ :kind => CMD_VIM,
:name => "vim",
:short_name => "vi",
:handler => do_vim!,
:arg_count => 1 => 2,
:arg_parser => identity,
:description => "edit with vim",
:help => md"""
vi path
edit with vim
"""
],[ :kind => CMD_HELP,
:name => "help",
:short_name => "?",
:arg_count => 0 => Inf,
:arg_parser => identity,
:completions => complete_help,
:description => "show this message",
:help => md"""
help
List available commands along with short descriptions.
help cmd
If `cmd` is a partial command, display help for all subcommands.
If `cmd` is a full command, display help for `cmd`.
""",
],[ :kind => CMD_PDF,
:name => "pdf",
:short_name => "p",
:handler => do_pdf!,
:arg_count => 1 => 2,
:arg_parser => identity,
:description => "display as pdf",
:help => md"""
pdf path [repo] ...
display as pdf
"""
],[ :kind => CMD_STATUS,
:name => "status",
:short_name => "st",
:handler => do_status!,
:arg_count => 0 => 1,
:arg_parser => identity,
:description => "display manifest",
:help => md"""
st [repo]
display manifest
"""
],[ :kind => CMD_DICT,
:name => "dictionary",
:short_name => "dict",
:handler => do_dict!,
:arg_count => 0 => 0,
:arg_parser => identity,
:description => "display dictionary",
:help => md"""
dict
display dictionary
"""
],[ :kind => CMD_RANGER,
:name => "ranger",
:short_name => "ra",
:handler => do_ranger!,
:arg_count => 0 => 1,
:arg_parser => identity,
:description => "select file to edit from repo",
:help => md"""
ra [repot]
select file to edit from repo
"""
],[ :kind => CMD_PREVIEW,
:name => "preview",
:short_name => "pre",
:handler => do_preview!,
:arg_count => 1 => 2,
:arg_parser => identity,
:description => "select file to preview from repo",
:help => md"""
preview [path] [repo]
select file to preview from repo
"""
],[ :kind => CMD_SEARCH,
:name => "search",
:handler => do_search!,
:arg_count => 1 => Inf,
:arg_parser => identity,
:description => "search",
:help => md"""
search query...
search for results
"""
],[ :kind => CMD_CD,
:name => "cd",
:handler => do_cd!,
:arg_count => 0 => 1,
:arg_parser => identity,
:description => "change directory",
:help => md"""
cd [repo]
change directory
"""
],[ :kind => CMD_CDPKG,
:name => "cdpkg",
:handler => do_cdpkg!,
:arg_count => 1 => 1,
:arg_parser => identity,
:description => "change directory to package source",
:help => md"""
cdpkg repo
change directory to package source
"""
],[ :kind => CMD_GRAPH,
:name => "graph",
:handler => do_graph!,
:arg_count => 0 => 0,
:arg_parser => identity,
:description => "graph relational data",
:help => md"""
graph
graph relational data
"""
],
], #package
] #command_declarations
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 4018 |
# This file is adapted from Julia. License is MIT: https://julialang.org/license
function complete_local_path(s, i1, i2)
cmp = REPL.REPLCompletions.complete_path(s, i2)
completions = filter!(isdir, [REPL.REPLCompletions.completion_text(p) for p in cmp[1]])
return completions, cmp[2], !isempty(completions)
end
#=function complete_installed_package(s, i1, i2, project_opt)
pkgs = project_opt ? API.__installed(PKGMODE_PROJECT) : API.__installed()
pkgs = sort!(collect(keys(filter((p) -> p[2] != nothing, pkgs))))
cmp = filter(cmd -> startswith(cmd, s), pkgs)
return cmp, i1:i2, !isempty(cmp)
end
function complete_remote_package(s, i1, i2)
cmp = String[]
julia_version = VERSION
for reg in Types.registries(;clone_default=false)
data = Types.read_registry(joinpath(reg, "Registry.toml"))
for (uuid, pkginfo) in data["packages"]
name = pkginfo["name"]
if startswith(name, s)
compat_data = Operations.load_package_data_raw(
VersionSpec, joinpath(reg, pkginfo["path"], "Compat.toml"))
supported_julia_versions = VersionSpec(VersionRange[])
for (ver_range, compats) in compat_data
for (compat, v) in compats
if compat == "julia"
union!(supported_julia_versions, VersionSpec(v))
end
end
end
if VERSION in supported_julia_versions
push!(cmp, name)
end
end
end
end
return cmp, i1:i2, !isempty(cmp)
end=#
function complete_help(options, partial)
names = String[]
for cmds in values(super_specs)
append!(names, [spec.canonical_name for spec in values(cmds)])
end
return sort!(unique!(append!(names, collect(keys(super_specs)))))
end
function complete_argument(to_complete, i1, i2, lastcommand, project_opt
)::Tuple{Vector{String},UnitRange{Int},Bool}
if lastcommand == CMD_HELP
completions = filter(x->startswith(x,to_complete), completion_cache.canonical_names)
return completions, i1:i2, !isempty(completions)
#=elseif lastcommand in [CMD_STATUS, CMD_RM, CMD_UP, CMD_TEST, CMD_BUILD, CMD_FREE, CMD_PIN]
return complete_installed_package(to_complete, i1, i2, project_opt)=#
elseif lastcommand in [CMD_ADD, CMD_DEVELOP]
if occursin(Base.Filesystem.path_separator_re, to_complete)
return complete_local_path(to_complete, i1, i2)
#=else
rps = complete_remote_package(to_complete, i1, i2)
lps = complete_local_path(to_complete, i1, i2)
return vcat(rps[1], lps[1]), isempty(rps[1]) ? lps[2] : i1:i2, length(rps[1]) + length(lps[1]) > 0=#
end
end
return String[], 0:-1, false
end
function completions(full, index)::Tuple{Vector{String},UnitRange{Int},Bool}
pre = full[1:index]
if isempty(pre)
return completion_cache.commands, 0:-1, false
end
x = parse(pre; for_completions=true)
if x === nothing # failed parse (invalid command name)
return String[], 0:-1, false
end
(key::Symbol, to_complete::String, spec, proj::Bool) = x
last = split(pre, ' ', keepempty=true)[end]
offset = isempty(last) ? index+1 : last.offset+1
if last != to_complete # require a space before completing next field
return String[], 0:-1, false
end
if key == :arg
return complete_argument(to_complete, offset, index, spec.kind, proj)
end
possible::Vector{String} =
key == :meta ? completion_cache.meta_options :
key == :cmd ? completion_cache.commands :
key == :sub ? completion_cache.subcommands[spec] :
key == :opt ? completion_cache.options[spec.kind] :
String[]
completions = filter(x->startswith(x,to_complete), possible)
return completions, offset:index, !isempty(completions)
end
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 9774 |
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2018 Michael Reed
export cdpkg, @cdpkg
cdpkg(pkg) = cd(dirname(Base.find_package(string(pkg))))
macro cdpkg(pkg)
cdpkg(pkg)
return nothing
end
repos = Dict("julia"=>"~/.julia/vtx/")
manifest = Dict("julia"=>Dict())
dictionary = Dict()
function lookup(ref)
s = split(ref,':';limit=2)
if haskey(dictionary,s[1])
key = length(s) > 1 ? s[2] : ""
return haskey(dictionary[s[1]],key) ? dictionary[s[1]][key] : [nothing]
else
return [nothing]
end
end
function getdepot()
repodat = Dict()
try
open(joinpath(homedir(),".julia/vtx/Depot.toml"), "r") do f
repodat = TOML.parse(read(f,String))
end
catch
end
for key in keys(repos)
push!(repodat,key=>repos[key])
end
return repodat
end
function regdepot(depot,location)
dep = getdepot()
if haskey(dep,depot)
dep[depot] = location
else
push!(dep,depot=>location)
end
try
open(joinpath(homedir(),".julia/vtx/Depot.toml"), "w") do f
write(f, dict2toml(dep))
end
catch
end
end
regpkg(depot,location) = regdepot(string(depot),joinpath(dirname(location),"vtx"))
regcmd = :(println(joinpath(dirname(@__DIR__),"vtx")))
function updateref!(data)
depot = haskey(data,"depot") ? data["depot"] : "julia"
!haskey(manifest[depot],data["uuid"]) && push!(manifest[depot],data["uuid"]=>Dict())
setval!(manifest[depot][data["uuid"]],"dir",data["dir"])
# update manifest and dictionary
for cat in ["label","cite"]
if haskey(data,cat)
setval!(manifest[depot][data["uuid"]],cat,data[cat])
cat β "cite" && for ref in data[cat]
if cat == "label"
give = [data["uuid"],haskey(data,"depot") ? data["depot"] : "julia",data["dir"]]
s = split(ref,':';limit=2)
key = length(s) > 1 ? join(s[2]) : ""
haskey(dictionary,s[1]) ? push!(dictionary[s[1]],key=>give) : push!(dictionary,join(s[1])=>Dict(key=>give))
end
end
end
end
for item in ["ref","used","deps","show"]
if haskey(data,item)
setval!(manifest[depot][data["uuid"]],item,data[item])
else
haskey(manifest[depot][data["uuid"]],item) && pop!(manifest[depot][data["uuid"]],item)
end
end
return nothing
end
function updaterefby!(depot,key)
for cat β ["ref","used","deps","show"]
# identify cross-references
if haskey(manifest[depot][key],cat)
for ref in manifest[depot][key][cat]
if cat β "show"
s = split(ref,':';limit=2)
                    s2 = length(s) > 1 ? s[2] : ""
if haskey(dictionary,s[1]) && haskey(dictionary[s[1]],s2)
addval!(manifest[dictionary[s[1]][s2][2]][dictionary[s[1]][s2][1]],cat*"by",[key,depot,manifest[depot][key]["dir"]])
end
else
if haskey(manifest,ref[2]) && haskey(manifest[ref[2]],ref[1])
addval!(manifest[ref[2]][ref[1]],cat*"by",[key,depot,manifest[depot][key]["dir"]])
end
end
end
end
# remove surplus edges
if haskey(manifest[depot][key],cat*"by")
amt = length(manifest[depot][key][cat*"by"])
k = 1
while k β€ amt
ref = manifest[depot][key][cat*"by"][k]
if haskey(manifest[ref[2]][ref[1]],cat) &&
ref β manifest[ref[2]][ref[1]][cat]
deleteat!(manifest[depot][key][cat*"by"],k)
amt -= 1
else
k += 1
end
end
isempty(manifest[depot][key][cat*"by"]) && pop!(manifest[depot][key],cat*"by")
end
end
return nothing
end
function scan(depot)
depos = getdepot()
!haskey(manifest,depot) && push!(manifest,depot=>Dict())
for (root, dirs, files) in walkdir(checkhome(depos[depot]))
for dir in dirs
for file in readdir(joinpath(root,dir))
data = nothing
if endswith(file, ".vtx")
data = TOML.parsefile(joinpath(root,dir,file))
updateref!(data)
end
end
end
end
end
function scan()
for depot β keys(manifest)
for key β keys(manifest[depot])
updaterefby!(depot,key)
end
end
end
resolve(depot) = haskey(getdepot(),depot) && (scan(depot); scan())
function resolve()
for depot in keys(getdepot())
scan(depot)
end
scan()
end
function save(dat::Dict,path::String;warn=true)
out = deepcopy(dat)
!haskey(out,"depot") && push!(out,"depot"=>"julia")
repo = out["depot"]
depos = getdepot()
!haskey(depos,repo) && (@warn "did not save, $repo depot not found"; return dat)
data = nothing
try
data = load(path,repo)
if (data β nothing)
cmv = checkmerge(dat["revised"],data,dat["title"],dat["author"],dat["date"],dat["tex"],"Save/Overwrite?")
if cmv == 0
throw(error("VerTeX unable to proceed due to merge failure"))
elseif cmv < 2
@warn "skipped saving $path"
return dat
end
end
catch
end
way = joinpath(checkhome(depos[repo]),path)
!isdir(dirname(way)) && mkpath(dirname(way))
if haskey(dat,"dir") && (dat["dir"] β path)
#rm(joinpath(checkhome(depos[repo]),dat["dir"]))
out["dir"] = path
else
push!(out,"dir"=>path)
end
infotxt = "saving VerTeX: $(out["title"])\n"
old = data β nothing ? data : dat
# go through save queue from show list
if haskey(out,"save")
for it β out["save"]
save(it,out["ids"][it["uuid"]][3];warn=warn)
end
pop!(out,"save")
end
haskey(out,"compact") && pop!(out,"compact")
updateref!(out)
updaterefby!(repo,out["uuid"])
if haskey(out,"edit")
setval!(out,"revised",out["edit"])
pop!(out,"edit")
end
if data β nothing
haskey(data,"revised") && pop!(data,"revised")
compare = deepcopy(out)
haskey(compare,"revised") && pop!(compare,"revised")
data == compare && (return out)
end
open(way, "w") do f
write(f, dict2toml(out))
end
warn && (@info infotxt*"saved at $path in $(out["depot"])")
return out
end
function save(dat::Dict;warn=true)
save(dat, haskey(dat,"dir") ? dat["dir"] : dat["uuid"];warn=warn)
end
function save(dat::Dict,path::String,repo::String;warn=true)
out = deepcopy(dat)
    if haskey(dat,"depot")
#rm(joinpath(checkhome(getdepot()[dat["depot"]]),dat["dir"]))
out["depot"] = repo
out["dir"] = path
else
push!(out,"depot"=>repo,"dir"=>path)
end
save(out,path;warn=warn)
end
function load(path::String,repo="julia")
depos = getdepot()
!haskey(depos,repo) && (@warn "did not load, $repo depot not found"; return path)
dat = ""
open(joinpath(checkhome(depos[repo]),path), "r") do f
dat = read(f, String)
end
return TOML.parse(dat)
end
function loadpath(data::Dict,file::String="/tmp/doc.tex")
load = ""
g = getdepot()
if haskey(data,"dir") && (data["depot"] β keys(g))
load = joinpath(checkhome(g[data["depot"]]),data["dir"])
load = replace(load,r".vtx$"=>".tex")
!occursin(r".tex$",load) && (load = load*".tex")
else
load = file
end
return load
end
function writetex(data::Dict,file::String="/tmp/doc.tex")
load = loadpath(data,file)
# check if file actually exists yet, if not create it.
open(load, "w") do f
# check if tex file actually needs to be updated?
write(f, VerTeX.dict2tex(data))
end
return load
end
function readtex(load::String)
out = ""
open(load, "r") do f
out = read(f,String)
end
return out
end
function update(data::Dict)
save(tex2dict(readtex(loadpath(data)),data))
end
function writemanifest(depot)
depos = getdepot()
if haskey(manifest,depot) && haskey(depos,depot)
open(joinpath(checkhome(depos[depot]),"Manifest.toml"), "w") do f
write(f, dict2toml(manifest[depot]))
end
else
@warn "no $depot manifest found in memory"
end
return nothing
end
function writemanifest()
for key β keys(manifest)
writemanifest(key)
end
return nothing
end
function writedictionary()
depos = getdepot()
open(joinpath(checkhome(depos["julia"]),"Dictionary.toml"), "w") do f
write(f, dict2toml(dictionary))
end
return nothing
end
function readmanifest(depot)
depos = getdepot()
if haskey(depos,depot)
dat = ""
try
open(joinpath(checkhome(depos[depot]),"Manifest.toml"), "r") do f
dat = read(f, String)
end
setval!(manifest,depot,TOML.parse(dat))
catch
end
else
@warn "did not load, $depot depot not found"
end
return nothing
end
function readmanifest()
depos = getdepot()
for depot β keys(depos)
readmanifest(depot)
end
return nothing
end
function readdictionary()
depos = getdepot()
dat = ""
try
open(joinpath(checkhome(depos["julia"]),"Dictionary.toml"), "r") do f
dat = read(f, String)
end
global dictionary
dictionary = TOML.parse(dat)
catch
end
return nothing
end
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 10188 | module REPLMode
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2019 Michael Reed
# This file is adapted from Julia. License is MIT: https://julialang.org/license
using Markdown
using UUIDs, Pkg
import REPL
import REPL: LineEdit, REPLCompletions
using Pkg.Types, Pkg.Operations #, Pkg.Display
vtxerror(msg::String...) = throw(PkgError(join(msg)))
#########################
# Specification Structs #
#########################
#import Pkg.REPLMode: OptionSpec, OptionSpecs, ArgSpec, CommandSpec, CommandSpecs, SuperSpecs, QString, unwrap, Option, wrap_option, is_opt, parse_option, Statement, lex, tokenize, core_parse, parse, APIOptions, Command, enforce_option, MiniREPL
#---------#
# Options #
#---------#
const OptionDeclaration = Vector{Pair{Symbol,Any}}
#----------#
# Commands #
#----------#
@enum(CommandKind, CMD_HELP, CMD_VIM, CMD_PDF, CMD_STATUS, CMD_DICT,
CMD_RANGER, CMD_PREVIEW, CMD_SEARCH, CMD_CD, CMD_CDPKG,
CMD_GRAPH)
#=@enum(CommandKind, CMD_HELP, CMD_RM, CMD_ADD, CMD_DEVELOP, CMD_UP,
CMD_STATUS, CMD_TEST, CMD_GC, CMD_BUILD, CMD_PIN,
CMD_FREE, CMD_GENERATE, CMD_RESOLVE, CMD_PRECOMPILE,
CMD_INSTANTIATE, CMD_ACTIVATE, CMD_PREVIEW,
CMD_REGISTRY_ADD, CMD_REGISTRY_RM, CMD_REGISTRY_UP, CMD_REGISTRY_STATUS
)=#
const CommandDeclaration = Vector{Pair{Symbol,Any}}
include("specs.jl")
#############
# Execution #
#############
function do_cmd(repl::REPL.AbstractREPL, input::String; do_rethrow=false)
try
statements = parse(input)
commands = map(Command, statements)
for command in commands
do_cmd!(command, repl)
end
catch err
do_rethrow && rethrow()
if err isa PkgError || err isa ResolverError
Base.display_error(repl.t.err_stream, ErrorException(sprint(showerror, err)), Ptr{Nothing}[])
else
Base.display_error(repl.t.err_stream, err, Base.catch_backtrace())
end
end
end
function do_cmd!(command::Command, repl)
context = Dict{Symbol,Any}(:preview => command.preview)
# REPL specific commands
command.spec.kind == CMD_HELP && return Base.invokelatest(do_help!, command, repl)
# API commands
# TODO is invokelatest still needed?
if applicable(command.spec.handler, context, command.arguments, command.options)
Base.invokelatest(command.spec.handler, context, command.arguments, command.options)
else
Base.invokelatest(command.spec.handler, command.arguments, command.options)
end
end
function parse_command(words::Vector{QString})
statement, word = core_parse(words; only_cmd=true)
if statement.super === nothing && statement.spec === nothing
vtxerror("invalid input: `$word` is not a command")
end
return statement.spec === nothing ? statement.super : statement.spec
end
function do_help!(command::Command, repl::REPL.AbstractREPL)
disp = REPL.REPLDisplay(repl)
if isempty(command.arguments)
Base.display(disp, help)
return
end
help_md = md""
cmd = parse_command(command.arguments)
if cmd isa String
# gather all helps for super spec `cmd`
all_specs = sort!(unique(values(super_specs[cmd]));
by=(spec->spec.canonical_name))
for spec in all_specs
isempty(help_md.content) || push!(help_md.content, md"---")
push!(help_md.content, spec.help)
end
elseif cmd isa CommandSpec
push!(help_md.content, cmd.help)
end
!isempty(command.arguments) && @warn "More than one command specified, only rendering help for first"
Base.display(disp, help_md)
end
raw(a) = [x.raw for x β a]
do_vim!(a,b) = texedit(raw(a)...)
do_pdf!(a,b) = pdf(load(raw(a)...))
do_status!(a,b) = display_manifest(raw(a)...)
do_dict!(a,b) = display_dictionary()
function do_ranger!(a,b)
dir = joinpath(homedir(),".julia","config","vtx")
repo = expanduser(getdepot()[length(a)<1 ? "julia" : a[1].raw])
run(`ranger $repo --choosefile=$dir`)
try
texedit(read(`cat $dir`,String))
run(`rm $dir`)
catch
end
end
do_preview!(a,b) = preview_vertex(load(raw(a)...))
do_search!(a,b) = preview_vertex.(searchvtx([:search],raw(a)))
function do_cd!(a,b)
if length(a)<1
dir = joinpath(homedir(),".julia","config","cd")
run(`ranger $(pwd()) --choosedir=$dir`)
cd(read(`cat $dir`,String))
else
cd(joinpath(expanduser(getdepot()[a[1].raw])))
end
println(pwd())
end
do_cdpkg!(a,b) = (cdpkg(a[1].raw); println(pwd()))
function do_graph!(a,b)
path = joinpath("/tmp","vtx-data.pdf")
drawgraph(path)
zathura(path)
end
######################
# REPL mode creation #
######################
# Provide a string macro pkg"cmd" that can be used in the same way
# as the REPLMode `pkg> cmd`. Useful for testing and in environments
# where we do not have a REPL, e.g. IJulia.
const minirepl = Ref{MiniREPL}()
__init__() = minirepl[] = MiniREPL()
macro vtx_str(str::String)
:($(do_cmd)(minirepl[], $str; do_rethrow=true))
end
vtxstr(str::String) = do_cmd(minirepl[], str; do_rethrow=true)
struct VtxCompletionProvider <: LineEdit.CompletionProvider end
function LineEdit.complete_line(c::VtxCompletionProvider, s)
partial = REPL.beforecursor(s.input_buffer)
full = LineEdit.input_string(s)
ret, range, should_complete = completions(full, lastindex(partial))
return ret, partial[range], should_complete
end
prev_project_file = nothing
prev_project_timestamp = nothing
prev_prefix = ""
function promptf()
#=global prev_project_timestamp, prev_prefix, prev_project_file
project_file = try
Types.find_project_file()
catch
nothing
end=#
prefix = ""
#=if project_file !== nothing
if prev_project_file == project_file && prev_project_timestamp == mtime(project_file)
prefix = prev_prefix
else
project = try
Types.read_project(project_file)
catch
nothing
end
if project !== nothing
projname = project.name
name = projname !== nothing ? projname : basename(dirname(project_file))
prefix = string("(", name, ") ")
prev_prefix = prefix
prev_project_timestamp = mtime(project_file)
prev_project_file = project_file
end
end
end=#
return prefix * "vtx> "
end
# Set up the repl Pkg REPLMode
function create_mode(repl, main)
vtx_mode = LineEdit.Prompt(promptf;
prompt_prefix = repl.options.hascolor ? Base.text_colors[:white] : "",
prompt_suffix = "",
complete = VtxCompletionProvider(),
sticky = true)
vtx_mode.repl = repl
hp = main.hist
hp.mode_mapping[:pkg] = vtx_mode
vtx_mode.hist = hp
search_prompt, skeymap = LineEdit.setup_search_keymap(hp)
prefix_prompt, prefix_keymap = LineEdit.setup_prefix_keymap(hp, vtx_mode)
vtx_mode.on_done = (s, buf, ok) -> begin
ok || return REPL.transition(s, :abort)
input = String(take!(buf))
REPL.reset(repl)
do_cmd(repl, input)
REPL.prepare_next(repl)
REPL.reset_state(s)
s.current_mode.sticky || REPL.transition(s, main)
end
mk = REPL.mode_keymap(main)
shell_mode = nothing
for mode in Base.active_repl.interface.modes
if mode isa LineEdit.Prompt
mode.prompt == "shell> " && (shell_mode = mode)
end
end
repl_keymap = Dict()
if shell_mode != nothing
repl_keymap[';'] = function (s,o...)
if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
LineEdit.transition(s, shell_mode) do
LineEdit.state(s, shell_mode).input_buffer = buf
end
else
LineEdit.edit_insert(s, ';')
end
end
end
b = Dict{Any,Any}[
skeymap, repl_keymap, mk, prefix_keymap, LineEdit.history_keymap,
LineEdit.default_keymap, LineEdit.escape_defaults
]
vtx_mode.keymap_dict = LineEdit.keymap(b)
return vtx_mode
end
function repl_init(repl)
main_mode = repl.interface.modes[1]
vtx_mode = create_mode(repl, main_mode)
push!(repl.interface.modes, vtx_mode)
keymap = Dict{Any,Any}(
',' => function (s,args...)
if isempty(s) || position(LineEdit.buffer(s)) == 0
buf = copy(LineEdit.buffer(s))
LineEdit.transition(s, vtx_mode) do
LineEdit.state(s, vtx_mode).input_buffer = buf
end
else
LineEdit.edit_insert(s, ',')
end
end
)
main_mode.keymap_dict = LineEdit.keymap_merge(main_mode.keymap_dict, keymap)
return
end
########
# SPEC #
########
include("completions.jl")
include("args.jl")
include("cmd.jl")
super_specs = SuperSpecs(command_declarations)
########
# HELP #
########
function canonical_names()
# add "package" commands
xs = [(spec.canonical_name => spec) for spec in unique(values(super_specs["package"]))]
sort!(xs, by=first)
# add other super commands, e.g. "registry"
for (super, specs) in super_specs
super != "package" || continue # skip "package"
temp = [(join([super, spec.canonical_name], " ") => spec) for spec in unique(values(specs))]
append!(xs, sort!(temp, by=first))
end
return xs
end
function gen_help()
help = md"""
**Welcome to the VerTeX REPL-mode**. To return to the `julia>` prompt, either press
backspace when the input line is empty or press Ctrl+C.
**Synopsis**
vtx> cmd [opts] [args]
Multiple commands can be given on the same line by interleaving a `;` between the commands.
**Commands**
"""
for (command, spec) in canonical_names()
push!(help.content, Markdown.parse("`$command`: $(spec.description)"))
end
return help
end
const help = gen_help()
end #module
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 1886 | export searchvtx
# recursive function for applying search criteria
function keycheck(data::Dict{<:Any,<:Any},str::Array{String,1},mode::Array{Symbol,1})
found = false
for key in keys(data)
if :deps in mode
for s in str
(key == s) && (found = true)
end
end
if ((:search in mode) || (Symbol(key) in mode)) && (typeof(data[key]) == String)
for s in lowercase.(str)
occursin(s,lowercase(data[key])) && (found = true)
end
end
if (((key == "label") && (:label in mode)) ||
((key == "ref") && (:ref in mode)))
for s in str
for g in data[key]
(g == s) && (found = true)
end
end
end
if (key β "ids" ) && (typeof(data[key]) <: Dict{<:Any,<:Any})
keycheck(data[key],str,mode) && (found = true)
end
end
return found
end
# directory search for VerTeX toml
function searchvtx(mode::Array{Symbol,1},str::Array{String,1})
list = Dict[]
depos = getdepot()
for depot in keys(depos)
for (root, dirs, files) in walkdir(checkhome(depos[depot]))
for dir in dirs
for file in readdir(joinpath(root,dir))
found = false
data = nothing
if endswith(file, ".vtx")
data = TOML.parsefile(joinpath(root,dir,file))
if keycheck(data,str,mode)
found = true
end
end
found && push!(list,data)
end
end
end
end
return list
end
searchvtx(mode::Symbol,str::String...) = searchvtx([mode],collect(str))
searchvtx(str::String,mode::Symbol...) = searchvtx(collect(mode),[str])
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 12636 |
# This file is part of VerTeX.jl. It is licensed under the MIT license
# Copyright (C) 2019 Michael Reed
# This file is adapted from Julia. License is MIT: https://julialang.org/license
#---------#
# Options #
#---------#
struct OptionSpec
name::String
short_name::Union{Nothing,String}
api::Pair{Symbol, Any}
takes_arg::Bool
end
# TODO assert names matching lex regex
# assert now so that you don't fail at user time
# see function `REPLMode.APIOptions`
function OptionSpec(;name::String,
short_name::Union{Nothing,String}=nothing,
takes_arg::Bool=false,
api::Pair{Symbol,<:Any})::OptionSpec
takes_arg && @assert hasmethod(api.second, Tuple{String})
return OptionSpec(name, short_name, api, takes_arg)
end
function OptionSpecs(decs::Vector{OptionDeclaration})::Dict{String, OptionSpec}
specs = Dict()
for x in decs
opt_spec = OptionSpec(;x...)
@assert get(specs, opt_spec.name, nothing) === nothing # don't overwrite
specs[opt_spec.name] = opt_spec
if opt_spec.short_name !== nothing
@assert get(specs, opt_spec.short_name, nothing) === nothing # don't overwrite
specs[opt_spec.short_name] = opt_spec
end
end
return specs
end
#-----------#
# Arguments #
#-----------#
struct ArgSpec
count::Pair
parser::Function
end
struct CommandSpec
kind::CommandKind
canonical_name::String
short_name::Union{Nothing,String}
handler::Union{Nothing,Function}
argument_spec::ArgSpec
option_specs::Dict{String,OptionSpec}
completions::Union{Nothing,Function}
description::String
help::Union{Nothing,Markdown.MD}
end
function CommandSpec(;kind::Union{Nothing,CommandKind} = nothing,
name::Union{Nothing,String} = nothing,
short_name::Union{Nothing,String} = nothing,
handler::Union{Nothing,Function} = nothing,
option_spec::Vector{OptionDeclaration} = OptionDeclaration[],
help::Union{Nothing,Markdown.MD} = nothing,
description::Union{Nothing,String} = nothing,
completions::Union{Nothing,Function} = nothing,
arg_count::Pair = (0=>0),
arg_parser::Function = unwrap,
)::CommandSpec
@assert kind !== nothing "Register and specify a `CommandKind`"
@assert name !== nothing "Supply a canonical name"
@assert description !== nothing "Supply a description"
# TODO assert isapplicable completions dict, string
return CommandSpec(kind, name, short_name, handler, ArgSpec(arg_count, arg_parser),
OptionSpecs(option_spec), completions, description, help)
end
function CommandSpecs(declarations::Vector{CommandDeclaration})::Dict{String,CommandSpec}
specs = Dict()
for dec in declarations
spec = CommandSpec(;dec...)
@assert !haskey(specs, spec.canonical_name) "duplicate spec entry"
specs[spec.canonical_name] = spec
if spec.short_name !== nothing
@assert !haskey(specs, spec.short_name) "duplicate spec entry"
specs[spec.short_name] = spec
end
end
return specs
end
function SuperSpecs(compound_commands)::Dict{String,Dict{String,CommandSpec}}
super_specs = Dict()
for x in compound_commands
name = x.first
spec = CommandSpecs(x.second)
@assert !haskey(super_specs, name) "duplicate super spec entry"
super_specs[name] = spec
end
return super_specs
end
###########
# Parsing #
###########
# QString: helper struct for retaining quote information
struct QString
raw::String
isquoted::Bool
end
unwrap(xs::Vector{QString}) = map(x -> x.raw, xs)
#---------#
# Options #
#---------#
struct Option
val::String
argument::Union{Nothing,String}
Option(val::AbstractString) = new(val, nothing)
Option(val::AbstractString, arg::Union{Nothing,String}) = new(val, arg)
end
Base.show(io::IO, opt::Option) = print(io, "--$(opt.val)", opt.argument == nothing ? "" : "=$(opt.argument)")
wrap_option(option::String) = length(option) == 1 ? "-$option" : "--$option"
is_opt(word::AbstractString) = first(word) == '-' && word != "-"
function parse_option(word::AbstractString)::Option
m = match(r"^(?: -([a-z]) | --([a-z]{2,})(?:\s*=\s*(\S*))? )$"ix, word)
m === nothing && vtxerror("malformed option: ", repr(word))
option_name = m.captures[1] !== nothing ? m.captures[1] : m.captures[2]
option_arg = m.captures[3] === nothing ? nothing : String(m.captures[3])
return Option(option_name, option_arg)
end
#-----------#
# Statement #
#-----------#
# Statement: text-based representation of a command
Base.@kwdef mutable struct Statement
super::Union{Nothing,String} = nothing
spec::Union{Nothing,CommandSpec} = nothing
options::Union{Vector{Option},Vector{String}} = String[]
arguments::Vector{QString} = QString[]
preview::Bool = false
end
function lex(cmd::String)::Vector{QString}
in_doublequote = false
in_singlequote = false
qstrings = QString[]
token_in_progress = Char[]
push_token!(is_quoted) = begin
push!(qstrings, QString(String(token_in_progress), is_quoted))
empty!(token_in_progress)
end
for c in cmd
if c == '"'
if in_singlequote # raw char
push!(token_in_progress, c)
else # delimiter
in_doublequote ? push_token!(true) : push_token!(false)
in_doublequote = !in_doublequote
end
elseif c == '\''
if in_doublequote # raw char
push!(token_in_progress, c)
else # delimiter
in_singlequote ? push_token!(true) : push_token!(false)
in_singlequote = !in_singlequote
end
elseif c == ' '
if in_doublequote || in_singlequote # raw char
push!(token_in_progress, c)
else # delimiter
push_token!(false)
end
elseif c == ';'
if in_doublequote || in_singlequote # raw char
push!(token_in_progress, c)
else # special delimiter
push_token!(false)
push!(qstrings, QString(";", false))
end
else
push!(token_in_progress, c)
end
end
(in_doublequote || in_singlequote) ? vtxerror("unterminated quote") : push_token!(false)
# to avoid complexity in the main loop, empty tokens are allowed above and
# filtered out before returning
return filter(x->!isempty(x.raw), qstrings)
end
function tokenize(cmd::String)
cmd = replace(replace(cmd, "\r\n" => "; "), "\n" => "; ") # for multiline commands
qstrings = lex(cmd)
statements = foldl(qstrings; init=[QString[]]) do collection, next
(next.raw == ";" && !next.isquoted) ?
push!(collection, QString[]) :
push!(collection[end], next)
return collection
end
return statements
end
function core_parse(words::Vector{QString}; only_cmd=false)
statement = Statement()
word = nothing
function next_word!()
isempty(words) && return false
word = popfirst!(words)
return true
end
# begin parsing
next_word!() || return statement, ((word === nothing) ? nothing : word.raw)
if word.raw == "preview"
statement.preview = true
next_word!() || return statement, word.raw
end
# handle `?` alias for help
# It is special in that it requires no space between command and args
if word.raw[1]=='?' && !word.isquoted
length(word.raw) > 1 && pushfirst!(words, QString(word.raw[2:end],false))
word = QString("?", false)
end
# determine command
super = get(super_specs, word.raw, nothing)
if super !== nothing # explicit
statement.super = word.raw
next_word!() || return statement, word.raw
command = get(super, word.raw, nothing)
command !== nothing || return statement, word.raw
else # try implicit package
super = super_specs["package"]
command = get(super, word.raw, nothing)
command !== nothing || return statement, word.raw
end
statement.spec = command
only_cmd && return statement, word.raw # hack to hook in `help` command
next_word!() || return statement, word.raw
# full option parsing is delayed so that the completions parser can use the raw string
while is_opt(word.raw)
push!(statement.options, word.raw)
next_word!() || return statement, word.raw
end
pushfirst!(words, word)
statement.arguments = words
return statement, words[end].raw
end
parse(input::String) =
map(Base.Iterators.filter(!isempty, tokenize(input))) do words
statement, _ = core_parse(words)
statement.spec === nothing && vtxerror("Could not determine command")
statement.options = map(parse_option, statement.options)
statement
end
#------------#
# APIOptions #
#------------#
const APIOptions = Dict{Symbol, Any}
function APIOptions(options::Vector{Option},
specs::Dict{String, OptionSpec},
)::APIOptions
api_options = Dict{Symbol, Any}()
for option in options
spec = specs[option.val]
api_options[spec.api.first] = spec.takes_arg ?
spec.api.second(option.argument) :
spec.api.second
end
return api_options
end
Context!(ctx::APIOptions)::Context = Types.Context!(collect(ctx))
#---------#
# Command #
#---------#
Base.@kwdef struct Command
spec::Union{Nothing,CommandSpec} = nothing
options::APIOptions = APIOptions()
arguments::Vector = []
preview::Bool = false
end
function enforce_option(option::Option, specs::Dict{String,OptionSpec})
spec = get(specs, option.val, nothing)
spec !== nothing || vtxerror("option '$(option.val)' is not a valid option")
if spec.takes_arg
option.argument !== nothing ||
vtxerror("option '$(option.val)' expects an argument, but no argument given")
else # option is a switch
option.argument === nothing ||
vtxerror("option '$(option.val)' does not take an argument, but '$(option.argument)' given")
end
end
"""
checks:
- options are understood by the given command
- options do not conflict (e.g. `rm --project --manifest`)
- options which take an argument are given arguments
- options which do not take arguments are not given arguments
"""
function enforce_option(options::Vector{Option}, specs::Dict{String,OptionSpec})
unique_keys = Symbol[]
get_key(opt::Option) = specs[opt.val].api.first
# per option checking
foreach(x->enforce_option(x,specs), options)
# checking for compatible options
for opt in options
key = get_key(opt)
if key in unique_keys
conflicting = filter(opt->get_key(opt) == key, options)
vtxerror("Conflicting options: $conflicting")
else
push!(unique_keys, key)
end
end
end
"""
Final parsing (and checking) step.
This step is distinct from `parse` in that it relies on the command specifications.
"""
function Command(statement::Statement)::Command
# arguments
arg_spec = statement.spec.argument_spec
arguments = arg_spec.parser(statement.arguments)
if !(arg_spec.count.first <= length(arguments) <= arg_spec.count.second)
vtxerror("Wrong number of arguments")
end
# options
opt_spec = statement.spec.option_specs
enforce_option(statement.options, opt_spec)
options = APIOptions(statement.options, opt_spec)
return Command(statement.spec, options, arguments, statement.preview)
end
######################
# REPL mode creation #
######################
# Provide a string macro pkg"cmd" that can be used in the same way
# as the REPLMode `pkg> cmd`. Useful for testing and in environments
# where we do not have a REPL, e.g. IJulia.
struct MiniREPL <: REPL.AbstractREPL
display::TextDisplay
t::REPL.Terminals.TTYTerminal
end
function MiniREPL()
MiniREPL(TextDisplay(stdout), REPL.Terminals.TTYTerminal(get(ENV, "TERM", Sys.iswindows() ? "" : "dumb"), stdin, stdout, stderr))
end
REPL.REPLDisplay(repl::MiniREPL) = repl.display
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | code | 199 | using VerTeX
using Test
# write your own tests here
@test (x = VerTeX.article("",VerTeX.preamble()*"\\author{x}\n\\date{x}\n\\title{x}\n"); typeof(x) == typeof(VerTeX.dict2tex(VerTeX.tex2dict(x))))
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.1.1 | 71ee73ef11f7d573f4171716730615879a75c750 | docs | 8494 | # VerTeX.jl
*Typeset scattered graph data rewriter based on LaTeX nodes*
[](https://zenodo.org/badge/latestdoi/124144717)
[](https://travis-ci.org/chakravala/VerTeX.jl)
[](https://ci.appveyor.com/project/chakravala/vertex-jl/branch/master)
[](https://coveralls.io/github/chakravala/VerTeX.jl?branch=master)
[](http://codecov.io/github/chakravala/VerTeX.jl?branch=master)
[](https://liberapay.com/chakravala)
For now, this project is a prototype concept for maintaining a body of research and citations via a computational graph database.
The `VerTeX` typeset scattered graph data rewriter is based on a new graph data format called VerTeX, which parses and generates LaTeX documents from nodes.
Current specifications are concerned with how to construct new documents from theorems and definitions using graph data.
This enables research collaborators to maintain databases of LaTeX nodes. The `VerTeX` julia package automatically parses this database of LaTeX nodes to extract citations and references.
This system can also generate graph diagrams depicting the inter-relationships and dependencies of definitions, theorems, calculations, references, and results.
For convenience, the `vtx>` REPL can be used by pressing the `,` key with commands such as `help,vim,pdf,status,dictionary,ranger,preview,search,cd,cdpkg`.
The REPL code was adapted and modified from the REPL code of [Pkg.jl](https://github.com/JuliaLang/Pkg.jl) using their [MIT](https://julialang.org/license) Julia license.
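A short session might look as follows (a sketch only; output is omitted and the file path is merely illustrative):

```Julia
julia> # press `,` at an empty julia> prompt to enter the VerTeX REPL

vtx> help               # list the available commands

vtx> status             # display the manifest of known depots

vtx> search prime       # full-text search across stored nodes

vtx> vim test/pnt.vtx   # edit a node as a regular LaTeX document
```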
#### Requirements
The general API is functional out of the box. To use some of the additional terminal user interface features from the REPL, the following unix-like programs are required:
* [vim](https://github.com/vim/vim) for editing nodes as LaTeX documents
* [vimtex](https://github.com/lervag/vimtex) plugin for `vim` for compiling and preview
* [latexmk](http://www.phys.psu.edu/~collins/software/latexmk-jcc/) for compiling LaTeX to PDF formats
* [zathura](https://git.pwmt.org/pwmt/zathura) for viewing PDF output
* [ranger](https://github.com/ranger/ranger) for browsing directories
See some of [chakravala's dot files](https://github.com/chakravala/dotfiles) for examples of `startup.jl`, `.vimrc`, `.latexmkrc`, `zathurarc`.
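These programs are what the `pdf` and `texedit` helpers shell out to. For example (a sketch, assuming `latexmk` and `zathura` are on the `PATH` and the default preamble can be located):

```Julia
using VerTeX

# writes /tmp/doc.tex, compiles it with latexmk, and opens the result in zathura
pdf("hello world")
```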
## Serialization of VerTeX node data
The format is not specific to any kind of file extension or way of saving, as the format is defined only by what data is saved.
Therefore, `VerTeX` data can be saved in any type of serialization format the database maintainer wants to choose.
To start with, the TOML format has been implemented.
An example `TOML` file generated by `VerTeX` is
```TOML
author = "example"
pre = "%vtx:~/.julia/v0.7/JuliaTeX/vtx/default.tex"
revised = "2018-03-06T20:00:25.559"
uuid = "e87e02c0-2178-11e8-1787-d7c816143f3c"
created = "2018-03-06T19:59:41.514"
title = "testing"
editor = "Person Nameson"
date = "2018"
version = ["VerTeX", "v\"0.1.0\""]
tex = "hello world"
```
These are the main fields for any VerTeX data file:
* **pre** is the LaTeX document preamble data (what packages to load, etc)
* **title** is the title of the VerTeX file and also the `\title{}` field from latex doc
* **author** is the creator of the content (simultaneously it is `\author{}` field of latex doc)
* **date** is the latex doc `\date{}` field
* **tex** is the main body of the LaTeX content for the VerTeX file
* **uuid** is a unique identifier (not necessarily cryptographically secure, but it can be)
* **created** is the date of creation of the uuid
* **revised** is the last editing date and time UTC
* **editor** is the person who was editing the VerTeX file
* **version** is the VerTeX version data
* **depot** is the repository name
There are more data fields envisioned which are not implemented in the prototype yet.
The data from this example TOML file results in the following LaTeX document when combined:
```LaTeX
\documentclass[]{article}
\usepackage[active,tightpage]{preview}
\setlength\PreviewBorder{7.77pt}
\usepackage{varwidth}
\AtBeginDocument{\begin{preview}\begin{varwidth}{\linewidth}}
\AtEndDocument{\end{varwidth}\end{preview}}
%vtx:~/.julia/v0.7/JuliaTeX/vtx/default.tex
\title{testing}
\author{example}
\date{2018}
\begin{document}
hello world
\end{document}
```
The program automatically handles the conversion from TOML to LaTeX and vice versa.
Suppose you have some mathematical data (e.g. a theorem, an example, or a proof) and you wish to categorize it in a database. Then the LaTeX form of the data can be converted and stored away in the TOML data format.
Thus it becomes possible to retrieve the database file; automatically convert it into a LaTeX document with all the headers; then edit it as a LaTeX document in an editor; and finally store the update in the TOML data format automatically when the editor is closed.
Hence, edits are automatically made available for search and other features.
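The same conversion can also be driven directly from Julia; the following is a minimal sketch of the round trip using the `load`, `dict2tex`, and `tex2dict` helpers (the file path refers to the example node discussed below):
```Julia
julia> using VerTeX

julia> d = VerTeX.load("testdir/pnt.vtx")   # read the TOML data into a Dict

julia> tex = VerTeX.dict2tex(d)             # assemble the full LaTeX document

julia> d2 = VerTeX.tex2dict(tex)            # parse an edited document back into a Dict

julia> VerTeX.save(d2)                      # store the update in the TOML data format
```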
### Specifying and extracting relational meta-data
There is more relational meta-data that can be extracted, which will be investigated.
Specifically, it is possible to automatically extract relational edge data (as well as automatically erase it properly if necessary).
It works as follows:
In a local directory somewhere, suppose I have a `vtx` file stored that holds some `key => value` data, which can be loaded using the VerTeX program.
```Julia
julia> using VerTeX
julia> f = VerTeX.load("testdir/pnt.vtx")
Dict{String,Any} with 14 entries:
"label" => ["PNT"]
"pre" => "%vtx:~/.julia/v0.7/VerTeX/vtx/default.tex"
"depot" => "julia"
"author" => "Gauss"
"created" => "2018-03-08T20:04:13.151"
"editor" => "Person Nameson"
"version" => ["VerTeX", "v\"0.1.0\""]
"tex" => "\$\$ \\lim_{x\\rightarrow +\\infty} \\frac{\\pi(x)}{\\int_2^x\\frac{du}{\\log(β¦
"ids" => Dict{String,Any}()
"date" => "unknown"
"revised" => "2018-03-09T15:12:09.635"
"uuid" => "df3c6ade-230b-11e8-09d3-1b9aec48cc35"
"title" => "Prime Number Theorem"
"dir" => "test/pnt.vtx"
```
In this case, it is a statement of the Prime Number Theorem by Gauss (simple example).
The vertex data was generated after editing the information with `vtx> vim test/pnt.vtx` as a regular LaTeX document:
```LaTeX
\documentclass[]{article}
% hidden preamble stuff not worth showing
\newcommand{\deps}[1]{} % VerTeX dependencies
%vtx:~/.julia/v0.7/VerTeX/vtx/default.tex
\author{Gauss}
\title{Prime Number Theorem}
\begin{document}
$$ \lim_{x\rightarrow +\infty} \frac{\pi(x)}{\int_2^x\frac{du}{\log(u)}} = 1 $$
This is the PNT\label{PNT}.
\end{document}
```
This `.tex` file is converted by VerTeX into the above `key => value` format, and vice versa, so making changes to the graph database is done by simply editing the `.tex` files as if they were regular LaTeX documents (with background scripts).
Now if you save another VerTeX which references the prime number theorem using `\ref{PNT}` then the VerTeX system will automatically update both the new VerTeX and also the other VerTeX file containing the referred to prime number theorem with a UUID to mark the reference.
```Julia
julia> VerTeX.save(ans)
┌ Info: saving VerTeX: Prime Number Theorem
└ testdir/pnt.vtx saved in julia
┌ Info: saving VerTeX: a note on pnt
│ updated \ref{PNT}
│ at testdir/pnt.vtx in julia
└ testdir/note.vtx saved in julia
```
Note that an additional VerTeX at `testdir/note.vtx` is updated to track the reference.
This means that theorems and definitions can be tagged with `\label{}` and `\ref{}` to maintain the connections between the VerTeX files automatically.
All one has to do is edit the LaTeX files, save them as VerTeX, and once all is saved all of the VerTeX data already contains all of the graph edges, ready to be used for analysis.
This is going to make mapping out mathematical theories into graphs superbly easy and useful!
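As a rough sketch of what such an analysis could look like, the snippet below gathers `(uuid, label)` edge pairs from every node in a directory; the helper is hypothetical (not part of VerTeX) and it assumes that the keys of the `ids` field are the labels a node refers to, so adjust it to the actual field layout:
```Julia
using VerTeX

# Hypothetical helper: collect (uuid, referenced label) pairs from a
# directory of .vtx files, treating the keys of "ids" as referenced labels.
function collect_edges(dir::AbstractString)
    edges = Tuple{String,String}[]
    for name in readdir(dir)
        endswith(name, ".vtx") || continue
        node = VerTeX.load(joinpath(dir, name))
        for ref in keys(get(node, "ids", Dict{String,Any}()))
            push!(edges, (node["uuid"], String(ref)))
        end
    end
    return edges
end

collect_edges("testdir")   # e.g. one edge from the note to the PNT label
```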
In order to extend it to a conversation / email system, all one needs to do is add a list of receivers / recipients to a VerTeX, and it is now a letter between authors.
| VerTeX | https://github.com/chakravala/VerTeX.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 2429 | ##### Beginning of file
@info("Importing the MirrorUpdater module...")
import MirrorUpdater
import TimeZones
@info("Reading config files...")
include(joinpath("config","preferences","bitbucket.jl",))
include(joinpath("config","preferences","enabled-providers.jl",))
include(joinpath("config","preferences","gitlab.jl",))
include(joinpath("config","preferences","github.jl",))
include(joinpath("config","preferences","time-zone.jl",))
include(joinpath("config","repositories","additional-repos.jl",))
include(joinpath("config","repositories",
"do-not-push-to-these-destinations.jl",))
include(joinpath("config","repositories",
"do-not-try-url-list.jl",))
include(joinpath("config","repositories","registries.jl",))
include(joinpath("config","repositories",
"try-but-allow-failures-url-list.jl",))
git_hosting_providers = Any[]
if GITHUB_ENABLED
const github_provider =
MirrorUpdater.Hosts.GitHubHost.new_github_session(
;
github_organization = GITHUB_ORGANIZATION,
github_bot_username = GITHUB_BOT_USERNAME,
github_bot_personal_access_token = GITHUB_BOT_PERSONAL_ACCESS_TOKEN,
)
push!(git_hosting_providers, github_provider)
end
if GITLAB_ENABLED
const gitlab_provider =
MirrorUpdater.Hosts.GitLabHost.new_gitlab_session(
;
gitlab_group = GITLAB_GROUP,
gitlab_bot_username = GITLAB_BOT_USERNAME,
gitlab_bot_personal_access_token = GITLAB_BOT_PERSONAL_ACCESS_TOKEN,
)
push!(git_hosting_providers, gitlab_provider)
end
if BITBUCKET_ENABLED
const bitbucket_provider =
MirrorUpdater.Hosts.BitbucketHost.new_bitbucket_session(
;
bitbucket_team = BITBUCKET_TEAM,
bitbucket_bot_username = BITBUCKET_BOT_USERNAME,
bitbucket_bot_app_password = BITBUCKET_BOT_APP_PASSWORD,
)
push!(git_hosting_providers, bitbucket_provider)
end
MirrorUpdater.CommandLine.run_mirror_updater_command_line!!(
;
arguments = ARGS,
git_hosting_providers = git_hosting_providers,
registry_list = REGISTRY_LIST,
additional_repos = ADDITIONAL_REPOS,
do_not_try_url_list = DO_NOT_TRY_URL_LIST,
do_not_push_to_these_destinations = DO_NOT_PUSH_TO_THESE_DESTINATIONS,
try_but_allow_failures_url_list = TRY_BUT_ALLOW_FAILURES_URL_LIST,
time_zone = TIME_ZONE,
)
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 362 | import Pkg
try
Pkg.add("Coverage")
catch e1
@warn(
string("Ignoring exception e1:"),
e1,
)
end
import Coverage
import MirrorUpdater
cd(MirrorUpdater.package_directory())
try
Coverage.Codecov.submit(Coverage.Codecov.process_folder())
catch e2
@warn(
string("Ignoring exception e2:"),
e2,
)
end
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 266 | import Pkg
try
Pkg.add("Coverage")
catch e1
@warn(
string("Ignoring exception e1:"),
e1,
)
end
import Coverage
import MirrorUpdater
cd(MirrorUpdater.package_directory())
Coverage.Codecov.submit(Coverage.Codecov.process_folder())
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 174 | const BITBUCKET_TEAM = "UnofficialJuliaMirror"
const BITBUCKET_BOT_USERNAME = "UnofficialJuliaMirrorBot"
const BITBUCKET_BOT_APP_PASSWORD = ENV["BITBUCKET_BOT_APP_PASSWORD"]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 87 | const GITHUB_ENABLED = true
const GITLAB_ENABLED = true
const BITBUCKET_ENABLED = true
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 188 | const GITHUB_ORGANIZATION = "UnofficialJuliaMirror"
const GITHUB_BOT_USERNAME = "UnofficialJuliaMirrorBot"
const GITHUB_BOT_PERSONAL_ACCESS_TOKEN = ENV["GITHUB_BOT_PERSONAL_ACCESS_TOKEN"]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 181 | const GITLAB_GROUP = "UnofficialJuliaMirror"
const GITLAB_BOT_USERNAME = "UnofficialJuliaMirrorBot"
const GITLAB_BOT_PERSONAL_ACCESS_TOKEN = ENV["GITLAB_BOT_PERSONAL_ACCESS_TOKEN"]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 74 | import TimeZones
const TIME_ZONE = TimeZones.TimeZone("America/New_York")
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 557 | include("juliacomputing-repos.jl")
include("julialang-repos.jl")
include("miscellaneous-gpu-related-repos.jl")
include("miscellaneous-julia-related-repos.jl")
include("miscellaneous-ml-related-repos.jl")
include("unregistered-packages.jl")
const ADDITIONAL_REPOS = convert(
Vector{MirrorUpdater.Types.SrcDestPair},
vcat(
JULIACOMPUTING_REPOS,
JULIALANG_REPOS,
MISCELLANEOUS_GPU_RELATED_REPOS,
MISCELLANEOUS_JULIA_RELATED_REPOS,
MISCELLANEOUS_ML_RELATED_REPOS,
UNREGISTERED_PACKAGES,
),
)
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 2149 | const BROKEN_URL_LIST = String[
"git://git.colberg.org/OnlineMoments.jl.git",
"git://gitlab.com/wavexx/Expect.jl.git",
"git://gitlab.com/wavexx/Polyglot.jl.git",
"https://github.com/Datseris/DynamicalBilliardsPlotting.jl.git",
"https://github.com/JuliaComputing/StringArrays.jl.git",
"https://github.com/JuliaGizmos/React.jl.git",
"https://github.com/JuliaGNSS/PhasedArrayTracking.jl.git",
"https://github.com/JuliaMPC/MichiganAutonomousVehicles.jl.git",
"https://github.com/KTH-AC/ControlCore.jl.git",
"https://github.com/OpenGene/HTSLIB.jl.git",
"https://github.com/agbondy/DriftDiffusion.jl.git",
"https://github.com/bramtayl/ChainRecursive.jl.git",
"https://github.com/bramtayl/Chunks.jl.git",
"https://github.com/bramtayl/DotOverloading.jl.git",
"https://github.com/bramtayl/KeyedTables.jl.git",
"https://github.com/bramtayl/Keys.jl.git",
"https://github.com/bramtayl/LazyCall.jl.git",
"https://github.com/bramtayl/LazyContext.jl.git",
"https://github.com/bramtayl/LazyQuery.jl.git",
"https://github.com/bramtayl/NumberedLines.jl.git",
"https://github.com/bramtayl/OnlinePackage.jl.git",
"https://github.com/bramtayl/Parts.jl.git",
"https://github.com/bramtayl/RecurUnroll.jl.git",
"https://github.com/bramtayl/RequirementVersions.jl.git",
"https://github.com/bramtayl/SessionHacker.jl.git",
"https://github.com/bramtayl/TypedBools.jl.git",
"https://github.com/bramtayl/ValuedTuples.jl.git",
"https://github.com/bramtayl/ZippedArrays.jl.git",
"https://github.com/jakebolewski/LibGit2.jl.git",
"https://github.com/dourouc05/CombinatorialBandits.jl.git",
"https://github.com/miguelraz/OrthogonalPolynomials.jl.git",
"https://github.com/rennis250/Arduino.jl.git",
"https://github.com/rennis250/GLUT.jl.git",
"https://github.com/rennis250/GetC.jl.git",
"https://github.com/rennis250/OpenGL.jl.git",
"https://github.com/rennis250/SDL.jl.git",
"https://github.com/rennis250/Sparrow.jl.git",
"https://github.com/sadit/SimilarReferences.jl.git",
"https://github.com/tpoisot/Viridis.jl.git"
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 285 | const DO_NOT_PUSH_TO_THESE_DESTINATIONS = String[
"MirrorUpdater",
"MirrorUpdater.jl",
"UnofficialJuliaMirror/MirrorUpdater",
"UnofficialJuliaMirror/MirrorUpdater.jl",
"UnofficialJuliaMirrorBot/MirrorUpdater",
"UnofficialJuliaMirrorBot/MirrorUpdater.jl",
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 208 | include("broken-url-list.jl")
include("git-lfs-repos-url-list.jl")
const DO_NOT_TRY_URL_LIST = convert(
Vector{String},
vcat(
BROKEN_URL_LIST,
GIT_LFS_REPO_URL_LIST,
),
)
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 265 | import Pkg
const GIT_LFS_REPO_URL_LIST = String[
x["source_url"] for x in values(
Pkg.TOML.parsefile(
joinpath(
@__DIR__,
"git-lfs-repos-src-dest-pairs.toml",
)
)
)
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 922 | const JULIACOMPUTING_REPOS = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaComputing/Deprecations.jl",
destination_repo_name = "JuliaComputing-Deprecations.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaComputing/FemtoCleaner.jl",
destination_repo_name = "JuliaComputing-FemtoCleaner.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaComputing/NewPkgEval.jl",
destination_repo_name =
"NewPkgEval.jl-9f2e2246-6dce-11e8-3d98-4b291446da6e",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaComputing/Registrator.jl",
destination_repo_name =
"Registrator.jl-4418983a-e44d-11e8-3aec-9789530b3b3e",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 1555 | const JULIALANG_REPOS = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/METADATA.jl",
destination_repo_name = "JuliaLang-METADATA.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/Pkg.jl",
destination_repo_name =
"JuliaLang-Pkg.jl-44cfe95a-1eb2-52ea-b672-e2afdf69b78f",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/julia",
destination_repo_name = "JuliaLang-julia",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/libosxunwind",
destination_repo_name = "JuliaLang-libosxunwind",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/libuv",
destination_repo_name = "JuliaLang-libuv",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaLang/readline",
destination_repo_name = "JuliaLang-readline",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaRegistries/Registrator.jl",
destination_repo_name = "Registrator.jl-4418983a-e44d-11e8-3aec-9789530b3b3e",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaRegistries/TagBot",
destination_repo_name = "JuliaRegistries-TagBot",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 421 | const MISCELLANEOUS_GPU_RELATED_REPOS = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaGPU/docker",
destination_repo_name = "JuliaGPU-docker",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaGPU/gitlab-ci",
destination_repo_name = "JuliaGPU-gitlab-ci",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 3336 | const MISCELLANEOUS_JULIA_RELATED_REPOS = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/bors-ng/bors-ng",
destination_repo_name = "bors-ng-bors-ng",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/libuv/libuv",
destination_repo_name = "libuv-libuv",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JeffBezanson/femtolisp",
destination_repo_name = "JeffBezanson-femtolisp",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaMath/openlibm",
destination_repo_name = "JuliaMath-openlibm",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaMath/OpenlibmBuilder",
destination_repo_name = "JuliaMath-OpenlibmBuilder",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaMath/DSFMTBuilder",
destination_repo_name = "JuliaMath-DSFMTBuilder",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaMath/openspecfun",
destination_repo_name = "JuliaMath-openspecfun",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaStrings/utf8proc",
destination_repo_name = "JuliaStrings-utf8proc",),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/djsegal/julia_observer",
destination_repo_name = "djsegal-julia_observer",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaInterop/libcxxwrap-julia",
destination_repo_name = "JuliaInterop-libcxxwrap-julia",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaStats/RmathBuilder",
destination_repo_name = "JuliaStats-RmathBuilder",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaStats/Rmath-julia",
destination_repo_name = "JuliaStats-Rmath-julia",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/JuliaStats/RmathDist.jl",
destination_repo_name = "JuliaStats-RmathDist.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/staticfloat/homebrew-julia",
destination_repo_name = "staticfloat-homebrew-julia",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/staticfloat/homebrew-juliadeps",
destination_repo_name = "staticfloat-homebrew-juliadeps",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/staticfloat/julia-docker",
destination_repo_name = "staticfloat-julia-docker",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/tpapp/texlive-julia-minimal-docker",
destination_repo_name = "tpapp-texlive-julia-minimal-docker",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/wildart/TOML.jl",
destination_repo_name = "wildart-TOML.jl-191fdcea-f9f2-43e0-b922-d33f71e2abc3",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 2466 | const MISCELLANEOUS_ML_RELATED_REPOS = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/DeepMark/deepmark",
destination_repo_name = "DeepMark-deepmark",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/FluxML/fluxml.github.io",
destination_repo_name = "FluxML-fluxml.github.io",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/FluxML/model-zoo",
destination_repo_name = "FluxML-model-zoo",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/KnetML/Knet-the-Julia-dope",
destination_repo_name = "KnetML-Knet-the-Julia-dope",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/KnetML/NLPdemos",
destination_repo_name = "KnetML-NLPdemos",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/KnetML/WGAN.jl",
destination_repo_name = "KnetML-WGAN.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/avik-pal/DeepLearningBenchmarks",
destination_repo_name = "avik-pal-DeepLearningBenchmarks",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/denizyuret/Klutz.jl",
destination_repo_name = "denizyuret-Klutz.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/ilkarman/DeepLearningFrameworks",
destination_repo_name = "ilkarman-DeepLearningFrameworks",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/soumith/convnet-benchmarks",
destination_repo_name = "soumith-convnet-benchmarks",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/ysimillides/mlj-docker",
destination_repo_name = "ysimillides-mlj-docker",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/DilumAluthge/PredictMD-docker",
destination_repo_name = "DilumAluthge-PredictMD-docker",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url = "https://github.com/DilumAluthge/OfflineRegistry",
destination_repo_name = "DilumAluthge-OfflineRegistry",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 994 | const REGISTRY_LIST = MirrorUpdater.Types.Registry[
MirrorUpdater.Types.Registry(
;
owner = "bcbi",
name = "PredictMDRegistry",
uuid = "26a550a3-39fe-4af4-af6d-e8814c2b6dd9",
url = "https://github.com/bcbi/PredictMDRegistry.git",
),
MirrorUpdater.Types.Registry(
;
owner = "JuliaRegistries",
name = "General",
uuid = "23338594-aafe-5451-b93e-139f81909106",
url = "https://github.com/JuliaRegistries/General.git",
),
MirrorUpdater.Types.Registry(
;
owner = "fredrikekre",
name = "Registry",
uuid = "ae0cb698-197b-42ec-a0a0-4f871aea6013",
url = "https://github.com/fredrikekre/Registry.git",
),
MirrorUpdater.Types.Registry(
;
owner = "JuliaFinance",
name = "JuliaFinance",
uuid = "e8ee1221-7013-525c-b886-deb0c05ff931",
url = "https://github.com/JuliaFinance/JuliaFinance.git",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 374 | const TRY_BUT_ALLOW_FAILURES_URL_LIST = String[
"https://gitlab.com/UnofficialJuliaMirror/GeoEfficiency.jl-b1f22607-7830-5ceb-9a8e-05ab1ac77008",
"https://gitlab.com/UnofficialJuliaMirror/JuliaLang-julia",
"https://gitlab.com/UnofficialJuliaMirror/julia.jl-1222c4b2-2114-5bfd-aeef-88e4692bbb3e",
"https://gitlab.com/UnofficialJuliaMirror/libuv-libuv",
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 1519 | const UNREGISTERED_PACKAGES = MirrorUpdater.Types.SrcDestPair[
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/DilumAluthge/DelayedErrors.jl",
destination_repo_name =
"DelayedErrors.jl-b0cb2517-35d6-4513-a0ca-b0b388a189e4",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/JuliaMath/Libm.jl",
destination_repo_name =
"JuliaMath-Libm.jl",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/alan-turing-institute/MLJRegistry.jl",
destination_repo_name =
"MLJRegistry.jl-a41df0fa-2d8a-11e9-1bfb-8110be68cd3e",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl",
destination_repo_name =
"MirrorUpdater.jl-7f0fc4bf-4ffe-4e21-ab91-7bf1358d5ab3",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/UnofficialJuliaMirrorSnapshots/RemoveLFS.jl",
destination_repo_name =
"RemoveLFS.jl-a262596c-dd27-4d54-b019-3619024addbd",
),
MirrorUpdater.Types.SrcDestPair(
;
source_url =
"https://github.com/UnofficialJuliaMirrorSnapshots/Snapshots.jl",
destination_repo_name =
"Snapshots.jl-44eb87bc-f37b-45e8-9f53-3bcb453a652d",
),
]
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 3290 | ##### Beginning of file
# Parts of this file are based on:
# 1. https://github.com/JuliaPackaging/Git.jl/blob/master/deps/build.jl
import Conda
function _default_git_cmd()::String
result::String = lowercase(strip("git"))
return result
end
function _get_git_version(
git::String
)::VersionNumber
a::String = convert(String,read(`$(git) --version`, String))
b::String = convert(String, strip(a))
c::Vector{SubString{String}} = split(b, "git version")
d::String = convert(String,last(c))
e::String = convert(String, strip(d))
f::VersionNumber = VersionNumber(e)
return f
end
function _found_default_git()::Bool
default_git_cmd::String = _default_git_cmd()
found_default_git::Bool = try
success(`$(default_git_cmd) --version`)
catch
false
end
git_version_parsed::Bool = try
isa(
_get_git_version(default_git_cmd),
VersionNumber,
)
catch
false
end
result = found_default_git && git_version_parsed
return result
end
function _install_git()::String
result::String = _install_git_conda()
return result
end
function _install_git_conda()::String
@info("Attempting to install Git using Conda.jl...")
environment::Symbol = :MirrorUpdater
Conda.add("git", environment)
@info("Successfully installed Git using Conda.jl.")
git_cmd::String = strip(
joinpath(
Conda.bin_dir(environment),
"git",
)
)
run(`$(git_cmd) --version`)
return git_cmd
end
function _build_git()::String
install_git::Bool = lowercase(strip(get(ENV, "INSTALL_GIT", "false"))) ==
lowercase(strip("true"))
found_default_git::Bool = _found_default_git()
if install_git
@info("INSTALL_GIT is true, so I will now install git.")
git_cmd = _install_git()
elseif found_default_git
@info("I found git on your system, so I will use that git.")
git_cmd = _default_git_cmd()
else
@info("I did not find git on your system, so I will now install git.")
git_cmd = _install_git()
end
return git_cmd
end
function _build_mirrorupdater()::Nothing
git_cmd = _build_git()
build_jl_file_path = strip(
abspath(
strip(
@__FILE__
)
)
)
@debug(
"deps/build.jl: ",
build_jl_file_path,
)
deps_directory = strip(
abspath(
strip(
dirname(
strip(
build_jl_file_path
)
)
)
)
)
@debug(
"deps:",
deps_directory,
)
deps_jl_file_path = strip(
abspath(
joinpath(
strip(deps_directory),
strip("deps.jl"),
)
)
)
@debug(
"deps/deps.jl:",
deps_jl_file_path,
)
open(deps_jl_file_path, "w") do f
line_1::String = "git_cmd = \"$(strip(string(git_cmd)))\""
@info("Writing line 1 to deps.jl: ", line_1,)
println(f, line_1)
end
return nothing
end
_build_mirrorupdater()
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 717 | ##### Beginning of file
module MirrorUpdater # Begin module MirrorUpdater
__precompile__(true)
import ArgParse
import Conda
import Coverage
import Dates
import GitHub
import HTTP
import JSON
import Pkg
import Test
import TimeZones
include(joinpath("delayederrors.jl"))
include(joinpath("package_directory.jl"))
include(joinpath("version.jl"))
include(joinpath("welcome.jl"))
include(joinpath("Types", "Types.jl"))
include(joinpath("Utils", "Utils.jl"))
include(joinpath("Common", "Common.jl"))
include(joinpath("Run", "Run.jl"))
include(joinpath("CommandLine", "CommandLine.jl"))
include(joinpath("Hosts", "Hosts.jl"))
include(joinpath("init.jl"))
end # End module MirrorUpdater
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 1104 | struct DelayedError
msg::S where S <: AbstractString
dict::T where T <: AbstractDict
end
function process_delayed_error_list(list)::Nothing
if isempty(list)
@debug("There were no delayed errors.")
else
for x in list
@error("Delayed error from earlier: $(x.msg)", x.dict...)
end
error("There were one or more delayed errors.")
end
return nothing
end
function process_delayed_error_list()::Nothing
global delayed_error_list
process_delayed_error_list(delayed_error_list)
return nothing
end
function delayederror(msg::S; kwargs...)::Nothing where S <: AbstractString
x = DelayedError(msg, Dict(kwargs...))
global delayed_error_list
push!(delayed_error_list, x,)
@error("Delaying this error for later: $(x.msg)", x.dict...)
return nothing
end
function delayederror(msg::Vararg{Any,N}; kwargs...)::Nothing where {N}
    delayederror(Main.Base.string(msg...); kwargs...)
return nothing
end
function delayedexit(n)::Nothing
process_delayed_error_list()
exit(n)
end
delayedexit() = delayedexit(0)
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 180 | ##### Beginning of file
function __init__()::Nothing
global delayed_error_list = Vector{DelayedError}()
_print_welcome_message()
return nothing
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 4879 | ##### Beginning of file
function _is_filesystem_root(path::AbstractString)::Bool
path::String = abspath(strip(path))
if path == dirname(path)
return true
else
return false
end
end
function _is_package_directory(path::AbstractString)::Bool
path::String = abspath(strip(path))
if isfile(joinpath(path, "Project.toml"))
return true
else
return false
end
end
function _find_package_directory(path::AbstractString)::String
path::String = abspath(strip(path))
if _is_package_directory(path)
return path
elseif _is_filesystem_root(path)
error(string("Could not find the Project.toml file"))
else
result = _find_package_directory(dirname(path))
return result
end
end
"""
package_directory()::String
Return the MirrorUpdater package directory.
"""
function package_directory()::String
result::String = _find_package_directory(abspath(strip(@__FILE__)))
return result
end
function _location(m::Method)::String
result::String = abspath(first(functionloc(m)))
return result
end
function _location(f::Function)::String
result::String = abspath(first(functionloc(f)))
return result
end
function _location(f::Function, types::Tuple)::String
result::String = abspath(first(functionloc(f, types)))
return result
end
function _location(m::Module)::String
result::String = abspath(_location(getfield(m, :eval)))
return result
end
"""
package_directory(parts...)::String
Equivalent to `abspath(joinpath(abspath(package_directory()), parts...))`.
"""
function package_directory(parts...)::String
result::String = abspath(joinpath(abspath(package_directory()), parts...))
return result
end
"""
package_directory(m::Method)::String
If method `m`
is part of a Julia package, returns the package root directory.
If method `m`
is not part of a Julia package, throws an error.
"""
function package_directory(m::Method)::String
m_module_directory::String = abspath(_location(m))
m_package_directory::String = abspath(
_find_package_directory(m_module_directory)
)
return m_package_directory
end
"""
package_directory(m::Method, parts...)::String
Equivalent to
`result = abspath(joinpath(abspath(package_directory(m)), parts...))`.
"""
function package_directory(m::Method, parts...)::String
result::String = abspath(joinpath(abspath(package_directory(m)), parts...))
return result
end
"""
package_directory(f::Function)::String
If function `f`
is part of a Julia package, returns the package root directory.
If function `f`
is not part of a Julia package, throws an error.
"""
function package_directory(f::Function)::String
m_module_directory::String = abspath(_location(f))
m_package_directory::String = abspath(
_find_package_directory(m_module_directory)
)
return m_package_directory
end
"""
package_directory(f::Function, parts...)::String
Equivalent to
`result = abspath(joinpath(abspath(package_directory(f)), parts...))`.
"""
function package_directory(f::Function, parts...)::String
result::String = abspath(joinpath(abspath(package_directory(f)), parts...))
return result
end
"""
package_directory(f::Function, types::Tuple)::String
If function `f` with type signature `types`
is part of a Julia package, returns the package root directory.
If function `f` with type signature `types`
is not part of a Julia package, throws an error.
"""
function package_directory(f::Function, types::Tuple)::String
m_module_directory::String = abspath(_location(f, types))
m_package_directory::String = abspath(
_find_package_directory(m_module_directory)
)
return m_package_directory
end
"""
package_directory(f::Function, types::Tuple, parts...)::String
Equivalent to
`result = abspath(joinpath(abspath(package_directory(f, types)), parts...))`.
"""
function package_directory(f::Function, types::Tuple, parts...)::String
result::String = abspath(joinpath(abspath(package_directory(f, types)), parts...))
return result
end
"""
package_directory(m::Module)::String
If module `m`
is part of a Julia package, returns the package root directory.
If module `m`
is not part of a Julia package, throws an error.
"""
function package_directory(m::Module)::String
m_module_directory::String = abspath(_location(m))
m_package_directory::String = abspath(
_find_package_directory(m_module_directory)
)
return m_package_directory
end
"""
package_directory(m::Module, parts...)::String
Equivalent to
`result = abspath(joinpath(abspath(package_directory(m)), parts...))`.
"""
function package_directory(m::Module, parts...)::String
result::String = abspath(joinpath(abspath(package_directory(m)), parts...))
return result
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 4353 | ##### Beginning of file
import Pkg # stdlib
struct _TomlFile
filename::String
function _TomlFile(path::String)::_TomlFile
path::String = abspath(strip(path))
if isfile(path)
result::_TomlFile = new(path)
return result
else
error("File does not exist")
end
end
end
function _parse_toml_file(x::_TomlFile)::Dict{String, Any}
toml_file_filename::String = x.filename
toml_file_text::String = read(toml_file_filename, String)
toml_file_parsed::Dict{String, Any} = Pkg.TOML.parse(toml_file_text)
return toml_file_parsed
end
function _version_string(x::_TomlFile)::String
toml_file_parsed::Dict{String, Any} = _parse_toml_file(x)
version_string::String = toml_file_parsed["version"]
return version_string
end
function _version_string()::String
MirrorUpdater_toml_file::_TomlFile = _TomlFile(
package_directory("Project.toml")
)
result_versionstring::String = _version_string(MirrorUpdater_toml_file)
return result_versionstring
end
function _version_string(m::Method)::String
m_package_directory::String = package_directory(m)
m_toml_file::_TomlFile = _TomlFile(
joinpath(m_package_directory, "Project.toml")
)
result_versionstring::String = _version_string(m_toml_file)
return result_versionstring
end
function _version_string(f::Function)::String
m_package_directory::String = package_directory(f)
m_toml_file::_TomlFile = _TomlFile(
joinpath(m_package_directory, "Project.toml")
)
result_versionstring::String = _version_string(m_toml_file)
return result_versionstring
end
function _version_string(f::Function, types::Tuple)::String
m_package_directory::String = package_directory(f, types)
m_toml_file::_TomlFile = _TomlFile(
joinpath(m_package_directory, "Project.toml")
)
result_versionstring::String = _version_string(m_toml_file)
return result_versionstring
end
function _version_string(m::Module)::String
m_package_directory::String = package_directory(m)
m_toml_file::_TomlFile = _TomlFile(
joinpath(m_package_directory, "Project.toml")
)
result_versionstring::String = _version_string(m_toml_file)
return result_versionstring
end
"""
version()::VersionNumber
Return the version number of MirrorUpdater.
"""
function version()::VersionNumber
result_versionstring::String = _version_string()
result_versionnumber::VersionNumber = VersionNumber(result_versionstring)
return result_versionnumber
end
"""
version(m::Method)::VersionNumber
If method `m`
is part of a Julia package, returns the version number of that package.
If method `m`
is not part of a Julia package, throws an error.
"""
function version(m::Method)::VersionNumber
result_versionstring::String = _version_string(m)
result_versionnumber::VersionNumber = VersionNumber(result_versionstring)
return result_versionnumber
end
"""
version(f::Function)::VersionNumber
If function `f`
is part of a Julia package, returns the version number of
that package.
If function `f`
is not part of a Julia package, throws an error.
"""
function version(f::Function)::VersionNumber
result_versionstring::String = _version_string(f)
result_versionnumber::VersionNumber = VersionNumber(result_versionstring)
return result_versionnumber
end
"""
version(f::Function, types::Tuple)::VersionNumber
If function `f` with type signature `types`
is part of a Julia package, returns the version number of
that package.
If function `f` with type signature `types`
is not part of a Julia package, throws an error.
"""
function version(f::Function, types::Tuple)::VersionNumber
result_versionstring::String = _version_string(f, types)
result_versionnumber::VersionNumber = VersionNumber(result_versionstring)
return result_versionnumber
end
"""
version(m::Module)::VersionNumber
If module `m` is part of a Julia package, returns the version number of
that package.
If module `m` is not part of a Julia package, throws an error.
"""
function version(m::Module)::VersionNumber
result_versionstring::String = _version_string(m)
result_versionnumber::VersionNumber = VersionNumber(result_versionstring)
return result_versionnumber
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 478 | ##### Beginning of file
function _print_welcome_message()::Nothing
mirrorupdater_version::VersionNumber = version()
mirrorupdater_pkgdir::String = package_directory()
@info(string("This is MirrorUpdater, version ",mirrorupdater_version,),)
@info(string("For help, please visit https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl",),)
@debug(string("MirrorUpdater package directory: ",mirrorupdater_pkgdir,),)
return nothing
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 4150 | ##### Beginning of file
module CommandLine # Begin submodule MirrorUpdater.CommandLine
__precompile__(true)
import ArgParse
import Dates
import HTTP
import Pkg
import TimeZones
import ..Types
import ..Utils
import ..Common
import ..Run
import ..process_delayed_error_list
function run_mirror_updater_command_line!!(
;
registry_list::Vector{Types.Registry},
arguments::Vector{String} =
String[],
git_hosting_providers::AbstractVector =
Any[],
additional_repos::Vector{Types.SrcDestPair} =
Types.SrcDestPair[],
do_not_push_to_these_destinations::Vector{String} =
String[],
do_not_try_url_list::Vector{String} =
String[],
try_but_allow_failures_url_list::Vector{String} =
String[],
time_zone::TimeZones.TimeZone =
TimeZones.TimeZone("America/New_York"),
)::Nothing
@info(
"Running MirrorUpdater.CommandLine.run_mirror_updater_command_line!!"
)
@info("parsing command line arguments...")
parsed_arguments::Dict = _parse_arguments(
arguments
)
@info("processing parsed command line arguments...")
processed_arguments::Dict = _process_parsed_arguments(
parsed_arguments
)
task::String = processed_arguments[:task]
has_gist_description::Bool = processed_arguments[:has_gist_description]
gist_description::String = processed_arguments[:gist_description]
is_dry_run::Bool = processed_arguments[:is_dry_run]
delete_gists_older_than_minutes::Int =
processed_arguments[:delete_gists_older_than_minutes]
Run.run_mirror_updater!!(
;
git_hosting_providers = git_hosting_providers,
task = task,
gist_description = gist_description,
is_dry_run = is_dry_run,
registry_list = registry_list,
additional_repos = additional_repos,
time_zone = time_zone,
do_not_push_to_these_destinations =
do_not_push_to_these_destinations,
do_not_try_url_list =
do_not_try_url_list,
try_but_allow_failures_url_list =
try_but_allow_failures_url_list,
delete_gists_older_than_minutes =
delete_gists_older_than_minutes,
)
process_delayed_error_list()
return nothing
end
function _parse_arguments(arguments::Vector{String})::Dict
s = ArgParse.ArgParseSettings()
ArgParse.@add_arg_table s begin
"--task"
help = "which task to run"
arg_type = String
default = ""
"--gist-description"
help = "description for the temporary gist"
arg_type = String
default = ""
"--dry-run"
help = "do everything except actually pushing the repos"
action = :store_true
"--delete-gists-older-than-minutes"
help = "delete all gists older than N minutes"
arg_type = Int
default = 0
end
result::Dict = ArgParse.parse_args(arguments, s)
return result
end
function _process_parsed_arguments(parsed_arguments::Dict)::Dict{Symbol, Any}
task_argument::String = strip(
convert(String, parsed_arguments["task"])
)
if length(task_argument) > 0
task = task_argument
else
task = "all"
end
gist_description::String = strip(
convert(String, parsed_arguments["gist-description"])
)
if length(gist_description) > 0
has_gist_description = true
else
has_gist_description = false
end
is_dry_run::Bool = parsed_arguments["dry-run"]
delete_gists_older_than_minutes::Int =
parsed_arguments["delete-gists-older-than-minutes"]
result::Dict{Symbol, Any} = Dict{Symbol, Any}()
result[:task] = task
result[:has_gist_description]=has_gist_description
result[:gist_description]=gist_description
result[:is_dry_run]=is_dry_run
result[:delete_gists_older_than_minutes]=delete_gists_older_than_minutes
return result
end
end # End submodule MirrorUpdater.CommandLine
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 33123 | ##### Beginning of file
module Common # Begin submodule MirrorUpdater.Common
__precompile__(true)
import ..Types
import ..Utils
import ArgParse
import Dates
import HTTP
import Pkg
import TimeZones
import ..delayederror
function _toml_file_to_package(
packagetoml_file_filename::String,
)::Types.Package
toml_file_text::String = read(packagetoml_file_filename, String)
toml_file_parsed::Dict{String,Any}=Pkg.TOML.parse(toml_file_text)
pkg_name::String = toml_file_parsed["name"]
pkg_uuid::String = toml_file_parsed["uuid"]
pkg_source_url::String = toml_file_parsed["repo"]
pkg::Types.Package = Types.Package(
;
name=pkg_name,
uuid=pkg_uuid,
source_url=pkg_source_url,
)
return pkg
end
function _get_uuid_from_toml_file(toml_file_filename::String)::String
toml_file_text::String = read(toml_file_filename, String)
toml_file_parsed::Dict{String,Any}=Pkg.TOML.parse(toml_file_text)
uuid::String = toml_file_parsed["uuid"]
return uuid
end
function _make_list(
registry_list::Vector{Types.Registry},
additional_repos::Vector{Types.SrcDestPair};
do_not_try_url_list::Vector{String},
try_but_allow_failures_url_list::Vector{String},
)::Vector{Types.SrcDestPair}
full_list::Vector{Types.SrcDestPair} = Types.SrcDestPair[]
for x in additional_repos
push!(full_list, x)
end
git = Utils._get_git_binary_path()
for registry in registry_list
registry_name = registry.name
registry_uuid = registry.uuid
registry_source_url = registry.url
registry_destination_repo_name = _generate_destination_repo_name(
registry
)
registry_src_dest_pair = Types.SrcDestPair(
;
source_url = registry_source_url,
destination_repo_name = registry_destination_repo_name,
)
push!(full_list, registry_src_dest_pair)
if registry_source_url in do_not_try_url_list ||
Types._name_with_git(registry_source_url) in do_not_try_url_list ||
Types._name_without_git(registry_source_url) in do_not_try_url_list
@warn(
string(
"registry_source_url is in the do-not-try list, ",
"so skipping.",
),
registry_source_url,
)
else
previous_dir::String = pwd()
temp_dir_registry_git_clone_regular::String = mktempdir()
cd(temp_dir_registry_git_clone_regular)
cmd_git_clone_registry_regular = `$(git) clone $(registry.url)`
@info(
"Attempting to run command",
cmd_git_clone_registry_regular,
pwd(),
ENV["PATH"],
)
clone_registry_regular_was_success =
Utils.command_ran_successfully!!(
cmd_git_clone_registry_regular;
)
if clone_registry_regular_was_success
@info("Command ran successfully",)
registry_toml_filename = joinpath(
temp_dir_registry_git_clone_regular,
registry_name,
"Registry.toml"
)
registry_toml_file_uuid = _get_uuid_from_toml_file(
registry_toml_filename
)
if lowercase(strip(registry_uuid)) !=
lowercase(strip(registry_toml_file_uuid))
delayederror(
string(
"The UUID ($(registry_toml_file_uuid)) ",
"I found in the Registry.toml file does not ",
"match the UUID ($(registry_uuid)) ",
"that you provided.",
)
)
end
list_of_packagetoml_filenames::Vector{String} = String[]
for (root, dirs, files) in
walkdir(temp_dir_registry_git_clone_regular)
for file in files
if lowercase(strip(file)) == "package.toml"
packagetoml_fn = joinpath(root, file)
push!(list_of_packagetoml_filenames, packagetoml_fn)
end
end
end
for packagetoml_file_filename in list_of_packagetoml_filenames
pkg = _toml_file_to_package(packagetoml_file_filename)
pkg_source_url = pkg.source_url
pkg_dest_repo_name = _generate_destination_repo_name(pkg)
pkg_src_dest_pair = Types.SrcDestPair(
;
source_url=pkg_source_url,
destination_repo_name=pkg_dest_repo_name,
)
push!(full_list, pkg_src_dest_pair)
end
else
@warn(
"Command did not run successfully",
cmd_git_clone_registry_regular,
pwd(),
ENV["PATH"],
)
if registry_source_url in try_but_allow_failures_url_list ||
Types._name_with_git(registry_source_url) in
try_but_allow_failures_url_list ||
Types._name_without_git(registry_source_url) in
try_but_allow_failures_url_list
@warn(
                        string(
                            "URL is in the try-but-allow-failures list, ",
                            "so ignoring error ",
                            "that occurred when running command",
),
cmd_git_clone_registry_regular,
pwd(),
ENV["PATH"],
)
else
delayederror(
string(
"Encountered error when running command: ",
cmd_git_clone_registry_regular,
pwd(),
ENV["PATH"],
)
)
end
end
cd(previous_dir)
rm(
temp_dir_registry_git_clone_regular;
force = true,
recursive = true,
)
end
end
unique_list_sorted::Vector{Types.SrcDestPair} = sort(unique(full_list))
@info(
string(
"I made a list with ",
"$(length(unique_list_sorted)) ",
"unique pairs."
)
)
return unique_list_sorted
end
function _generate_destination_repo_name(x::Types.Registry)::String
result::String = string(
strip(x.owner),
"-",
strip(x.name),
"-",
strip(x.uuid),
)
return result
end
function _generate_destination_repo_name(x::Types.Package)::String
result::String = string(
strip(x.name),
"-",
strip(x.uuid),
)
return result
end
function _src_dest_pair_to_string(x::Types.SrcDestPair)::String
result::String = string(
strip(x.source_url),
" ",
strip(x.destination_repo_name),
)
return result
end
function _src_dest_pair_list_to_string(
v::Vector{Types.SrcDestPair}
)::String
v_sorted_unique::Vector{Types.SrcDestPair} = sort(unique(v))
lines::Vector{String} = String[
_src_dest_pair_to_string(x) for x in v_sorted_unique
]
result::String = string(
join(lines, "\n",),
"\n",
)
return result
end
function _string_to_src_dest_pair_list(
x::String
)::Vector{Types.SrcDestPair}
all_src_dest_pairs = Types.SrcDestPair[]
lines::Vector{String} = convert(
Vector{String},
split(x, "\n",),
)
for line in lines
columns::Vector{String} = convert(
Vector{String},
split(strip(line)),
)
if length(columns) == 2
source_url::String = strip(columns[1])
destination_repo_name::String = strip(columns[2])
src_dest_pair::Types.SrcDestPair = Types.SrcDestPair(
;
source_url = source_url,
destination_repo_name = destination_repo_name,
)
push!(all_src_dest_pairs, src_dest_pair,)
end
end
src_dest_pairs_sorted_unique::Vector{Types.SrcDestPair} = sort(
unique(
all_src_dest_pairs
)
)
return src_dest_pairs_sorted_unique
end
function _remove_problematic_refs_before_github!!(
;
packed_refs_filename::String,
)::Nothing
original_packed_refs_content::String = read(
packed_refs_filename,
String,
)
original_lines::Vector{String} = convert(
Vector{String},
split(strip(original_packed_refs_content), "\n")
)
function _line_is_ok_to_keep(x::String)::Bool
result::Bool = (!(occursin("refs/pull/", x))) &&
(!(occursin("cache/pull/", x)))
return result
end
function _determine_new_gh_pages_branch_name(
content::String,
suggested_name::String,
)::String
if occursin(suggested_name, content)
result = _determine_new_gh_pages_branch_name(
content,
string(suggested_name, "1"),
)
else
result = suggested_name
end
return result
end
new_name_for_gh_pages_branch = _determine_new_gh_pages_branch_name(
original_packed_refs_content,
"gh-pages1",
)
function _transform_line(x::String)::String
result_1::String = replace(
x,
"gh-pages" => new_name_for_gh_pages_branch,
)
return result_1
end
new_lines::Vector{String} = Vector{String}()
for orig_line in original_lines
if _line_is_ok_to_keep(orig_line)
transformed_line = _transform_line(orig_line)
push!(new_lines, transformed_line)
end
end
new_packed_refs_content::String = string(
join(new_lines, "\n"),
"\n",
)
rm(
packed_refs_filename;
force = true,
recursive = true,
)
write(packed_refs_filename, new_packed_refs_content)
return nothing
end
function _push_mirrors!!(
;
src_dest_pairs::Vector{Types.SrcDestPair},
    git_hosting_providers::AbstractVector = Any[],
recursion_level::Integer = 0,
max_recursion_depth::Integer = 10,
is_dry_run::Bool = false,
do_not_push_to_these_destinations::Vector{String},
do_not_try_url_list::Vector{String},
try_but_allow_failures_url_list::Vector{String},
time_zone::Dates.TimeZone,
)::Nothing
@debug(string("Recursion level: $(recursion_level)"))
git = Utils._get_git_binary_path()
src_dest_pairs_sorted_unique::Vector{Types.SrcDestPair} = sort(
unique(
src_dest_pairs
)
)
@info(
string(
"Running _push_mirrors!! with ",
"$(length(src_dest_pairs_sorted_unique)) ",
"unique pairs.",
)
)
for pair_number = 1:length(src_dest_pairs_sorted_unique)
@info(
string(
"Pair $(pair_number) of ",
"$(length(src_dest_pairs_sorted_unique))",
)
)
pair = src_dest_pairs_sorted_unique[pair_number]
src_url = pair.source_url
if src_url in do_not_try_url_list ||
Types._name_with_git(src_url) in do_not_try_url_list ||
Types._name_without_git(src_url) in do_not_try_url_list
@warn(
string("Src url is in the do not try list, so skipping."),
src_url,
)
else
previous_dir::String = pwd()
temp_dir_repo_git_clone_regular::String = mktempdir()
temp_dir_repo_git_clone_mirror::String = mktempdir()
if recursion_level <= max_recursion_depth
@info(
string(
"Now I will look for additional repos to mirror ",
"(e.g. BinaryBuilder repos that are referenced ",
"in this repo).",
)
)
cd(temp_dir_repo_git_clone_regular)
cmd_git_clone_repo_regular =
`$(git) clone $(src_url) GITCLONEREPOREGULAR`
@info(
"Attempting to run command",
cmd_git_clone_repo_regular,
pwd(),
ENV["PATH"],
)
before = () -> rm(
joinpath(
                        temp_dir_repo_git_clone_regular,
"GITCLONEREPOREGULAR",
);
force = true,
recursive = true,
)
repo_regular_clone_was_success =
Utils.command_ran_successfully!!(
cmd_git_clone_repo_regular;
before = before,
)
mkpath(
joinpath(
                        temp_dir_repo_git_clone_regular,
"GITCLONEREPOREGULAR",
)
)
if repo_regular_clone_was_success
@info("Command ran successfully",)
cd(
joinpath(
temp_dir_repo_git_clone_regular,
"GITCLONEREPOREGULAR",
)
)
git_grep_results::String = try
strip(read(`$(git) grep Builder`, String))
catch exception
@info("ignoring exception: ", exception)
""
end
list_of_new_src_dest_pairs::Vector{Types.SrcDestPair} =
Types.SrcDestPair[]
if length(git_grep_results) > 0
bin_bldr_pair_list::Vector{Types.SrcDestPair} =
_get_list_of_binary_builder_repos(
git_grep_results
)
for bin_bldr_pair in bin_bldr_pair_list
if bin_bldr_pair in src_dest_pairs
else
push!(
list_of_new_src_dest_pairs,
bin_bldr_pair,
)
end
end
end
if (length(git_grep_results) > 0) &&
(length(list_of_new_src_dest_pairs) > 0)
if length(list_of_new_src_dest_pairs) == 1
@info(
string(
"I found ",
"1 ",
"additional repo to mirror. ",
"I will mirror ",
" it first, and then I will return ",
"to my previous list.",
)
)
else
@info(
string(
"I found ",
"$(length(list_of_new_src_dest_pairs)) ",
"additional repos to mirror. ",
"I will mirror ",
" them first, and then I will return ",
"to my previous list.",
)
)
end
_push_mirrors!!(
;
do_not_push_to_these_destinations =
do_not_push_to_these_destinations,
src_dest_pairs = list_of_new_src_dest_pairs,
git_hosting_providers = git_hosting_providers,
recursion_level = recursion_level + 1,
max_recursion_depth = max_recursion_depth,
is_dry_run = is_dry_run,
do_not_try_url_list = do_not_try_url_list,
time_zone = time_zone,
try_but_allow_failures_url_list =
try_but_allow_failures_url_list,
)
else
@info(
string(
"I did not find any additional ",
"repos to mirror.",
)
)
end
else
if src_url in try_but_allow_failures_url_list ||
Types._name_with_git(src_url) in
try_but_allow_failures_url_list ||
Types._name_without_git(src_url) in
try_but_allow_failures_url_list
@warn(
                            string(
                                "URL in the try-but-allow-failures list, ",
                                "so ignoring the error ",
                                "that occurred while running command",
),
cmd_git_clone_repo_regular,
pwd(),
ENV["PATH"],
)
else
delayederror(
string(
"Encountered error when running command: ",
cmd_git_clone_repo_regular,
pwd(),
ENV["PATH"],
)
)
end
end
else
@warn(
string(
"I have exceeded the maximum recursion depth.",
),
recursion_level,
max_recursion_depth,
)
end
cd(temp_dir_repo_git_clone_mirror)
cmd_git_repo_clone_mirror =
`$(git) clone --mirror $(src_url) GITCLONEREPOMIRROR`
@info(
"Attempting to run command",
cmd_git_repo_clone_mirror,
pwd(),
ENV["PATH"],
)
before = () -> rm(
joinpath(
temp_dir_repo_git_clone_mirror,
"GITCLONEREPOMIRROR",
);
force = true,
recursive = true,
)
repo_mirror_clone_was_success =
Utils.command_ran_successfully!!(
cmd_git_repo_clone_mirror;
max_attempts = 5,
max_seconds_per_attempt = 1800,
before = before,
)
mkpath(
joinpath(
temp_dir_repo_git_clone_mirror,
"GITCLONEREPOMIRROR",
)
)
if repo_mirror_clone_was_success
@info("Command ran successfully",)
cd(
joinpath(
temp_dir_repo_git_clone_mirror,
"GITCLONEREPOMIRROR",
)
)
@info("Processing the repository")
packed_refs_filename = joinpath(
temp_dir_repo_git_clone_mirror,
"GITCLONEREPOMIRROR",
"packed-refs",
)
_remove_problematic_refs_before_github!!(
;
packed_refs_filename = packed_refs_filename,
)
destination_repo_name = pair.destination_repo_name
                    if destination_repo_name in do_not_push_to_these_destinations
@warn(
string(
"Destination repo name is in the ",
"do_not_push_to_these_destinations list, ",
"so skipping.",
)
)
else
if is_dry_run
@info(
string(
"This is a dry run, so I will not ",
"push to any git hosting providers.",
)
)
else
for p = 1:length(git_hosting_providers)
@info(
string(
"Git hosting provider ",
"$(p) of ",
"$(length(git_hosting_providers))",
),
)
provider = git_hosting_providers[p]
args1 = Dict(
:repo_name => destination_repo_name,
)
@info(
string(
"Making sure that repo exists on ",
"git hosting provider $(p). ",
"(If it does not already exist, ",
"I will create it.)",
)
)
provider(:create_repo)(args1)
args2 = Dict(
:repo_name => destination_repo_name,
:directory => pwd(),
:git => git,
:try_but_allow_failures_url_list =>
try_but_allow_failures_url_list,
)
@info(
string(
"Attempting to push to ",
"git hosting provider $(p).",
)
)
push_to_provider_was_success = try
provider(:push_mirrored_repo)(args2)
true
catch exception
@warn(
"ignoring exception: ",
exception,
)
false
end
if push_to_provider_was_success
when = Dates.now(TimeZones.localzone(),)
args3 = Dict(
:source_url => src_url,
:when => when,
:time_zone => time_zone,
)
repo_description_default::String =
Utils.default_repo_description(
;
from = src_url,
when = when,
time_zone = time_zone,
)
repo_description_provider::String = try
provider(
:generate_new_repo_description)(
args3)
catch exception
@warn(
string("ignoring exception: "),
exception,
)
""
end
new_repo_description::String = ""
if length(
strip(
repo_description_provider
)
) == 0
new_repo_description = strip(
repo_description_default
)
else
new_repo_description = strip(
repo_description_provider
)
end
@debug(
string("Repo descriptions: "),
repo_description_default,
repo_description_provider,
new_repo_description,
)
args4 = Dict(
:repo_name =>
destination_repo_name,
:new_repo_description =>
new_repo_description
)
@info(
string(
"Attempting to update ",
"repo description on git hosting ",
"provider $(p).",
),
destination_repo_name,
new_repo_description,
)
provider(:update_repo_description)(args4)
else
delayederror(
string(
"Push to provider $(p) ",
"was not a success.",
)
)
end
end
end
end
else
if src_url in try_but_allow_failures_url_list ||
Types._name_with_git(src_url) in
try_but_allow_failures_url_list ||
Types._name_without_git(src_url) in
try_but_allow_failures_url_list
@warn(
string(
"URL in the try-but-allow-failures list, ",
"so ignoring the error ",
"that occured while running command",
),
cmd_git_repo_clone_mirror,
pwd(),
ENV["PATH"],
)
else
delayederror(
string(
"Encountered error when running command: ",
cmd_git_repo_clone_mirror,
pwd(),
ENV["PATH"],
)
)
end
end
cd(previous_dir)
rm(
temp_dir_repo_git_clone_regular;
force = true,
recursive = true,
)
rm(
temp_dir_repo_git_clone_mirror;
force = true,
recursive = true,
)
end
end
return nothing
end
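# Scan free-form text for GitHub repository URLs and turn each match into a
# SrcDestPair whose destination repo name is "owner-name".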
function _get_list_of_binary_builder_repos(
text::AbstractString,
)::Vector{Types.SrcDestPair}
result::Vector{Types.SrcDestPair} = Types.SrcDestPair[]
lines::Vector{String} = convert(
Vector{String},
split(strip(text), "\n"),
)
regex_1::Regex = r"https:\/\/github.com\/(\w*?)\/(\w*?)\/"
for line in lines
line_stripped::String = strip(line)
if occursin(regex_1, line_stripped)
regex_match::RegexMatch = match(regex_1, line_stripped)
github_repo_owner::String = strip(regex_match[1])
github_repo_name::String = strip(regex_match[2])
source_url::String = string(
"https://github.com/",
github_repo_owner,
"/",
github_repo_name,
)
destination_repo_name::String = string(
github_repo_owner,
"-",
github_repo_name,
)
new_pair = Types.SrcDestPair(
;
source_url = source_url,
destination_repo_name = destination_repo_name,
)
push!(
result,
new_pair,
)
else
end
end
return result
end
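# Right-pad a string with spaces so the result is at least n characters long.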
function _add_trailing_spaces(x::AbstractString, n::Integer)::String
temp::String = strip(convert(String, x))
if length(temp) >= n
result::String = temp
else
deficit::Int = n - length(temp)
result = string(temp, repeat(" ", deficit))
end
return result
end
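# A SrcDestPair falls inside an interval when its destination repo name does.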
function _interval_contains_x(
interval::Types.AbstractInterval,
pair::Types.SrcDestPair,
)::Bool
result::Bool = _interval_contains_x(
interval,
pair.destination_repo_name,
)
return result
end
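# Return the unique, sorted pairs whose destination repo names fall inside
# the given interval.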
function _pairs_that_fall_in_interval(
list_of_pairs::Vector{Types.SrcDestPair},
interval::Types.AbstractInterval,
)::Vector{Types.SrcDestPair}
ith_pair_falls_in_interval::Vector{Bool} = Vector{Bool}(
undef,
length(list_of_pairs),
)
for i = 1:length(list_of_pairs)
ith_pair = list_of_pairs[i]
ith_pair_falls_in_interval[i] = _interval_contains_x(
interval,
ith_pair,
)
end
full_sublist::Vector{Types.SrcDestPair} = list_of_pairs[
ith_pair_falls_in_interval
]
unique_sorted_sublist::Vector{Types.SrcDestPair} = sort(
unique(full_sublist)
)
return unique_sorted_sublist
end
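# Interval membership is a lexicographic comparison on strings: an interval
# "[A,B)" contains x when A <= x < B; missing bounds are unconstrained.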
function _interval_contains_x(
interval::Types.NoBoundsInterval,
x::AbstractString,
)::Bool
result::Bool = true
return result
end
function _interval_contains_x(
interval::Types.LowerAndUpperBoundInterval,
x::AbstractString,
)::Bool
x_stripped::String = strip(convert(String, x))
left::String = strip(interval.left)
right::String = strip(interval.right)
result::Bool = (left <= x_stripped) && (x_stripped < right)
return result
end
function _interval_contains_x(
interval::Types.LowerBoundOnlyInterval,
x::AbstractString,
)::Bool
x_stripped::String = strip(convert(String, x))
left::String = strip(interval.left)
result::Bool = left <= x_stripped
return result
end
function _interval_contains_x(
interval::Types.UpperBoundOnlyInterval,
x::AbstractString,
    )::Bool
x_stripped::String = strip(convert(String, x))
right::String = strip(interval.right)
result::Bool = x_stripped < right
return result
end
end # End submodule MirrorUpdater.Common
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 343 | ##### Beginning of file
module Hosts # Begin submodule MirrorUpdater.Hosts
__precompile__(true)
import ..Types
import ..Utils
include(joinpath("BitbucketHost", "BitbucketHost.jl"))
include(joinpath("GitHubHost", "GitHubHost.jl"))
include(joinpath("GitLabHost", "GitLabHost.jl"))
end # End submodule MirrorUpdater.Hosts
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 16667 | ##### Beginning of file
module BitbucketHost # Begin submodule MirrorUpdater.Hosts.BitbucketHost
__precompile__(true)
import ..Types
import ..Utils
import Dates
import HTTP
import JSON
import TimeZones
function new_bitbucket_session(
;
bitbucket_team::String,
bitbucket_bot_username::String,
bitbucket_bot_app_password::String,
)::Function
_bitbucket_team::String = strip(
convert(String, bitbucket_team)
)
_provided_bitbucket_bot_username::String = strip(
convert(String, bitbucket_bot_username)
)
_bitbucket_bot_app_password::String = strip(
convert(String, bitbucket_bot_app_password)
)
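    # Build a Bitbucket-compatible repository slug: lowercase, truncated to
    # 62 characters, with any trailing ".", "_", or "-" characters removed.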
function _bitbucket_slug(x::AbstractString)::String
x_stripped::String = strip(convert(String, x))
x_lowercase = lowercase(x_stripped)
if length(x_lowercase) <= 62
result = x_lowercase
else
result = x_lowercase[1:62]
end
while strip(lowercase(result[end:end])) == "." ||
strip(lowercase(result[end:end])) == "_" ||
strip(lowercase(result[end:end])) == "-"
result = result[1:(end-1)]
end
result_converted = convert(String, result)
return result_converted
end
function _get_bitbucket_username_from_provided()::String
method::String = "GET"
url::String = string(
"https://",
"$(_provided_bitbucket_bot_username)",
":",
"$(_bitbucket_bot_app_password)",
"@api.bitbucket.org",
"/2.0",
"/user",
)
r::HTTP.Messages.Response = HTTP.request(
method,
url;
basic_authorization = true
)
r_body::String = String(r.body)
parsed_body::Dict = JSON.parse(r_body)
username::String = parsed_body["username"]
username_stripped::String = strip(username)
return username_stripped
end
@info("Attempting to authenticate to Bitbucket...")
_bitbucket_username::String = _get_bitbucket_username_from_provided()
if lowercase(strip(_bitbucket_username)) !=
lowercase(strip(_provided_bitbucket_bot_username))
delayederror(
string(
"Provided Bitbucket username ",
"(\"$(_provided_bitbucket_bot_username)\") ",
"does not match ",
"actual Bitbucket username ",
"(\"$(_bitbucket_username)\").",
)
)
else
@info(
string(
"Provided Bitbucket username matches ",
"actual Bitbucket username.",
),
_provided_bitbucket_bot_username,
_bitbucket_username,
)
end
@info("Successfully authenticated to Bitbucket :)")
@info(
string(
"Bitbucket username: ",
"$(_get_bitbucket_username_from_provided())",
)
)
@info(
string(
"Bitbucket team (a.k.a. organization): ",
"$(_bitbucket_team)",
)
)
function _create_gist(params::AbstractDict)::Nothing
@warn(
string(
"At this time, snippet (a.k.a. gist) ",
"functionality is not yet supported ",
"for the Bitbucket backend.",
)
)
return nothing
end
function _retrieve_gist(params::AbstractDict)::String
@warn(
string(
"At this time, snippet (a.k.a. gist) ",
"functionality is not yet supported ",
"for the Bitbucket backend.",
)
)
delayederror("Could not find the matching Bitbucket snippet")
end
function _delete_gists(params::AbstractDict)::Nothing
@warn(
string(
"At this time, snippet (a.k.a. gist) ",
"functionality is not yet supported ",
"for the Bitbucket backend.",
)
)
return nothing
end
function _delete_gists_older_than_minutes(params::AbstractDict)::Nothing
@warn(
string(
"At this time, snippet (a.k.a. gist) ",
"functionality is not yet supported ",
"for the Bitbucket backend.",
)
)
return nothing
end
function _repo_name_with_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo,
org = org,
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
result::String = string(
org_stripped,
"/",
repo_name_without_org,
)
return result
end
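    # Strip a leading "org/" prefix (if present) so that only the bare
    # repository name remains.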
function _repo_name_without_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_stripped::String = strip(
strip(strip(strip(strip(convert(String, repo)), '/')), '/')
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
if length(org_stripped) == 0
result = repo_stripped
else
repo_stripped_lowercase::String = lowercase(repo_stripped)
org_stripped_lowercase::String = lowercase(org_stripped)
org_stripped_lowercase_withtrailingslash::String = string(
org_stripped_lowercase,
"/",
)
if startswith(repo_stripped_lowercase,
org_stripped_lowercase_withtrailingslash)
index_start =
length(org_stripped_lowercase_withtrailingslash) + 1
result = repo_stripped[index_start:end]
else
result = repo_stripped
end
end
return result
end
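    # Build the bitbucket.org destination URL, embedding full credentials
    # (:with_auth), a redacted password (:with_redacted_auth), or no
    # credentials (:without_auth).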
function _get_destination_url(
;
repo_name::String,
credentials::Symbol,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _bitbucket_team,
)
result::String = ""
if credentials == :with_auth
result = string(
"https://",
_bitbucket_username,
":",
_bitbucket_bot_app_password,
"@",
"bitbucket.org/",
_bitbucket_team,
"/",
_bitbucket_slug(repo_name_without_org),
)
elseif credentials == :with_redacted_auth
result = string(
"https://",
_bitbucket_username,
":",
"*****",
"@",
"bitbucket.org/",
_bitbucket_team,
"/",
_bitbucket_slug(repo_name_without_org),
)
elseif credentials == :without_auth
result =string(
"https://",
"bitbucket.org/",
_bitbucket_team,
"/",
_bitbucket_slug(repo_name_without_org),
)
else
delayederror(
"$(credentials) is not a supported value for credentials"
)
end
return result
end
function _bitbucket_repo_exists(
;
repo_name::String,
)::Bool
repo_name_without_org = _repo_name_without_org(
;
repo = repo_name,
org = _bitbucket_team,
)
method = "GET"
url = string(
"https://",
"$(_bitbucket_username)",
":",
"$(_bitbucket_bot_app_password)",
"@api.bitbucket.org",
"/2.0",
"/repositories",
"/$(_bitbucket_team)",
"/$(_bitbucket_slug(repo_name_without_org))",
)
result::Bool = try
r = HTTP.request(
method,
url;
basic_authorization = true,
)
true
catch
false
end
return result
end
function _create_repo(params::AbstractDict)::Nothing
repo_name::String = strip(params[:repo_name])
repo_name_with_org::String = _repo_name_with_org(
;
repo = repo_name,
org = _bitbucket_team,
)
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _bitbucket_team,
)
repo_destination_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
if Utils._url_exists(repo_destination_url_without_auth)
@info("According to HTTP GET request, the repo exists.")
else
if _bitbucket_repo_exists(; repo_name = repo_name_without_org)
@info("According to the Bitbucket API, the repo exists.")
else
@info(
string("Attempting to create new repo on Bitbucket"),
repo_destination_url_without_auth,
)
method = "POST"
url = string(
"https://",
"$(_bitbucket_username)",
":",
"$(_bitbucket_bot_app_password)",
"@api.bitbucket.org",
"/2.0",
"/repositories",
"/$(_bitbucket_team)",
"/$(_bitbucket_slug(repo_name_without_org))",
)
headers = Dict(
"content-type" => "application/json",
)
params = Dict(
"scm" => "git",
"is_private" => false,
"name" => _bitbucket_slug(repo_name_without_org),
"slug" => _bitbucket_slug(repo_name_without_org),
"has_issues" => false,
"has_wiki" => false,
)
body = JSON.json(params)
r = HTTP.request(
method,
url,
headers,
body;
basic_authorization = true,
)
@info("Successfully created new repo on Bitbucket")
end
end
return nothing
end
function _push_mirrored_repo(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
repo_directory::String = params[:directory]
git_path::String = params[:git]
try_but_allow_failures_url_list =
params[:try_but_allow_failures_url_list]
repo_name_without_org = _repo_name_without_org(
;
repo = repo_name,
org = _bitbucket_team,
)
repo_dest_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
repo_dest_url_with_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_auth,
)
repo_dest_url_with_redacted_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_redacted_auth,
)
previous_directory = pwd()
cd(repo_directory)
mirrorpush_cmd_withauth =
`$(git_path) push --mirror $(repo_dest_url_with_auth)`
mirrorpush_cmd_withredactedauth =
`$(git_path) push --mirror $(repo_dest_url_with_redacted_auth)`
@info(
string("Attempting to push repo to Bitbucket..."),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
try
Utils.command_ran_successfully!!(
mirrorpush_cmd_withauth;
error_on_failure = true,
last_resort_run = true,
)
@info(
string("Successfully pushed repo to Bitbucket."),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
catch exception
@warn("caught exception: ", exception)
if repo_dest_url_without_auth in try_but_allow_failures_url_list
@warn(
string(
"repo_dest_url_without_auth is in the ",
"try_but_allow_failures_url_list, so ignoring ",
"exception.",
),
repo_dest_url_without_auth,
exception,
)
else
delayederror(string(exception); exception=exception,)
end
end
cd(previous_directory)
return nothing
end
function _generate_new_repo_description(
params::AbstractDict,
)::String
source_url::String = params[:source_url]
when::TimeZones.ZonedDateTime = params[:when]
time_zone::TimeZones.TimeZone = params[:time_zone]
by::String = strip(string("@", _bitbucket_username))
new_description::String = Utils.default_repo_description(
;
from = source_url,
when = when,
time_zone = time_zone,
by = by,
)
return new_description
end
function _update_repo_description(params::AbstractDict)::Nothing
repo_name::String = strip(params[:repo_name])
new_repo_description = strip(params[:new_repo_description])
_create_repo(
Dict(
:repo_name => repo_name,
),
)
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _bitbucket_team,
)
method = "PUT"
url = string(
"https://",
"$(_bitbucket_username)",
":",
"$(_bitbucket_bot_app_password)",
"@api.bitbucket.org",
"/2.0",
"/repositories",
"/$(_bitbucket_team)",
"/$(_bitbucket_slug(repo_name_without_org))",
)
headers = Dict(
"content-type" => "application/json",
)
params = Dict(
"description" => String(strip(new_repo_description)),
)
body = JSON.json(params)
@info("Attempting to update repo description on Bitbucket...")
try
r = HTTP.request(
method,
url,
headers,
body;
basic_authorization = true,
)
@info("Successfully updated repo description on Bitbucket")
catch ex
@error(
string(
"ignoring error \"$(ex)\" while updating ",
"Bitbucket repo description."
),
ex=ex,
)
end
return nothing
end
function _bitbucket_provider(task::Symbol)::Function
if task == :create_gist
return _create_gist
elseif task == :retrieve_gist
return _retrieve_gist
elseif task == :delete_gists
return _delete_gists
elseif task == :create_repo
return _create_repo
elseif task == :push_mirrored_repo
return _push_mirrored_repo
elseif task == :generate_new_repo_description
return _generate_new_repo_description
elseif task == :update_repo_description
return _update_repo_description
elseif task == :delete_gists_older_than_minutes
return _delete_gists_older_than_minutes
else
delayederror("$(task) is not a valid task")
end
end
return _bitbucket_provider
end
end # End submodule MirrorUpdater.Hosts.BitbucketHost
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 17836 | ##### Beginning of file
module GitHubHost # Begin submodule MirrorUpdater.Hosts.GitHubHost
__precompile__(true)
import ...delayederror
import ..Types
import ..Utils
import Dates
import GitHub
import TimeZones
function new_github_session(
;
github_organization::AbstractString,
github_bot_username::AbstractString,
github_bot_personal_access_token::AbstractString,
)::Function
_github_organization::String = strip(
convert(String, github_organization)
)
_provided_github_bot_username::String = strip(
convert(String, github_bot_username,)
)
_github_bot_personal_access_token::String = strip(
convert(String, github_bot_personal_access_token)
)
function _get_github_username(auth::GitHub.Authorization)::String
user_information::AbstractDict = GitHub.gh_get_json(
GitHub.DEFAULT_API,
"/user";
auth = auth,
)
username::String = user_information["name"]
username_stripped::String = strip(username)
return username_stripped
end
@info("Attempting to authenticate to GitHub...")
auth::GitHub.Authorization = GitHub.authenticate(
_github_bot_personal_access_token
)
_github_username::String = _get_github_username(auth)
if lowercase(strip(_github_username)) !=
lowercase(strip(_provided_github_bot_username))
delayederror(
string(
"Provided GitHub username ",
"(\"$(_provided_github_bot_username)\") ",
"does not match ",
"actual GitHub username ",
"(\"$(_github_username)\").",
)
)
else
@info(
string(
"Provided GitHub username matches ",
"actual GitHub username.",
),
_provided_github_bot_username,
_github_username,
)
end
@info("Successfully authenticated to GitHub :)")
@info(
string(
"GitHub username: ",
"$(_get_github_username(auth))",
)
)
@info(
string(
"GitHub organization: ",
"$(_github_organization)",
)
)
repository_owner = GitHub.owner(
_github_organization,
true;
auth = auth,
)
function _create_gist(params::AbstractDict)::Nothing
gist_description::String = strip(params[:gist_description])
gist_content::String = strip(params[:gist_content])
@info("Attempting to create gist on GitHub...")
create_gist_function = () ->
GitHub.create_gist(
;
auth = auth,
params = Dict(
:public => true,
:description => gist_description,
:files => Dict(
"list.txt" => Dict("content" => gist_content,),
),
),
)
Utils.retry_function_until_success(
create_gist_function;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
@info("Successfully created gist on GitHub.")
return nothing
end
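    # Page through the bot account's gists, 100 per page, until an empty
    # page is returned.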
function _get_all_gists()::Vector{GitHub.Gist}
@info("Loading the list of all of my GitHub gists")
full_gist_list::Vector{GitHub.Gist} = GitHub.Gist[]
need_to_continue::Bool = true
current_page_number::Int = 1
while need_to_continue
gists, page_data = GitHub.gists(
_github_username;
params = Dict(
"per_page" => 100,
"page" => current_page_number,
),
auth = auth,
)
if length(gists) == 0
need_to_continue = false
else
for x in gists
if x in full_gist_list
else
push!(full_gist_list, x)
end
end
need_to_continue = true
current_page_number += 1
end
end
unique_gist_list::Vector{GitHub.Gist} = unique(full_gist_list)
return unique_gist_list
end
function _retrieve_gist(params::AbstractDict)::String
gist_description_to_match::String = params[:gist_description]
correct_gist_id::String = ""
all_my_gists = _get_all_gists()
for gist in all_my_gists
if gist.description == gist_description_to_match
correct_gist_id = gist.id
end
end
result::String = ""
if length(correct_gist_id) > 0
@info("Downloading the correct GitHub gist")
correct_gist::GitHub.Gist = GitHub.gist(
correct_gist_id;
auth = auth,
)
correct_gist_content::String = correct_gist.files[
"list.txt"]["content"]
result = correct_gist_content
else
result = ""
end
if length(result) == 0
delayederror("Could not find the matching Gist")
end
return result
end
function _delete_gists(params::AbstractDict)::Nothing
gist_description_to_match::String = params[:gist_description]
list_of_gist_ids_to_delete::Vector{String} = String[]
all_my_gists::Vector{GitHub.Gist} = _get_all_gists()
for gist in all_my_gists
if gist.description == gist_description_to_match
push!(list_of_gist_ids_to_delete, strip(gist.id),)
end
end
for gist_id_to_delete in list_of_gist_ids_to_delete
GitHub.delete_gist(gist_id_to_delete;auth = auth,)
@info(string("Deleted GitHub gist id $(gist_id_to_delete)"))
end
return nothing
end
function _delete_gists_older_than_minutes(params::AbstractDict)::Nothing
time::TimeZones.ZonedDateTime =
params[:time]
delete_gists_older_than_minutes::Int =
params[:delete_gists_older_than_minutes]
max_gist_age_milliseconds::Int =
delete_gists_older_than_minutes*60*1000
list_of_gist_ids_to_delete::Vector{String} = String[]
all_my_gists::Vector{GitHub.Gist} = _get_all_gists()
for gist in all_my_gists
gist_updated_at = gist.updated_at
gist_updated_at_zoned = TimeZones.ZonedDateTime(
gist_updated_at,
TimeZones.localzone(),
)
gist_age = time - gist_updated_at_zoned
if gist_age.value > max_gist_age_milliseconds
push!(list_of_gist_ids_to_delete, strip(gist.id),)
end
end
for gist_id_to_delete in list_of_gist_ids_to_delete
GitHub.delete_gist(gist_id_to_delete;auth = auth,)
@info(string("Deleted GitHub gist id $(gist_id_to_delete)"))
end
return nothing
end
function _repo_name_with_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo,
org = org,
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
result::String = string(
org_stripped,
"/",
repo_name_without_org,
)
return result
end
function _repo_name_without_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_stripped::String = strip(
strip(strip(strip(strip(convert(String, repo)), '/')), '/')
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
if length(org_stripped) == 0
result = repo_stripped
else
repo_stripped_lowercase::String = lowercase(repo_stripped)
org_stripped_lowercase::String = lowercase(org_stripped)
org_stripped_lowercase_withtrailingslash::String = string(
org_stripped_lowercase,
"/",
)
if startswith(repo_stripped_lowercase,
org_stripped_lowercase_withtrailingslash)
index_start =
length(org_stripped_lowercase_withtrailingslash) + 1
result = repo_stripped[index_start:end]
else
result = repo_stripped
end
end
return result
end
function _get_destination_url(
;
repo_name::String,
credentials::Symbol,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _github_organization,
)
result::String = ""
if credentials == :with_auth
result = string(
"https://",
_github_username,
":",
_github_bot_personal_access_token,
"@",
"github.com/",
_github_organization,
"/",
repo_name_without_org,
)
elseif credentials == :with_redacted_auth
result = string(
"https://",
_github_username,
":",
"*****",
"@",
"github.com/",
_github_organization,
"/",
repo_name_without_org,
)
elseif credentials == :without_auth
result =string(
"https://",
"github.com/",
_github_organization,
"/",
repo_name_without_org,
)
else
delayederror(
"$(credentials) is not a supported value for credentials"
)
end
return result
end
function _github_repo_exists(
;
repo_name::String,
)::Bool
repo_name_with_org = _repo_name_with_org(
;
repo = repo_name,
org = _github_organization,
)
result::Bool = try
repo = GitHub.repo(
repo_name_with_org;
auth = auth,
)
true
catch
false
end
return result
end
function _create_repo(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
repo_name_with_org::String = _repo_name_with_org(
;
repo = repo_name,
org = _github_organization,
)
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _github_organization,
)
repo_destination_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
if Utils._url_exists(repo_destination_url_without_auth)
@info("According to HTTP GET request, the repo exists.")
else
if _github_repo_exists(; repo_name = repo_name_with_org)
@info("According to the GitHub API, the repo exists.")
else
@info(
string("Creating new repo on GitHub"),
repo_destination_url_without_auth,
)
repo = GitHub.create_repo(
repository_owner,
repo_name_without_org,
Dict{String, Any}(
"has_issues" => "false",
"has_wiki" => "false",
"has_projects" => "false",
);
auth = auth,
)
end
end
return nothing
end
function _push_mirrored_repo(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
repo_directory::String = params[:directory]
git_path::String = params[:git]
try_but_allow_failures_url_list =
params[:try_but_allow_failures_url_list]
repo_name_without_org = _repo_name_without_org(
;
repo = repo_name,
org = _github_organization,
)
repo_dest_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
repo_dest_url_with_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_auth,
)
repo_dest_url_with_redacted_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_redacted_auth,
)
previous_directory = pwd()
cd(repo_directory)
mirrorpush_cmd_withauth =
`$(git_path) push --mirror $(repo_dest_url_with_auth)`
mirrorpush_cmd_withredactedauth =
`$(git_path) push --mirror $(repo_dest_url_with_redacted_auth)`
@info(
string("Attempting to push repo to GitHub..."),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
try
Utils.command_ran_successfully!!(
mirrorpush_cmd_withauth;
error_on_failure = true,
last_resort_run = true,
)
@info(
string("Successfully pushed repo to GitHub."),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
catch exception
@warn("caught exception: ", exception)
if repo_dest_url_without_auth in try_but_allow_failures_url_list
@warn(
string(
"repo_dest_url_without_auth is in the ",
"try_but_allow_failures_url_list, so ignoring ",
"exception.",
),
repo_dest_url_without_auth,
exception,
)
else
delayederror(string(exception); exception=exception,)
end
end
cd(previous_directory)
return nothing
end
function _generate_new_repo_description(
params::AbstractDict,
)::String
source_url::String = params[:source_url]
when::TimeZones.ZonedDateTime = params[:when]
time_zone::TimeZones.TimeZone = params[:time_zone]
by::String = strip(string("@", _github_username))
new_description::String = Utils.default_repo_description(
;
from = source_url,
when = when,
time_zone = time_zone,
by = by,
)
return new_description
end
function _update_repo_description(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
new_repo_description = params[:new_repo_description]
_create_repo(
Dict(
:repo_name => repo_name,
),
)
repo_name_with_org::String = _repo_name_with_org(
;
repo = repo_name,
org = _github_organization,
)
github_repo_function = () -> GitHub.repo(
repo_name_with_org;
auth = auth,
)
repo = Utils.retry_function_until_success(
github_repo_function;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
@info("Attempting to update repo description on GitHub...")
github_update_description_function = () ->
GitHub.gh_patch_json(
GitHub.DEFAULT_API,
"/repos/$(GitHub.name(repo.owner))/$(GitHub.name(repo.name))";
auth = auth,
params = Dict(
"name" => GitHub.name(repo.name),
"description" => new_repo_description,
),
)
result = Utils.retry_function_until_success(
github_update_description_function;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
@info("Successfully updated repo description on GitHub")
return nothing
end
function _github_provider(task::Symbol)::Function
if task == :create_gist
return _create_gist
elseif task == :retrieve_gist
return _retrieve_gist
elseif task == :delete_gists
return _delete_gists
elseif task == :create_repo
return _create_repo
elseif task == :push_mirrored_repo
return _push_mirrored_repo
elseif task == :generate_new_repo_description
return _generate_new_repo_description
elseif task == :update_repo_description
return _update_repo_description
elseif task == :delete_gists_older_than_minutes
return _delete_gists_older_than_minutes
else
delayederror("$(task) is not a valid task")
end
end
return _github_provider
end
end # End submodule MirrorUpdater.Hosts.GitHubHost
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 28067 | ##### Beginning of file
module GitLabHost # Begin submodule MirrorUpdater.Hosts.GitLabHost
__precompile__(true)
import ...delayederror
import ..Types
import ..Utils
import Dates
import HTTP
import JSON
import TimeZones
function new_gitlab_session(
;
gitlab_group::String,
gitlab_bot_username::String,
gitlab_bot_personal_access_token::String,
)::Function
_gitlab_group::String = strip(
convert(String, gitlab_group)
)
_provided_gitlab_bot_username::String = strip(
convert(String, gitlab_bot_username,)
)
_gitlab_bot_personal_access_token::String = strip(
convert(String, gitlab_bot_personal_access_token)
)
function _get_gitlab_username()::String
method::String = "GET"
url::String = "https://gitlab.com/api/v4/user"
headers::Dict{String, String} = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request = () -> HTTP.request(
method,
url,
headers,
)
r::HTTP.Messages.Response = Utils.retry_function_until_success(
http_request;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body::String = String(r.body)
parsed_r_body::Dict = JSON.parse(r_body)
username::String = parsed_r_body["name"]
username_stripped::String = strip(username)
return username_stripped
end
function _get_all_my_namespaces()::Vector{Dict}
method::String = "GET"
url::String = "https://gitlab.com/api/v4/namespaces"
headers::Dict{String, String} = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request = () -> HTTP.request(
method,
url,
headers,
)
r = Utils.retry_function_until_success(
http_request;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body = String(r.body)
parsed_body = JSON.parse(r_body)
return parsed_body
end
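    # Find the numeric namespace id that corresponds to the configured
    # GitLab group.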
function _get_namespace_id_for_my_group()::Int
all_my_namespaces::Vector{Dict} = _get_all_my_namespaces()
result::Int = 0
for i = 1:length(all_my_namespaces)
namespace = all_my_namespaces[i]
if strip(namespace["name"]) == strip(_gitlab_group)
result = namespace["id"]
end
end
if result == 0
delayederror("Could not find the id for my group")
end
return result
end
@info("Attempting to authenticate to GitLab...")
_gitlab_username::String = _get_gitlab_username()
if lowercase(strip(_gitlab_username)) !=
lowercase(strip(_provided_gitlab_bot_username))
delayederror(
string(
"Provided GitLab username ",
"(\"$(_provided_gitlab_bot_username)\") ",
"does not match ",
"actual GitLab username ",
"(\"$(_gitlab_username)\").",
)
)
else
@info(
string(
"Provided GitLab username matches ",
"actual GitLab username.",
),
_provided_gitlab_bot_username,
_gitlab_username,
)
end
@info("Successfully authenticated to GitLab :)")
@info(
string(
"GitLab username: ",
"$(_get_gitlab_username())",
)
)
@info(
string(
"GitLab group (a.k.a. organization): ",
"$(_gitlab_group)",
)
)
function _create_gist(params::AbstractDict)::Nothing
gist_description::String = strip(params[:gist_description])
gist_content::String = strip(params[:gist_content])
@info("Attempting to create snippet on GitLab...")
method::String = "POST"
url::String = "https://gitlab.com/api/v4/snippets"
headers::Dict{String, String} = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
"content-type" => "application/json",
)
        # Use a separate name for the request payload so it does not shadow
        # (and re-declare the type of) the `params` function argument.
        request_params::Dict{String, String} = Dict(
            "title" => "list.txt",
            "file_name" => "list.txt",
            "content" => gist_content,
            "description" => gist_description,
            "visibility" => "public",
        )
        body::String = JSON.json(request_params)
try
r::HTTP.Messages.Response = HTTP.request(
method,
url,
headers,
body,
)
@info("Successfully created snippet on GitLab.")
catch exception
@warn("ignoring exception: ", exception,)
        end
        return nothing
    end
function _get_all_gists()::Vector{Dict}
@info("Loading the list of all of my GitLab snippets")
gist_dict_list::Vector{Dict} = Dict[]
gist_id_list::Vector{Int} = Int[]
need_to_continue::Bool = true
current_page_number::Int = 1
method::String = "GET"
headers::Dict{String, String} = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
url::String = ""
while need_to_continue
url = string(
"https://gitlab.com/api/v4/snippets",
"?per_page=100&page=$(current_page_number)&",
)
r = HTTP.request(
method,
url,
headers,
)
r_body = String(r.body)
parsed_body = JSON.parse(r_body)
if length(parsed_body) == 0
need_to_continue = false
else
need_to_continue = true
current_page_number += 1
for i = 1:length(parsed_body)
gist_dict = parsed_body[i]
gist_id = gist_dict["id"]
if gist_id in gist_id_list
@debug(
string("already have this gist"),
gist_id,
)
else
push!(gist_id_list, gist_id)
push!(gist_dict_list, gist_dict)
end
end
end
end
return gist_dict_list
end
function _retrieve_gist(params::AbstractDict)::String
gist_description_to_match::String = strip(params[:gist_description])
correct_gist_id::Int = 0
correct_gist_raw_url::String = ""
all_my_gists = _get_all_gists()
for gist in all_my_gists
if strip(gist["description"]) == gist_description_to_match
correct_gist_id = gist["id"]
correct_gist_raw_url = strip(gist["raw_url"])
end
end
result::String = ""
if correct_gist_id > 0
@info("Downloading the correct GitLab snippet")
method::String = "GET"
headers::Dict{String, String} = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
url::String = strip(correct_gist_raw_url)
r = HTTP.request(
method,
url,
headers,
)
result = strip(String(r.body))
else
result = ""
end
if length(result) == 0
delayederror("Could not find the matching GitLab snippet")
end
return result
end
function _delete_gists(params::AbstractDict)::Nothing
gist_description_to_match::String = strip(params[:gist_description])
list_of_gist_ids_to_delete::Vector{Int} = Int[]
all_my_gists::Vector{Dict} = _get_all_gists()
for gist in all_my_gists
if strip(gist["description"]) == gist_description_to_match
push!(list_of_gist_ids_to_delete, gist["id"],)
end
end
for gist_id_to_delete in list_of_gist_ids_to_delete
method = "DELETE"
url = string(
"https://gitlab.com/api/v4/snippets/$(gist_id_to_delete)",
)
headers = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
r = HTTP.request(
method,
url,
headers,
)
@info(string("Deleted GitLab snippet id $(gist_id_to_delete)"))
end
return nothing
end
function _delete_gists_older_than_minutes(params::AbstractDict)::Nothing
time::TimeZones.ZonedDateTime =
params[:time]
delete_gists_older_than_minutes::Int =
params[:delete_gists_older_than_minutes]
max_gist_age_milliseconds::Int =
delete_gists_older_than_minutes*60*1000
list_of_gist_ids_to_delete::Vector{Int} = Int[]
all_my_gists::Vector{Dict} = _get_all_gists()
for gist in all_my_gists
gist_updated_at = Dates.DateTime(
gist["updated_at"][1:end-1]
)
gist_updated_at_zoned = TimeZones.ZonedDateTime(
gist_updated_at,
TimeZones.TimeZone("UTC"),
)
gist_age = time - gist_updated_at_zoned
if gist_age.value > max_gist_age_milliseconds
push!(list_of_gist_ids_to_delete, gist["id"],)
end
end
for gist_id_to_delete in list_of_gist_ids_to_delete
method = "DELETE"
url = string(
"https://gitlab.com/api/v4/snippets/$(gist_id_to_delete)",
)
headers = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
r = HTTP.request(
method,
url,
headers,
)
@info(string("Deleted GitLab snippet id $(gist_id_to_delete)"))
end
return nothing
end
function _repo_name_with_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo,
org = org,
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
result::String = string(
org_stripped,
"/",
repo_name_without_org,
)
return result
end
function _repo_name_without_org(
;
repo::AbstractString,
org::AbstractString,
)::String
repo_stripped::String = strip(
strip(strip(strip(strip(convert(String, repo)), '/')), '/')
)
org_stripped::String = strip(
strip(strip(strip(strip(convert(String, org)), '/')), '/')
)
if length(org_stripped) == 0
result = repo_stripped
else
repo_stripped_lowercase::String = lowercase(repo_stripped)
org_stripped_lowercase::String = lowercase(org_stripped)
org_stripped_lowercase_withtrailingslash::String = string(
org_stripped_lowercase,
"/",
)
if startswith(repo_stripped_lowercase,
org_stripped_lowercase_withtrailingslash)
index_start =
length(org_stripped_lowercase_withtrailingslash) + 1
result = repo_stripped[index_start:end]
else
result = repo_stripped
end
end
return result
end
function _get_destination_url(
;
repo_name::String,
credentials::Symbol,
)::String
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _gitlab_group,
)
result::String = ""
if credentials == :with_auth
result = string(
"https://",
_gitlab_username,
":",
_gitlab_bot_personal_access_token,
"@",
"gitlab.com/",
_gitlab_group,
"/",
repo_name_without_org,
)
elseif credentials == :with_redacted_auth
result = string(
"https://",
_gitlab_username,
":",
"*****",
"@",
"gitlab.com/",
_gitlab_group,
"/",
repo_name_without_org,
)
elseif credentials == :without_auth
result =string(
"https://",
"gitlab.com/",
_gitlab_group,
"/",
repo_name_without_org,
)
else
delayederror("$(credentials) is not a supported value for credentials")
end
return result
end
function _gitlab_repo_exists(
;
repo_name::String,
)::Bool
repo_name_without_org = _repo_name_without_org(
;
repo = repo_name,
org = _gitlab_group,
)
result::Bool = try
method = "GET"
url = string(
"https://gitlab.com/api/v4/",
"projects/$(_gitlab_group)%2F$(repo_name)",
)
headers = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
r = HTTP.request(
method,
url,
headers,
)
true
catch
false
end
return result
end
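    # List the project's protected branches; they are unprotected before a
    # mirror push so that forced ref updates are not rejected.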
function _list_all_repo_protected_branches(
params::AbstractDict,
)::Vector{String}
repo_name::String = params[:repo_name]
method_1 = "GET"
url_1 = string(
"https://gitlab.com/api/v4/",
"projects/$(_gitlab_group)%2F$(repo_name)",
)
headers_1 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_1 = () -> HTTP.request(
method_1,
url_1,
headers_1,
)
r_1 = Utils.retry_function_until_success(
http_request_1;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body_1 = String(r_1.body)
parsed_body_1 = JSON.parse(r_body_1)
repo_id = parsed_body_1["id"]
method_2 = "GET"
url_2 = "https://gitlab.com/api/v4/projects/$(repo_id)/protected_branches"
headers_2 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_2 = () -> HTTP.request(
method_2,
url_2,
headers_2,
)
r_2 = Utils.retry_function_until_success(
http_request_2;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body_2 = String(r_2.body)
parsed_body_2 = JSON.parse(r_body_2)
list = String[]
for x in parsed_body_2
push!(
list,
strip(x["name"]),
)
end
return list
end
function _unprotect_all_repo_branches(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
list_of_protected_branches = _list_all_repo_protected_branches(
params
)
method_1 = "GET"
url_1 = string(
"https://gitlab.com/api/v4/",
"projects/$(_gitlab_group)%2F$(repo_name)",
)
headers_1 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_1 = () -> HTTP.request(
method_1,
url_1,
headers_1,
)
r_1 = Utils.retry_function_until_success(
http_request_1;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body_1 = String(r_1.body)
parsed_body_1 = JSON.parse(r_body_1)
repo_id = parsed_body_1["id"]
for branch_name in list_of_protected_branches
method_2 = "DELETE"
url_2 = "https://gitlab.com/api/v4/projects/$(repo_id)/protected_branches/$(branch_name)"
headers_2 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_2 = () -> HTTP.request(
method_2,
url_2,
headers_2,
)
r_2 = Utils.retry_function_until_success(
http_request_2;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
end
return nothing
end
function _create_repo(params::AbstractDict)::Nothing
repo_name::String = strip(params[:repo_name])
repo_name_with_org::String = _repo_name_with_org(
;
repo = repo_name,
org = _gitlab_group,
)
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _gitlab_group,
)
repo_destination_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
# if Utils._url_exists(repo_destination_url_without_auth)
if false
@info("According to HTTP GET request, the repo exists.")
else
if _gitlab_repo_exists(; repo_name = repo_name_without_org)
@info("According to the GitLab API, the repo exists.")
else
@info(
string("Creating new repo on GitLab"),
repo_destination_url_without_auth,
)
method = "POST"
url = "https://gitlab.com/api/v4/projects"
headers = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
"content-type" => "application/json",
)
params = Dict(
"name" => repo_name_without_org,
"path" => repo_name_without_org,
"namespace_id" => _get_namespace_id_for_my_group(),
"issues_enabled" => false,
"merge_requests_enabled" => false,
"jobs_enabled" => false,
"wiki_enabled" => false,
"snippets_enabled" => false,
"resolve_outdated_diff_discussions" => false,
"container_registry_enabled" => false,
"shared_runners_enabled" => false,
"visibility" => "public",
"public_jobs" => false,
"only_allow_merge_if_pipeline_succeeds" =>
false,
"only_allow_merge_if_all_discussions_are_resolved" =>
false,
"lfs_enabled" => false,
"request_access_enabled" => false,
"printing_merge_request_link_enabled" => false,
"initialize_with_readme" => false,
)
body = JSON.json(params)
r = HTTP.request(
method,
url,
headers,
body,
)
end
end
return nothing
end
function _delete_repo(params::AbstractDict)::Nothing
repo_name::String = params[:repo_name]
method_1 = "GET"
url_1 = string(
"https://gitlab.com/api/v4/",
"projects/$(_gitlab_group)%2F$(repo_name)",
)
headers_1 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_1 = () -> HTTP.request(
method_1,
url_1,
headers_1,
)
r_1 = Utils.retry_function_until_success(
http_request_1;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body_1 = String(r_1.body)
parsed_body_1 = JSON.parse(r_body_1)
repo_id = parsed_body_1["id"]
method_2 = "DELETE"
url_2 = "https://gitlab.com/api/v4/projects/$(repo_id)"
headers_2 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_2 = () -> HTTP.request(
method_2,
url_2,
headers_2,
)
r_2 = Utils.retry_function_until_success(
            http_request_2;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
return nothing
end
function _push_mirrored_repo(params::AbstractDict)::Nothing
# _delete_repo(params)
# sleep(3)
# _create_repo(params)
_unprotect_all_repo_branches(params)
repo_name::String = params[:repo_name]
repo_directory::String = params[:directory]
git_path::String = params[:git]
try_but_allow_failures_url_list =
params[:try_but_allow_failures_url_list]
repo_name_without_org = _repo_name_without_org(
;
repo = repo_name,
org = _gitlab_group,
)
repo_dest_url_without_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :without_auth,
)
repo_dest_url_with_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_auth,
)
repo_dest_url_with_redacted_auth = _get_destination_url(
;
repo_name = repo_name_without_org,
credentials = :with_redacted_auth,
)
previous_directory = pwd()
cd(repo_directory)
mirrorpush_cmd_withauth =
`$(git_path) push --mirror $(repo_dest_url_with_auth)`
mirrorpush_cmd_withredactedauth =
`$(git_path) push --mirror $(repo_dest_url_with_redacted_auth)`
@info(
string("Attempting to push repo to GitLab..."),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
try
Utils.command_ran_successfully!!(
mirrorpush_cmd_withauth;
error_on_failure = false,
last_resort_run = true,
)
@info(
string(
"Pushed repo to GitLab. ",
"Maybe it was a success, ",
"or maybe it was a failure.",
),
mirrorpush_cmd_withredactedauth,
pwd(),
ENV["PATH"],
)
catch exception
@warn("caught exception: ", exception)
if repo_dest_url_without_auth in try_but_allow_failures_url_list
@warn(
string(
"repo_dest_url_without_auth is in the ",
"try_but_allow_failures_url_list, so ignoring ",
"exception.",
),
repo_dest_url_without_auth,
exception,
)
else
@warn(
string(
"The push to GitLab failed. Normally, I would throw ",
"an error. But GitLab will often reject some of the refs.",
"So I'll assume that's what's going on here.",
"And I will ignore the error.",
),
repo_dest_url_without_auth,
exception,
)
end
end
cd(previous_directory)
return nothing
end
function _generate_new_repo_description(
params::AbstractDict,
)::String
source_url::String = params[:source_url]
when::TimeZones.ZonedDateTime = params[:when]
time_zone::TimeZones.TimeZone = params[:time_zone]
by::String = strip(string("@", _gitlab_username))
new_description::String = Utils.default_repo_description(
;
from = source_url,
when = when,
time_zone = time_zone,
by = by,
)
return new_description
end
function _update_repo_description(params::AbstractDict)::Nothing
repo_name::String = strip(params[:repo_name])
new_repo_description = strip(params[:new_repo_description])
_create_repo(
Dict(
:repo_name => repo_name,
),
)
repo_name_without_org::String = _repo_name_without_org(
;
repo = repo_name,
org = _gitlab_group,
)
method_1 = "GET"
url_1 = string(
"https://gitlab.com/api/v4/",
"projects/$(_gitlab_group)%2F$(repo_name)",
)
headers_1 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
)
http_request_1 = () -> HTTP.request(
method_1,
url_1,
headers_1,
)
r_1 = Utils.retry_function_until_success(
http_request_1;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
r_body_1 = String(r_1.body)
parsed_body_1 = JSON.parse(r_body_1)
repo_id = parsed_body_1["id"]
method_2 = "PUT"
url_2 = string(
"https://gitlab.com/api/v4/",
"projects/$(repo_id)",
)
headers_2 = Dict(
"PRIVATE-TOKEN" => gitlab_bot_personal_access_token,
"content-type" => "application/json",
)
params_2 = Dict(
"description" => new_repo_description,
)
body_2 = JSON.json(params_2)
@info("Attempting to update repo description on GitLab...")
http_request_2 = () -> HTTP.request(
method_2,
url_2,
headers_2,
body_2,
)
r_2 = Utils.retry_function_until_success(
http_request_2;
max_attempts = 10,
seconds_to_wait_between_attempts = 180,
)
@info("Successfully updated repo description on GitLab")
return nothing
end
function _gitlab_provider(task::Symbol)::Function
if task == :create_gist
return _create_gist
elseif task == :retrieve_gist
return _retrieve_gist
elseif task == :delete_gists
return _delete_gists
elseif task == :create_repo
return _create_repo
elseif task == :push_mirrored_repo
return _push_mirrored_repo
elseif task == :generate_new_repo_description
return _generate_new_repo_description
elseif task == :update_repo_description
return _update_repo_description
elseif task == :delete_gists_older_than_minutes
return _delete_gists_older_than_minutes
else
delayederror("$(task) is not a valid task")
end
end
return _gitlab_provider
end
end # End submodule MirrorUpdater.Hosts.GitLabHost
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 9103 | ##### Beginning of file
module Run # Begin submodule MirrorUpdater.Run
__precompile__(true)
import ArgParse
import Dates
import HTTP
import Pkg
import TimeZones
import ..Types
import ..Utils
import ..Common
import ..delayederror
import ..process_delayed_error_list
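# Entry point for a mirroring run. `task` is "all", "make-list", "clean-up",
# or an interval string such as "[A,Q)" that restricts the run to a slice of
# destination repository names.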
function run_mirror_updater!!(
;
registry_list::Vector{Types.Registry},
delete_gists_older_than_minutes::Int = 0,
git_hosting_providers::AbstractVector =
Any[],
task::String =
"all",
gist_description::String =
"",
is_dry_run::Bool =
false,
additional_repos::Vector{Types.SrcDestPair} =
Types.SrcDestPair[],
do_not_push_to_these_destinations::Vector{String} =
String[],
do_not_try_url_list::Vector{String} =
String[],
try_but_allow_failures_url_list::Vector{String} =
String[],
        time_zone::TimeZones.TimeZone =
            TimeZones.TimeZone("America/New_York"),
)::Nothing
@info("Running MirrorUpdater.Run.run_mirror_updater!!")
if length(git_hosting_providers) == 0
delayederror(
string(
"You must supply at least one git hosting provider",
)
)
elseif length(git_hosting_providers) == 1
@info(
string(
"I will push to one git hosting provider.",
),
)
else
@info(
string(
"I will push to $(length(git_hosting_providers)) ",
"git hosting providers.",
),
)
end
has_gist_description::Bool = length(gist_description) > 0
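    # When a gist description is provided, stage 1 uploads the full repo list
    # as a gist so that later interval-restricted jobs can retrieve the exact
    # same list in stage 2; stage 3 deletes those gists afterwards.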
if task == "all" || task == "make-list"
@info("Starting stage 1...")
@info("Making list of repos to mirror...")
all_repos_to_mirror_stage1::Vector{Types.SrcDestPair} =
Common._make_list(
registry_list,
additional_repos;
do_not_try_url_list =
do_not_try_url_list,
try_but_allow_failures_url_list =
try_but_allow_failures_url_list,
)
gist_content_stage1::String = Common._src_dest_pair_list_to_string(
all_repos_to_mirror_stage1
)
@info(
string(
"The full list has ",
"$(length(all_repos_to_mirror_stage1)) ",
"unique pairs.",
)
)
if has_gist_description
for p in 1:length(git_hosting_providers)
@info(
string(
"Git hosting provider ",
"$(p) of $(length(git_hosting_providers))",
),
)
provider = git_hosting_providers[p]
@info(
string(
"Creating gist on git hosting provider $(p).",
)
)
args = Dict(
:gist_description => gist_description,
:gist_content => gist_content_stage1,
)
provider(:create_gist)(args)
end
end
@info("SUCCESS: Stage 1 completed successfully.")
end
if task == "all" || Types._is_interval(task)
@info("Starting stage 2...")
if has_gist_description
correct_gist_content_stage2::String = ""
@info("looking for the correct gist")
args = Dict(
:gist_description => gist_description,
)
for p = 1:length(git_hosting_providers)
@info(
string(
"Git hosting provider ",
"$(p) of $(length(git_hosting_providers))",
),
)
provider = git_hosting_providers[p]
if length(correct_gist_content_stage2) == 0
@info(
string(
"Searching git hosting provider $(p) ",
"for the correct gist.",
)
)
correct_gist_content_stage2 = try
provider(:retrieve_gist)(args)
catch exception
@warn("Ignored exception", exception,)
""
end
end
end
if length(strip(correct_gist_content_stage2)) == 0
delayederror("I could not find the correct gist on any host")
end
all_repos_to_mirror_stage2 =
Common._string_to_src_dest_pair_list(
correct_gist_content_stage2
)
else
@info("no need to download any gists: I already have the list")
all_repos_to_mirror_stage2 =
all_repos_to_mirror_stage1
end
@info(
string(
"The full list has ",
"$(length(all_repos_to_mirror_stage2)) ",
"unique pairs.",
)
)
if Types._is_interval(task)
task_interval::Types.AbstractInterval =
Types._construct_interval(task)
@info(
string("Using interval for stage 2: "),
task_interval,
)
selected_repos_to_mirror_stage2 =
Common._pairs_that_fall_in_interval(
all_repos_to_mirror_stage2,
task_interval,
)
else
selected_repos_to_mirror_stage2 =
all_repos_to_mirror_stage2
end
@info(
string(
"The selected subset of the list ",
"for this particular job has ",
"$(length(selected_repos_to_mirror_stage2)) ",
"unique pairs.",
)
)
Common._push_mirrors!!(
;
src_dest_pairs = selected_repos_to_mirror_stage2,
git_hosting_providers = git_hosting_providers,
is_dry_run = is_dry_run,
do_not_try_url_list =
do_not_try_url_list,
try_but_allow_failures_url_list =
try_but_allow_failures_url_list,
do_not_push_to_these_destinations =
do_not_push_to_these_destinations,
time_zone = time_zone,
)
@info("SUCCESS: Stage 2 completed successfully.")
end
if task == "all" || task == "clean-up"
@info("Starting stage 3...")
if has_gist_description
args = Dict(
:gist_description => gist_description
)
for p = 1:length(git_hosting_providers)
@info(
string(
"Git hosting provider ",
"$(p) of $(length(git_hosting_providers))",
),
)
provider = git_hosting_providers[p]
@info(
string(
"Deleting gists from git hosting provider $(p) ",
"that match the provided ",
"gist description.",
),
gist_description,
)
try
provider(:delete_gists)(args)
catch exception
@warn("ignoring exception: ", exception)
end
end
end
if delete_gists_older_than_minutes > 0
time::TimeZones.ZonedDateTime = Dates.now(
TimeZones.localzone()
)
args = Dict(
:delete_gists_older_than_minutes =>
delete_gists_older_than_minutes,
:time =>
time,
)
for p = 1:length(git_hosting_providers)
provider = git_hosting_providers[p]
@info(
string(
"Deleting gists from git hosting provider $(p) ",
"that are older than the provided ",
"age in minutes.",
),
delete_gists_older_than_minutes,
)
try
provider(:delete_gists_older_than_minutes)(args)
catch exception
@warn("ignoring exception: ", exception)
end
end
end
@info("SUCCESS: Stage 3 completed successfully.")
end
@info(
string(
"SUCCESS: run_mirror_updater completed ",
"successfully :) Good-bye!",
)
)
process_delayed_error_list()
return nothing
end
end # End submodule MirrorUpdater.Run
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 8200 | ##### Beginning of file
module Types # Begin submodule MirrorUpdater.Types
__precompile__(true)
import ..delayederror
abstract type AbstractInterval end
function _name_with_jl(x::AbstractString)::String
name_without_jl::String = _name_without_jl(x)
name_with_jl::String = string(name_without_jl, ".jl")
return name_with_jl
end
function _name_without_jl(x::AbstractString)::String
temp::String = strip(convert(String, x))
if endswith(lowercase(temp), ".jl")
result = strip(temp[1:end-3])
else
result = temp
end
return result
end
function _name_with_git(x::AbstractString)::String
name_without_git::String = _name_without_git(x)
name_with_git::String = string(name_without_git, ".git")
return name_with_git
end
function _name_without_git(x::AbstractString)::String
temp::String = strip(convert(String, x))
if endswith(lowercase(temp), ".git")
result = strip(temp[1:end-4])
else
result = temp
end
return result
end
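# Interval strings take one of four forms: "[,)" (no bounds), "[A,)" (lower
# bound only), "[,B)" (upper bound only), or "[A,B)" (both bounds).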
function _is_interval(x::String)::Bool
if _is_no_bounds_interval(x)
return true
elseif _is_lower_bound_only_interval(x)
return true
elseif _is_upper_bound_only_interval(x)
return true
elseif _is_lower_and_upper_bound_interval(x)
return true
else
return false
end
end
function _get_lower_and_upper_bound_interval_regex()::Regex
lower_and_upper_bound_interval_regex::Regex =
r"\[(\w\w*?)\,(\w\w*?)\)"
return lower_and_upper_bound_interval_regex
end
function _get_lower_bound_only_interval_regex()::Regex
lower_bound_only_interval_regex::Regex =
r"\[(\w\w*?)\,\)"
return lower_bound_only_interval_regex
end
function _get_upper_bound_only_interval_regex()::Regex
upper_bound_only_interval_regex::Regex =
r"\[\,(\w\w*?)\)"
return upper_bound_only_interval_regex
end
function _get_no_bounds_interval_regex()::Regex
no_bounds_interval_regex::Regex =
r"\[\,\)"
return no_bounds_interval_regex
end
function _is_no_bounds_interval(x::String)::Bool
result::Bool = occursin(
_get_no_bounds_interval_regex(),
x,
)
return result
end
function _is_lower_and_upper_bound_interval(x::String)::Bool
result::Bool = occursin(
_get_lower_and_upper_bound_interval_regex(),
x,
)
return result
end
function _is_lower_bound_only_interval(x::String)::Bool
result::Bool = occursin(
_get_lower_bound_only_interval_regex(),
x,
)
return result
end
function _is_upper_bound_only_interval(x::String)::Bool
result::Bool = occursin(
_get_upper_bound_only_interval_regex(),
x,
)
return result
end
struct Package
name::String
uuid::String
source_url::String
function Package(
name::String,
uuid::String,
source_url::String,
)::Package
correct_name = _name_with_jl(name)
correct_uuid = strip(uuid)
correct_source_url = strip(source_url)
result::Package = new(
correct_name,
correct_uuid,
correct_source_url,
)
return result
end
end
function Package(
;
name::AbstractString,
uuid::AbstractString,
source_url::AbstractString,
)::Package
result::Package = Package(
convert(String,name),
convert(String,uuid),
convert(String,source_url),
)
return result
end
struct Registry
owner::String
name::String
uuid::String
url::String
function Registry(
owner::String,
name::String,
uuid::String,
url::String,
)::Registry
correct_owner = strip(owner)
correct_name = _name_without_jl(name)
correct_uuid = strip(uuid)
correct_url = strip(url)
result::Registry = new(
correct_owner,
correct_name,
correct_uuid,
correct_url,
)
return result
end
end
function Registry(
;
owner::AbstractString,
name::AbstractString,
uuid::AbstractString,
url::AbstractString,
)::Registry
result::Registry = Registry(
convert(String,owner),
convert(String,name),
convert(String,uuid),
convert(String,url),
)
return result
end
struct SrcDestPair
source_url::String
destination_repo_name::String
function SrcDestPair(
source_url::String,
destination_repo_name::String,
)::SrcDestPair
correct_source_url = strip(source_url)
correct_destination_repo_name = strip(destination_repo_name)
result::SrcDestPair = new(
correct_source_url,
correct_destination_repo_name,
)
return result
end
end
function SrcDestPair(
;
source_url::AbstractString,
destination_repo_name::String,
)::SrcDestPair
result::SrcDestPair = SrcDestPair(
convert(String, source_url),
convert(String, destination_repo_name),
)
return result
end
struct NoBoundsInterval <: AbstractInterval
end
struct LowerAndUpperBoundInterval <: AbstractInterval
left::String
right::String
function LowerAndUpperBoundInterval(
left::String,
right::String,
)::LowerAndUpperBoundInterval
correct_left = strip(left)
correct_right = strip(right)
result::LowerAndUpperBoundInterval = new(
correct_left,
correct_right,
)
return result
end
end
struct LowerBoundOnlyInterval <: AbstractInterval
left::String
function LowerBoundOnlyInterval(
left::String,
)::LowerBoundOnlyInterval
correct_left = strip(left)
result::LowerBoundOnlyInterval = new(
correct_left,
)
return result
end
end
struct UpperBoundOnlyInterval <: AbstractInterval
right::String
function UpperBoundOnlyInterval(
right::String,
)::UpperBoundOnlyInterval
correct_right = strip(right)
result::UpperBoundOnlyInterval = new(
correct_right,
)
return result
end
end
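# Parse an interval string of the form "[lower,upper)", "[lower,)", "[,upper)", or "[,)"
# into the corresponding AbstractInterval subtype.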
function _construct_interval(x::String)::AbstractInterval
if _is_no_bounds_interval(x)
result = NoBoundsInterval()
elseif _is_lower_bound_only_interval(x)
loweronly_regexmatch::RegexMatch = match(
_get_lower_bound_only_interval_regex(),
x,
)
loweronly_left::String = strip(
convert(String, loweronly_regexmatch[1])
)
result = LowerBoundOnlyInterval(loweronly_left)
elseif _is_upper_bound_only_interval(x)
upperonly_regexmatch::RegexMatch = match(
_get_upper_bound_only_interval_regex(),
x,
)
upperonly_right::String = strip(
convert(String, upperonly_regexmatch[1])
)
result = UpperBoundOnlyInterval(upperonly_right)
elseif _is_lower_and_upper_bound_interval(x)
lowerandupper_regexmatch::RegexMatch = match(
_get_lower_and_upper_bound_interval_regex(),
x,
)
lowerandupper_left::String = strip(
convert(String, lowerandupper_regexmatch[1])
)
lowerandupper_right::String = strip(
convert(String, lowerandupper_regexmatch[2])
)
result = LowerAndUpperBoundInterval(
lowerandupper_left,
lowerandupper_right,
)
else
delayederror("argument is not a valid interval")
end
return result
end
function Base.isless(
x::SrcDestPair,
y::SrcDestPair,
)::Bool
x_destination_repo_name::String = strip(x.destination_repo_name)
y_destination_repo_name::String = strip(y.destination_repo_name)
result::Bool = Base.isless(
x_destination_repo_name,
y_destination_repo_name,
)
return result
end
end # End submodule MirrorUpdater.Types
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 220 | ##### Beginning of file
module Utils # Begin submodule MirrorUpdater.Utils
__precompile__(true)
import ..delayederror
include("git.jl")
include("misc.jl")
end # End submodule MirrorUpdater.Utils
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 9944 | ##### Beginning of file
import ..package_directory
function _get_git_binary_path()::String
deps_jl_file_path = package_directory("deps", "deps.jl")
if !isfile(deps_jl_file_path)
delayederror(
string(
"MirrorUpdater.jl is not properly installed. ",
"Please run\nPkg.build(\"MirrorUpdater\")",
)
)
end
include(deps_jl_file_path)
git::String = strip(string(git_cmd))
run(`$(git) --version`)
@debug(
"git command: ",
git,
)
return git
end
function git_version()::VersionNumber
git::String = _get_git_binary_path()
a::String = convert(String,read(`$(git) --version`, String))
b::String = convert(String, strip(a))
c::Vector{SubString{String}} = split(b, "git version")
d::String = convert(String,last(c))
e::String = convert(String, strip(d))
f::VersionNumber = VersionNumber(e)
return f
end
function clean_up_branch_name(x::String)::String
temp::String = strip(
strip(
strip(
strip(
strip(x,),
'*',
),
),
'*',
),
)
my_regex::Regex = r"[a-zA-Z0-9_-]*\/[a-zA-Z0-9_-]*\/([a-zA-Z0-9_-]*)"
if occursin(my_regex, temp)
my_match::RegexMatch = match(my_regex, temp)
just_the_branch::String =
clean_up_branch_name(first(my_match.captures))
result = just_the_branch
else
result = temp
end
return result
end
clean_up_branch_name(x::AbstractString) = clean_up_branch_name(
convert(String, x)
)
function get_all_branches_local()
git::String = _get_git_binary_path()
a::String = read(`$(git) branch`, String)
b::String = convert(String, strip(a))
c::Vector{SubString{String}} = split(b, '\n')
d::Vector{String} = clean_up_branch_name.(c)
e::Vector{String} = sort(unique(d))
return e
end
function get_all_branches_local_and_remote()::Vector{String}
git::String = _get_git_binary_path()
a::String = read(`$(git) branch -a`, String)
b::String = convert(String, strip(a))
c::Vector{SubString{String}} = split(b, '\n')
d::Vector{String} = clean_up_branch_name.(c)
e::Vector{String} = sort(unique(d))
return e
end
function get_current_branch()::String
git::String = _get_git_binary_path()
a::String = read(`$(git) status`, String)
b::String = convert(String, strip(a))
c::Vector{SubString{String}} = split(b, '\n')
d::String = convert(String, strip(first(c)))
my_regex::Regex = r"On branch ([a-zA-Z0-9_-]*)"
if occursin(my_regex, d)
my_match::RegexMatch = match(my_regex, d)
just_the_branch::String =
clean_up_branch_name(first(my_match.captures))
return just_the_branch
else
delayederror("could not determine current branch")
end
end
function checkout_branch!(
branch_name::AbstractString;
create::Bool = false,
error_on_failure::Bool = true,
)::Nothing
success::Bool = false
git::String = _get_git_binary_path()
branch_name_cleaned::String = clean_up_branch_name(
branch_name
)
try
run(`$(git) checkout $(branch_name_cleaned)`)
catch e1
@warn(string("ignoring exception"), e1,)
end
current_branch_1::String = get_current_branch()
if strip(current_branch_1) == strip(branch_name_cleaned)
success = true
else
success = false
end
if !success
if create
try
run(`$(git) checkout --orphan $(branch_name_cleaned)`)
catch e2
@warn(string("ignoring exception"), e2,)
end
end
end
current_branch_2::String = get_current_branch()
if strip(current_branch_2) == strip(branch_name_cleaned)
success = true
else
success = false
end
if !success
if error_on_failure
delayederror("could not checkout the specified branch")
else
@warn("could not checkout the specified branch")
end
end
return nothing
end
function branch_exists(branch_name::AbstractString)::Bool
git::String = _get_git_binary_path()
original_branch::String = get_current_branch()
branch_name_cleaned::String = clean_up_branch_name(
branch_name
)
try
run(`$(git) checkout $(branch_name_cleaned)`)
catch e
@warn(string("ignoring exception"), e,)
end
current_branch::String = get_current_branch()
if strip(current_branch)==strip(branch_name_cleaned)
result = true
else
result = false
end
run(`$(git) checkout $(original_branch)`)
return result
end
function git_add_all!()::Nothing
git::String = _get_git_binary_path()
try
run(`$(git) add -A`)
catch e
@warn(string("ignoring exception"), e,)
end
return nothing
end
function git_commit!(
;
message::AbstractString,
committer_name::AbstractString,
committer_email::AbstractString,
allow_empty::Bool = false,
)::Nothing
git::String = _get_git_binary_path()
message_stripped::String = convert(
String,
strip(message),
)
committer_name_stripped::String = convert(
String,
strip(committer_name),
)
committer_email_stripped::String = convert(
String,
strip(committer_email),
)
allow_empty_flag::String = ""
if allow_empty
allow_empty_flag = "--allow-empty"
else
allow_empty_flag = ""
end
run(`$(git) config user.name "$(committer_name_stripped)"`)
run(`$(git) config user.email "$(committer_email_stripped)"`)
run(`$(git) config commit.gpgsign false`)
try
run(`$(git) commit $(allow_empty_flag) -m "$(message_stripped)"`)
catch e
@warn(string("ignoring exception"), e,)
end
return nothing
end
function git_push_upstream_all!()::Nothing
git::String = _get_git_binary_path()
try
run(`$(git) push -u --all`)
catch e
@warn(string("ignoring exception"), e,)
end
return nothing
end
function delete_everything_except_dot_git!(
root_path::AbstractString,
)::Nothing
previous_directory::String = pwd()
list_of_paths_to_remove::Vector{String} = Vector{String}()
root_path_stripped::String = convert(String, strip(root_path))
for file_or_directory in readdir(root_path_stripped)
if strip(lowercase(file_or_directory)) != ".git"
push!(
list_of_paths_to_remove,
joinpath(root_path_stripped, file_or_directory,),
)
end
end
for path_to_remove in list_of_paths_to_remove
rm(
path_to_remove;
force = true,
recursive = true,
)
end
cd(previous_directory)
return nothing
end
function delete_only_dot_git!(root_path::AbstractString)::Nothing
previous_directory::String = pwd()
list_of_paths_to_remove::Vector{String} = Vector{String}()
root_path_stripped::String = convert(String, strip(root_path))
for (rootdir, dirs, files) in walkdir(root_path_stripped)
for dir in dirs
if strip(lowercase(dir)) == ".git"
push!(
list_of_paths_to_remove,
joinpath(rootdir, dir),
)
end
end
for file in files
if strip(lowercase(file)) == ".git"
push!(
list_of_paths_to_remove,
joinpath(rootdir, file),
)
end
end
end
for path_to_remove in list_of_paths_to_remove
rm(
path_to_remove;
force = true,
recursive = true,
)
end
cd(previous_directory)
return nothing
end
function _include_branch(
;
include::AbstractVector,
branch::String,
)::Bool
branch_clean::String = clean_up_branch_name(branch)
matches_any_inclusion_criteria::Vector{Bool} = Bool[
occursin(x, branch_clean) for x in include
]
result::Bool = any(matches_any_inclusion_criteria)
return result
end
function _exclude_branch(
;
exclude::AbstractVector,
branch::String,
)::Bool
branch_clean::String = clean_up_branch_name(branch)
matches_any_exclusion_criteria::Vector{Bool} = Bool[
occursin(x, branch_clean) for x in exclude
]
result::Bool = any(matches_any_exclusion_criteria)
return result
end
function make_list_of_branches_to_snapshot(
;
default_branch::String,
include::AbstractVector,
exclude::AbstractVector,
)::Vector{String}
all_branches::Vector{String} = get_all_branches_local_and_remote()
first_pass::Vector{String} = Vector{String}()
default_br_cl_lc_s::String = strip(
lowercase(
clean_up_branch_name(
default_branch
)
)
)
for b_1 in all_branches
if default_br_cl_lc_s == strip(lowercase(clean_up_branch_name(b_1)))
push!(first_pass, b_1)
end
end
for b_2 in all_branches
if _include_branch(;branch=b_2,include=include)
push!(first_pass, b_2)
end
end
branches_to_snapshot::Vector{String} = Vector{String}()
for b_3 in first_pass
if !_exclude_branch(;branch=b_3,exclude=exclude)
push!(branches_to_snapshot, b_3)
end
end
branches_to_snapshot_cleaned::Vector{String} = clean_up_branch_name.(
branches_to_snapshot
)
result::Vector{String} = sort(unique(branches_to_snapshot_cleaned))
return result
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 12365 | ##### Beginning of file
import Dates
import HTTP
import TimeZones
function default_repo_description(
;
env::AbstractDict = ENV,
from::Any = "",
when::Any = Dates.now(TimeZones.localzone(),),
time_zone::Dates.TimeZone = TimeZones.TimeZone("America/New_York"),
by::Any = "",
)::String
_from::String = strip(string(from))
from_string::String = ""
if length(_from) == 0
from_string = ""
else
from_string = string(
" ",
strip(string("from $(_from)",)),
)
end
_when::String = ""
date_time_string::String = ""
if isa(when, TimeZones.ZonedDateTime)
_when = strip(
string(TimeZones.astimezone(when,time_zone,))
)
else
_when = strip(
string(when)
)
end
if length(_when) == 0
date_time_string = ""
else
date_time_string = string(
" ",
strip(string("on ",_when,)),
)
end
by_string::String = ""
_by::String = strip(
string(by)
)
if length(_by) == 0
by_string = ""
else
by_string = string(
" ",
strip(string("by ",_by,)),
)
end
travis_string::String = ""
if _is_travis_ci(env)
TRAVIS_BUILD_NUMBER::String = strip(
get(env, "TRAVIS_BUILD_NUMBER", "")
)
TRAVIS_JOB_NUMBER::String = strip(
get(env, "TRAVIS_JOB_NUMBER", "")
)
TRAVIS_EVENT_TYPE::String = strip(
get(env, "TRAVIS_EVENT_TYPE", "unknown-travis-event")
)
TRAVIS_BRANCH = strip(
get(env, "TRAVIS_BRANCH", "unknown-branch")
)
TRAVIS_COMMIT = strip(
get(env, "TRAVIS_COMMIT", "unknown-commit")
)
TRAVIS_PULL_REQUEST = strip(
get(env,"TRAVIS_PULL_REQUEST","unknown-pull-request-number")
)
job_or_build_string::String = ""
if length(TRAVIS_JOB_NUMBER) > 0
job_or_build_string = string(
" ",
strip(string("job $(TRAVIS_JOB_NUMBER)")),
)
elseif length(TRAVIS_BUILD_NUMBER) > 0
job_or_build_string = string(
" ",
strip(string("build $(TRAVIS_BUILD_NUMBER)")),
)
else
job_or_build_string = ""
end
triggered_by_string::String = ""
if lowercase(TRAVIS_EVENT_TYPE) == "push"
triggered_by_string = string(
" ",
strip(
string(
", triggered by the push of",
" commit \"$(TRAVIS_COMMIT)\"",
" to branch \"$(TRAVIS_BRANCH)\"",
)
),
)
elseif lowercase(TRAVIS_EVENT_TYPE) == "pull_request"
triggered_by_string = string(
" ",
strip(
string(
", triggered by",
" pull request #$(TRAVIS_PULL_REQUEST)",
)
),
)
elseif lowercase(TRAVIS_EVENT_TYPE) == "cron"
triggered_by_string = string(
" ",
strip(
string(
", triggered by Travis",
" cron job on",
" branch \"$(TRAVIS_BRANCH)\"",
)
),
)
else
triggered_by_string = string(
" ",
strip(
string(
", triggered by Travis \"",
strip(TRAVIS_EVENT_TYPE),
"\" event on",
" branch \"$(TRAVIS_BRANCH)\"",
)
),
)
end
travis_string = string(
" ",
strip(
string(
"via Travis",
job_or_build_string,
triggered_by_string,
)
),
)
else
travis_string = ""
end
new_description::String = strip(
string(
"Last mirrored",
from_string,
date_time_string,
by_string,
travis_string,
)
)
return new_description
end
function _url_exists(url::AbstractString)::Bool
_url::String = strip(convert(String, url))
result::Bool = try
r = HTTP.request("GET", _url)
@debug("HTTP GET request result: ", _url, r.status,)
r.status == 200
catch exception
@debug(string("Ignoring exception"), exception,)
false
end
if result
else
@debug(string("URL does not exist"), _url,)
end
return result
end
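# Callable wrapper around a function `f`: each call prints `dummy_output` to `io` when more
# than `interval_seconds` have elapsed since the last print, then calls `f` and returns its result.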
mutable struct _DummyOutputWrapperStruct{I, F, S, O}
previous_time_seconds::I
f::F
interval_seconds::I
dummy_output::S
io::O
end
function _DummyOutputWrapperStruct(
;
interval_seconds::I = 60,
initial_offset_seconds::I = interval_seconds,
f::F,
dummy_output::S = "This is a dummy line of output",
io::O = Base.stdout,
)::_DummyOutputWrapperStruct{I, F, S, O} where
I <: Integer where
F <: Function where
S <: AbstractString where
O <: IO
current_time_seconds::I = floor(I, time())
initial_time_seconds::I = current_time_seconds + initial_offset_seconds
    wrapper_struct::_DummyOutputWrapperStruct{I, F, S, O} =
_DummyOutputWrapperStruct(
initial_time_seconds,
f,
interval_seconds,
dummy_output,
io,
)
return wrapper_struct
end
function (x::_DummyOutputWrapperStruct{I, F, S, O})() where
I <: Integer where
F <: Function where
S <: AbstractString where
O <: IO
current_time_seconds::I = floor(I, time())
previous_time_seconds::I = x.previous_time_seconds
f::F = x.f
interval_seconds::I = x.interval_seconds
dummy_output::S = x.dummy_output
io::O = x.io
elapsed_seconds::Int = current_time_seconds - previous_time_seconds
print_dummy_output::Bool = elapsed_seconds > interval_seconds
if print_dummy_output
println(io, dummy_output)
x.previous_time_seconds = current_time_seconds
end
f_result = f()
return f_result
end
function dummy_output_wrapper(
;
f::F,
interval_seconds::I = 60,
initial_offset_seconds::I = interval_seconds,
dummy_output::S = "This is a dummy line of output",
io::O = Base.stdout,
) where
I <: Integer where
F <: Function where
S <: AbstractString where
O <: IO
wrapper_struct::_DummyOutputWrapperStruct{I, F, S, O} =
_DummyOutputWrapperStruct(
;
f = f,
interval_seconds = interval_seconds,
initial_offset_seconds = initial_offset_seconds,
            dummy_output = dummy_output,
            io = io,
            )
function my_wrapper_function()
result = wrapper_struct()
return result
end
return my_wrapper_function
end
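# Run `cmd`, retrying up to `max_attempts` times and killing any attempt that runs longer than
# `max_seconds_per_attempt`; if every attempt fails and `last_resort_run` is true, fall back to a
# plain `run(cmd)`. Returns whether the command ultimately succeeded.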
function command_ran_successfully!!(
cmd::Base.AbstractCmd;
max_attempts::Integer = 10,
max_seconds_per_attempt::Real = 540,
seconds_to_wait_between_attempts::Real = 30,
error_on_failure::Bool = true,
last_resort_run::Bool = true,
before::Function = () -> (),
)::Bool
success_bool::Bool = false
my_false = dummy_output_wrapper(
;
f = () -> false,
interval_seconds = 60,
initial_offset_seconds = 60,
dummy_output = "Still waiting between attempts...",
io = Base.stdout,
)
for attempt = 1:max_attempts
if success_bool
else
@debug(string("Attempt $(attempt)"))
if attempt > 1
timedwait(
() -> my_false(),
float(seconds_to_wait_between_attempts);
pollint = float(1.0),
)
end
before()
p = run(cmd; wait = false,)
my_process_exited = dummy_output_wrapper(
;
f = () -> process_exited(p),
interval_seconds = 60,
initial_offset_seconds = 60,
dummy_output = "The process is still running...",
io = Base.stdout,
)
timedwait(
() -> my_process_exited(),
float(max_seconds_per_attempt),
pollint = float(1.0),
)
if process_running(p)
success_bool = false
try
kill(p, Base.SIGTERM)
catch exception
@warn("Ignoring exception: ", exception)
end
try
kill(p, Base.SIGKILL)
catch exception
@warn("Ignoring exception: ", exception)
end
else
success_bool = try
success(p)
catch exception
@warn("Ignoring exception: ", exception)
false
end
end
end
end
if !success_bool && last_resort_run
@debug(string("Attempting the last resort run..."))
try
run(cmd)
success_bool = true
catch exception
success_bool = false
if error_on_failure
delayederror(string(exception); exception = exception,)
else
@error(string(exception), exception = exception,)
end
end
end
if success_bool
@debug(string("Command ran successfully."),)
else
if error_on_failure
delayederror(string("Command did not run successfully."),)
else
@warn(string("Command did not run successfully."),)
end
end
return success_bool
end
function retry_function_until_success(
f::Function;
max_attempts::Integer = 10,
seconds_to_wait_between_attempts::Real = 30,
)
success_bool::Bool = false
f_result = nothing
my_false = dummy_output_wrapper(
;
f = () -> false,
interval_seconds = 60,
initial_offset_seconds = 60,
dummy_output = "Still waiting between attempts...",
io = Base.stdout,
)
for attempt = 1:max_attempts
if success_bool
else
@debug(string("Attempt $(attempt)"))
if attempt > 1
timedwait(
() -> my_false(),
float(seconds_to_wait_between_attempts);
pollint = float(1.0),
)
end
@debug(string("Running the provided function..."))
success_bool = true
f_result = try
f()
catch exception
success_bool = false
@warn("Ignoring exception: ", exception)
nothing
end
end
end
if success_bool
@debug(string("Function ran successfully."),)
return f_result
else
delayederror(string("Function did not run successfully."),)
end
end
function _is_travis_ci(
a::AbstractDict = ENV,
)::Bool
ci::String = lowercase(
strip(get(a, "CI", "false"))
)
travis::String = lowercase(
strip(get(a, "TRAVIS", "false"))
)
continuous_integration::String = lowercase(
strip(get(a, "CONTINUOUS_INTEGRATION", "false"))
)
ci_is_true::Bool = ci == "true"
travis_is_true::Bool = travis == "true"
continuous_integration_is_true::Bool = continuous_integration == "true"
answer::Bool = ci_is_true &&
travis_is_true &&
continuous_integration_is_true
return answer
end
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 211 | ##### Beginning of file
import MirrorUpdater
import Test # stdlib
include("test-utils.jl")
include("set-up-test-modules.jl")
include("test-package-directory.jl")
include("test-version.jl")
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 552 | ##### Beginning of file
testmodulea_filename = joinpath("TestModuleA", "TestModuleA.jl")
testmoduleb_filename = joinpath(
"TestModuleB", "directory1", "directory2", "directory3",
"directory4", "directory5", "TestModuleB.jl",
)
testmodulec_filename = joinpath(mktempdir(), "TestModuleC.jl")
rm(testmodulec_filename; force = true, recursive = true)
open(testmodulec_filename, "w") do io
write(io, "module TestModuleC end")
end
include(testmodulea_filename)
include(testmoduleb_filename)
include(testmodulec_filename)
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 694 | ##### Beginning of file
Test.@test( isdir(MirrorUpdater.package_directory()) )
Test.@test( isdir(MirrorUpdater.package_directory("ci")) )
Test.@test( isdir(MirrorUpdater.package_directory("ci", "travis")) )
Test.@test( isdir(MirrorUpdater.package_directory(TestModuleA)) )
Test.@test( isdir(MirrorUpdater.package_directory(TestModuleB)) )
Test.@test(
isdir( MirrorUpdater.package_directory(TestModuleB, "directory2",) )
)
Test.@test(
isdir(
MirrorUpdater.package_directory(
TestModuleB, "directory2", "directory3",
)
)
)
Test.@test_throws(
ErrorException,MirrorUpdater.package_directory(TestModuleC),
)
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 7384 | ##### Beginning of file
Test.@testset "git tests" begin
git = MirrorUpdater.Utils._get_git_binary_path()
@info(string("git: "), git,)
git_version_cmd = `$(git) --version`
@info(string("Attempting to run command: "), git_version_cmd,)
Test.@test(
MirrorUpdater.Utils.command_ran_successfully!!(
git_version_cmd
)
)
Test.@test(
MirrorUpdater.Utils.command_ran_successfully!!(
`$(git) --version`
)
)
# Test.@test_throws(
# ErrorException,
# MirrorUpdater.Utils.command_ran_successfully!!(
# `$(git) --versionBLAHBLAHBLAH`;
# max_attempts = 5,
# seconds_to_wait_between_attempts = 5,
# error_on_failure = true,
# last_resort_run = true,
# ),
# )
# Test.@test_throws(
# ErrorException,
# MirrorUpdater.Utils.command_ran_successfully!!(
# `$(git) --versionBLAHBLAHBLAH`;
# max_attempts = 5,
# seconds_to_wait_between_attempts = 5,
# error_on_failure = true,
# last_resort_run = false,
# ),
# )
# Test.@test_throws(
# ErrorException,
# MirrorUpdater.Utils.command_ran_successfully!!(
# `$(git) --versionBLAHBLAHBLAH`;
# max_attempts = 5,
# seconds_to_wait_between_attempts = 5,
# error_on_failure = false,
# last_resort_run = true,
# ),
# )
Test.@test(
!(
MirrorUpdater.Utils.command_ran_successfully!!(
`$(git) --versionBLAHBLAHBLAH`;
max_attempts = 5,
seconds_to_wait_between_attempts = 5,
error_on_failure = false,
last_resort_run = false,
)
)
)
function f_1()
return "Hello There"
end
Test.@test(
"Hello There" ==
MirrorUpdater.Utils.retry_function_until_success(
() -> f_1()
)
)
f_2_counter = Ref{Int}()
f_2_counter[] = 0
function f_2(counter)
counter[] += 1
@debug(
string(
"Incremented counter from ",
"$(counter[] - 1) to $(counter[])",
)
)
if counter[] < 7
error("f2_counter < 7")
else
return "General Kenobi"
end
end
Test.@test(
"General Kenobi" ==
MirrorUpdater.Utils.retry_function_until_success(
() -> f_2(f_2_counter);
max_attempts = 10,
seconds_to_wait_between_attempts = 5,
)
)
function f_3()
error("f_3() will always fail")
end
# Test.@test_throws(
# ErrorException,
# MirrorUpdater.Utils.retry_function_until_success(
# ()->f_3();
# max_attempts = 5,
# seconds_to_wait_between_attempts = 5,
# ),
# )
previous_directory::String = pwd()
temp_directory_1::String = joinpath(mktempdir(), "TEMPGITREPOLOCAL")
mkpath(temp_directory_1)
temp_directory_2::String = joinpath(mktempdir(), "TEMPGITREPOREMOTE")
mkpath(temp_directory_2)
cd(temp_directory_2)
run(`$(git) init --bare`)
cd(temp_directory_1)
run(`$(git) init`)
MirrorUpdater.Utils.git_add_all!()
MirrorUpdater.Utils.git_commit!(
;
message="test commit 1",
allow_empty=true,
committer_name="test name",
committer_email="test email",
)
    run(`$(git) branch branch1`)
    run(`$(git) branch branch2`)
    run(`$(git) branch branch3`)
    run(`$(git) checkout master`)
Test.@test(
typeof(MirrorUpdater.Utils.git_version()) <: VersionNumber
)
Test.@test(
typeof(MirrorUpdater.Utils.get_all_branches_local()) <:
Vector{String}
)
Test.@test(
typeof(MirrorUpdater.Utils.get_all_branches_local_and_remote()) <:
Vector{String}
)
Test.@test(
typeof(MirrorUpdater.Utils.get_current_branch()) <: String )
Test.@test(
MirrorUpdater.Utils.branch_exists("branch1") )
Test.@test(
!MirrorUpdater.Utils.branch_exists("non-existent-branch") )
Test.@test(
!MirrorUpdater.Utils.branch_exists("non-existent-but-create-me") )
Test.@test(
typeof(MirrorUpdater.Utils.checkout_branch!("branch1")) <: Nothing )
# Test.@test_throws(
# ErrorException,
# MirrorUpdater.Utils.checkout_branch!("non-existent-branch"),
# )
Test.@test_warn(
"",
MirrorUpdater.Utils.checkout_branch!(
"non-existent-branch";
error_on_failure=false,
),
)
Test.@test(
typeof(
MirrorUpdater.Utils.checkout_branch!(
"non-existent-but-create-me";
create=true,
)
) <: Nothing
)
MirrorUpdater.Utils.git_add_all!()
MirrorUpdater.Utils.git_commit!(
;
message="test commit 2",
allow_empty=true,
committer_name="test name",
committer_email="test email",
)
    run(`$(git) checkout master`)
Test.@test(
MirrorUpdater.Utils.branch_exists("branch1")
)
Test.@test(
!MirrorUpdater.Utils.branch_exists("non-existent-branch")
)
Test.@test(
MirrorUpdater.Utils.branch_exists("non-existent-but-create-me")
)
run(`$(git) remote add origin $(temp_directory_2)`)
Test.@test(
typeof(MirrorUpdater.Utils.git_push_upstream_all!()) <: Nothing
)
    run(`$(git) checkout master`)
include_patterns::Vector{Regex} = Regex[
r"^bRANCh1$"i,
r"^bRanCh3$"i,
]
exclude_patterns::Vector{Regex} = Regex[
r"^brANcH3$"i,
]
branches_to_snapshot::Vector{String} =
MirrorUpdater.Utils.make_list_of_branches_to_snapshot(
;
default_branch = "maSTeR",
include = include_patterns,
exclude = exclude_patterns,
)
Test.@test( length(branches_to_snapshot) == 2 )
Test.@test( length(unique(branches_to_snapshot)) == 2 )
Test.@test(
length(branches_to_snapshot) == length(unique(branches_to_snapshot))
)
Test.@test( branches_to_snapshot[1] == "branch1" )
Test.@test( branches_to_snapshot[2] == "master" )
cd(previous_directory)
MirrorUpdater.Utils.delete_everything_except_dot_git!(temp_directory_1)
MirrorUpdater.Utils.delete_only_dot_git!(temp_directory_2)
rm(temp_directory_1; recursive=true, force=true)
rm(temp_directory_2; recursive=true, force=true)
end # end testset "git tests"
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | code | 838 | ##### Beginning of file
Test.@test( Base.VERSION >= VersionNumber("1.0") )
Test.@test( MirrorUpdater.version() > VersionNumber(0) )
Test.@test(
MirrorUpdater.version() ==
MirrorUpdater.version(MirrorUpdater)
)
Test.@test(
MirrorUpdater.version() ==
MirrorUpdater.version(first(methods(MirrorUpdater.eval)))
)
Test.@test(
MirrorUpdater.version() ==
MirrorUpdater.version(MirrorUpdater.eval)
)
Test.@test(
MirrorUpdater.version() ==
MirrorUpdater.version(MirrorUpdater.eval, (Any,))
)
Test.@test( MirrorUpdater.version(TestModuleA) == VersionNumber("1.2.3") )
Test.@test( MirrorUpdater.version(TestModuleB) == VersionNumber("4.5.6") )
Test.@test_throws(
ErrorException,
MirrorUpdater._TomlFile(joinpath(mktempdir(),"1","2","3","4")),
)
##### End of file
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | docs | 3349 | # Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, sex characteristics, gender identity and expression,
level of experience, education, socio-economic status, nationality, personal
appearance, race, religion, or sexual identity and orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [email protected]. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
[homepage]: https://www.contributor-covenant.org
For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | docs | 18951 | # MirrorUpdater.jl - Provides functionality for automatically mirroring Julia package repositories
<table>
<tbody>
<tr>
<td>Travis CI</td>
<td><a href="https://travis-ci.com/UnofficialJuliaMirror/MirrorUpdater.jl/branches"><img src="https://travis-ci.com/UnofficialJuliaMirror/MirrorUpdater.jl.svg?branch=master"></a></td>
</tr>
<tr>
<td>Codecov</td>
<td><a href="https://codecov.io/gh/UnofficialJuliaMirror/MirrorUpdater.jl"><img src="https://codecov.io/gh/UnofficialJuliaMirror/MirrorUpdater.jl/branch/master/graph/badge.svg" /></a></td>
</tr>
<tr>
<td>Project Status</td>
<td><a href="https://www.repostatus.org/#active"><img src="https://www.repostatus.org/badges/latest/active.svg" alt="Project Status: Active β The project has reached a stable, usable state and is being actively developed." /></a></td>
</tr>
<tr>
<td>License</td>
<td><a href="LICENSE"><img title="MIT License" alt="MIT License" src="https://img.shields.io/github/license/mashape/apistatus.svg"></a></td>
</tr>
</tbody>
</table>
MirrorUpdater.jl is a Julia application that provides functionality for
automatically mirroring Julia package repositories.
MirrorUpdater.jl and its sibling project, [Snapshots.jl](https://github.com/UnofficialJuliaMirrorSnapshots/Snapshots.jl), are used to maintain the
Julia package mirrors and snapshots hosted at:
| | Mirrors | Snapshots |
| ------ | ------- | --------- |
| GitHub | [https://github.com/UnofficialJuliaMirror](https://github.com/UnofficialJuliaMirror) | [https://github.com/UnofficialJuliaMirrorSnapshots](https://github.com/UnofficialJuliaMirrorSnapshots) |
| GitLab | [https://gitlab.com/UnofficialJuliaMirror](https://gitlab.com/UnofficialJuliaMirror) | [https://gitlab.com/UnofficialJuliaMirrorSnapshots](https://gitlab.com/UnofficialJuliaMirrorSnapshots) |
| Bitbucket | [https://bitbucket.org/UnofficialJuliaMirror](https://bitbucket.org/UnofficialJuliaMirror) | [https://bitbucket.org/UnofficialJuliaMirrorSnapshots](https://bitbucket.org/UnofficialJuliaMirrorSnapshots) |
You can host your own mirrors for free by following these instructions:
| Table of Contents |
| ----------------- |
| [1. Setting up GitHub (required)](#setting-up-github-required) |
| [2. Setting up GitLab (optional)](#setting-up-gitlab-optional) |
| [3. Setting up BitBucket (optional)](#setting-up-bitbucket-optional) |
| [4. Setting up Travis (required)](#setting-up-travis-required) |
| [5. Running the updater manually](#running-the-updater-manually) |
| [6. Troubleshooting common issues](#troubleshooting-common-issues) |
## Setting up GitHub (required)
### Step 1
If you do not already have a personal GitHub account, [create one](https://help.github.com/articles/signing-up-for-a-new-github-account/). For the remainder of this README, `MY_PERSONAL_GITHUB_USERNAME` refers to the username of your personal GitHub account.
*For example, for me, `MY_PERSONAL_GITHUB_USERNAME` is equal to `DilumAluthge`.*
### Step 2
Log in to GitHub as `MY_PERSONAL_GITHUB_USERNAME`.
### Step 3
While logged in as `MY_PERSONAL_GITHUB_USERNAME`, [enable two-factor authentication](https://help.github.com/articles/configuring-two-factor-authentication/) on the `MY_PERSONAL_GITHUB_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 4
While logged in as `MY_PERSONAL_GITHUB_USERNAME`, [create a free GitHub organization](https://help.github.com/articles/creating-a-new-organization-from-scratch/) that you will use only for hosting the mirrored repositories. For the remainder of this README, `MY_GITHUB_ORG` refers to the name of this organization. `MY_PERSONAL_GITHUB_USERNAME` should be an `owner` of the `MY_GITHUB_ORG` organization.
*For example, for me, `MY_GITHUB_ORG` is equal to `UnofficialJuliaMirror`.*
### Step 5
While logged in as `MY_PERSONAL_GITHUB_USERNAME`, go to the `MY_GITHUB_ORG` organization security settings page (`https://github.com/organizations/MY_GITHUB_ORG/settings/security`).
Next, make sure that the checkbox next to "Require two-factor authentication for everyone..." is CHECKED.
Finally, click the "Save" button.
### Step 6
Log out of the `MY_PERSONAL_GITHUB_USERNAME` account.
### Step 7
[Create a new GitHub "bot" account](https://help.github.com/articles/signing-up-for-a-new-github-account/) that you will use ONLY for maintaining the mirror. For the remainder of this README, `MY_GITHUB_BOT_USERNAME` refers to the username of this account.
*For example, for me, `MY_GITHUB_BOT_USERNAME` is equal to `UnofficialJuliaMirrorBot`.*
### Step 8
Log in to GitHub as `MY_GITHUB_BOT_USERNAME`.
### Step 9
While logged in as `MY_GITHUB_BOT_USERNAME`, [enable two-factor authentication](https://help.github.com/articles/configuring-two-factor-authentication/) on the `MY_GITHUB_BOT_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 10
While logged in as `MY_GITHUB_BOT_USERNAME`, [create a personal access token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/) for the `MY_GITHUB_BOT_USERNAME` account and store it in a secure place (such as a password manager). For the remainder of this README, `my-github-bot-personal-access-token` refers to this personal access token.
**The personal access token should be treated as securely as a password. Do not share it with anyone. Do not save it in any unsecure location. Do not save it in a file. Do not commit it in a Git repository.**
### Step 11
Log out of the `MY_GITHUB_BOT_USERNAME` account.
### Step 12
Log in to GitHub as `MY_PERSONAL_GITHUB_USERNAME`.
### Step 13
While logged in as `MY_PERSONAL_GITHUB_USERNAME`, go to the `MY_GITHUB_ORG` organization members page (`https://github.com/orgs/MY_GITHUB_ORG/people`).
Then, add `MY_GITHUB_BOT_USERNAME` as a `member` of the `MY_GITHUB_ORG` organization.
This will allow `MY_GITHUB_BOT_USERNAME` to create new repositories within the `MY_GITHUB_ORG` organization.
### Step 14
While logged in as `MY_PERSONAL_GITHUB_USERNAME`, [fork the MirrorUpdater.jl repository](https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl/fork) to the `MY_GITHUB_ORG` organization.
### Step 15
Go to your fork of MirrorUpdater.jl: `https://github.com/MY_GITHUB_ORG/MirrorUpdater.jl`
### Step 16
In your fork, update lines 1 and 2 of `config/github.jl` to look like:
```julia
const GITHUB_ORGANIZATION = "MY_GITHUB_ORG"
const GITHUB_BOT_USERNAME = "MY_GITHUB_BOT_USERNAME"
```
Leave the rest of `config/github.jl` unchanged. Please do not store your personal access token in the file.
### Step 17
In your fork, update line 1 of `config/enabled-providers.jl` to look like:
```julia
const GITHUB_ENABLED = true
```
### Step 18 (optional)
If there are other registries of Julia packages that you would like to mirror, add them to the `config/registries.jl` file in your fork:
### Step 19 (optional)
Update the other configuration files in the `config/` folder of your fork as you see fit.
**Congratulations, you have finished this section.**
## Setting up GitLab (optional)
### Step 1
If you do not already have a personal GitLab account, [create one](https://gitlab.com/users/sign_in#register-pane). For the remainder of this README, `MY_PERSONAL_GITLAB_USERNAME` refers to the username of your personal GitLab account.
*For example, for me, `MY_PERSONAL_GITLAB_USERNAME` is equal to `DilumAluthge`.*
### Step 2
Log in to GitLab as `MY_PERSONAL_GITLAB_USERNAME`.
### Step 3
While logged in as `MY_PERSONAL_GITLAB_USERNAME`, [enable two-factor authentication](https://docs.gitlab.com/ce/user/profile/account/two_factor_authentication.html#enabling-2fa) on the `MY_PERSONAL_GITLAB_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 4
While logged in as `MY_PERSONAL_GITLAB_USERNAME`, [create a free GitLab group](https://docs.gitlab.com/ce/user/group/#create-a-new-group) that you will use only for hosting the mirrored repositories. For the remainder of this README, `MY_GITLAB_GROUP` refers to the name of this group. `MY_PERSONAL_GITLAB_USERNAME` should be an `owner` of the `MY_GITLAB_GROUP` group.
*For example, for me, `MY_GITLAB_GROUP` is equal to `UnofficialJuliaMirror`.*
### Step 5
While logged in as `MY_PERSONAL_GITLAB_USERNAME`, go to the `MY_GITLAB_GROUP` group general settings page (`https://gitlab.com/groups/MY_GITLAB_GROUP/-/edit`).
Next, scroll down to the "Permissions, LFS, 2FA" section. Click the "Expand" button next to "Permissions, LFS, 2FA" to expand the section.
Then, make sure that the checkbox next to "Require all users in this group to setup Two-factor authentication" is CHECKED.
Finally, click the "Save changes" button.
### Step 6
Log out of the `MY_PERSONAL_GITLAB_USERNAME` account.
### Step 7
[Create a new GitLab "bot" account](https://gitlab.com/users/sign_in#register-pane) that you will use ONLY for maintaining the mirror. For the remainder of this README, `MY_GITLAB_BOT_USERNAME` refers to the username of this account.
*For example, for me, `MY_GITLAB_BOT_USERNAME` is equal to `UnofficialJuliaMirrorBot`.*
### Step 8
Log in to GitLab as `MY_GITLAB_BOT_USERNAME`.
### Step 9
While logged in as `MY_GITLAB_BOT_USERNAME`, [enable two-factor authentication](https://docs.gitlab.com/ce/user/profile/account/two_factor_authentication.html#enabling-2fa) on the `MY_GITLAB_BOT_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 10
While logged in as `MY_GITLAB_BOT_USERNAME`, [create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#creating-a-personal-access-token) for the `MY_GITLAB_BOT_USERNAME` account and store it in a secure place (such as a password manager). For the remainder of this README, `my-gitlab-bot-personal-access-token` refers to this personal access token.
**The personal access token should be treated as securely as a password. Do not share it with anyone. Do not save it in any unsecure location. Do not save it in a file. Do not commit it in a Git repository.**
### Step 11
Log out of the `MY_GITLAB_BOT_USERNAME` account.
### Step 12
Log in to GitLab as `MY_PERSONAL_GITLAB_USERNAME`.
### Step 13
While logged in as `MY_PERSONAL_GITLAB_USERNAME`, go to the `MY_GITLAB_GROUP` group members page (`https://gitlab.com/groups/MY_GITLAB_GROUP/-/group_members`).
Then, add `MY_GITLAB_BOT_USERNAME` as a `member` of the `MY_GITLAB_GROUP` group.
This will allow `MY_GITLAB_BOT_USERNAME` to create new repositories within the `MY_GITLAB_GROUP` group.
### Step 14
Go to your **GitHub** fork of MirrorUpdater.jl: `https://github.com/MY_GITHUB_ORG/MirrorUpdater.jl`
### Step 15
In your GitHub fork of MirrorUpdater.jl, update lines 1 and 2 of `config/gitlab.jl` to look like:
```julia
const GITLAB_GROUP = "MY_GITLAB_GROUP"
const GITLAB_BOT_USERNAME = "MY_GITLAB_BOT_USERNAME"
```
Leave the rest of `config/gitlab.jl` unchanged. Please do not store your personal access token in the file.
### Step 16
In your GitHub fork of MirrorUpdater.jl, update line 2 of `config/enabled-providers.jl` to look like:
```julia
const GITLAB_ENABLED = true
```
**Congratulations, you have finished this section.**
## Setting up BitBucket (optional)
### Step 1
If you do not already have a personal Bitbucket account, [create one](https://bitbucket.org/account/signup/). For the remainder of this README, `MY_PERSONAL_BITBUCKET_USERNAME` refers to the username of your personal Bitbucket account.
*For example, for me, `MY_PERSONAL_BITBUCKET_USERNAME` is equal to `DilumAluthge`.*
### Step 2
Log in to Bitbucket as `MY_PERSONAL_BITBUCKET_USERNAME`.
### Step 3
While logged in as `MY_PERSONAL_BITBUCKET_USERNAME`, [enable two-factor authentication](https://confluence.atlassian.com/bitbucket/two-step-verification-777023203.html#Two-stepverification-Enabletwo-stepverification) on the `MY_PERSONAL_BITBUCKET_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 4
While logged in as `MY_PERSONAL_BITBUCKET_USERNAME`, [create a free Bitbucket team](https://confluence.atlassian.com/bitbucket/create-and-administer-your-team-665225537.html) that you will use only for hosting the mirrored repositories. For the remainder of this README, `MY_BITBUCKET_TEAM` refers to the name of this team. `MY_PERSONAL_BITBUCKET_USERNAME` should be an `owner` of the `MY_BITBUCKET_TEAM` team.
*For example, for me, `MY_BITBUCKET_TEAM` is equal to `UnofficialJuliaMirror`.*
### Step 5
While logged in as `MY_PERSONAL_BITBUCKET_USERNAME`, go to the `MY_BITBUCKET_TEAM` projects page (`https://bitbucket.org/MY_BITBUCKET_TEAM/profile/projects`).
Then, create a new project inside the `MY_BITBUCKET_TEAM` team. **Make sure to UNCHECK the box next to "This is a private project." We want this project to be a public project.**
For the remainder of this README, `MY_BITBUCKET_PROJECT` refers to the name of this project.
*For example, for me, `MY_BITBUCKET_PROJECT` is equal to `UnofficialJuliaMirrorProject`.*
### Step 6
Log out of the `MY_PERSONAL_BITBUCKET_USERNAME` account.
### Step 7
[Create a new Bitbucket "bot" account](https://bitbucket.org/account/signup/) that you will use ONLY for maintaining the mirror. For the remainder of this README, `MY_BITBUCKET_BOT_USERNAME` refers to the username of this account.
*For example, for me, `MY_BITBUCKET_BOT_USERNAME` is equal to `UnofficialJuliaMirrorBot`.*
### Step 8
Log in to Bitbucket as `MY_BITBUCKET_BOT_USERNAME`.
### Step 9
While logged in as `MY_BITBUCKET_BOT_USERNAME`, [enable two-factor authentication](https://confluence.atlassian.com/bitbucket/two-step-verification-777023203.html#Two-stepverification-Enabletwo-stepverification) on the `MY_BITBUCKET_BOT_USERNAME` account.
**Make sure to store your two-factor recovery codes in a secure location!**
### Step 10
While logged in as `MY_BITBUCKET_BOT_USERNAME`, [create an app password](https://confluence.atlassian.com/bitbucket/app-passwords-828781300.html#Apppasswords-Createanapppassword) for the `MY_BITBUCKET_BOT_USERNAME` account and store it in a secure place (such as a password manager). For the remainder of this README, `my-bitbucket-bot-app-password` refers to this app password.
**The app password should be treated as securely as any other password. Do not share it with anyone. Do not save it in any unsecure location. Do not save it in a file. Do not commit it in a Git repository.**
### Step 11
Log out of the `MY_BITBUCKET_BOT_USERNAME` account.
### Step 12
Log in to Bitbucket as `MY_PERSONAL_BITBUCKET_USERNAME`.
### Step 13
While logged in as `MY_PERSONAL_BITBUCKET_USERNAME`, go to the `MY_BITBUCKET_TEAM` team members page (`https://bitbucket.org/MY_BITBUCKET_TEAM/profile/members`).
Then, add `MY_BITBUCKET_BOT_USERNAME` as a `member` of the `MY_BITBUCKET_TEAM` team.
This will allow `MY_BITBUCKET_BOT_USERNAME` to create new repositories within the `MY_BITBUCKET_TEAM` team.
### Step 14
Go to your **GitHub** fork of MirrorUpdater.jl: `https://github.com/MY_GITHUB_ORG/MirrorUpdater.jl`
### Step 15
In your GitHub fork of MirrorUpdater.jl, update lines 1 and 2 of `config/bitbucket.jl` to look like:
```julia
const BITBUCKET_TEAM = "MY_BITBUCKET_TEAM"
const BITBUCKET_BOT_USERNAME = "MY_BITBUCKET_BOT_USERNAME"
```
Leave the rest of `config/bitbucket.jl` unchanged. Please do not store your app password in the file.
### Step 16
In your GitHub fork of MirrorUpdater.jl, update line 3 of `config/enabled-providers.jl` to look like:
```julia
const BITBUCKET_ENABLED = true
```
**Congratulations, you have finished this section.**
## Setting up Travis (required)
### Step 1
Log in to GitHub as `MY_PERSONAL_GITHUB_USERNAME`.
### Step 2
Log in to Travis using the GitHub account `MY_PERSONAL_GITHUB_USERNAME`: `https://travis-ci.com/`
### Step 3
Enable Travis for your fork: `https://travis-ci.com/profile/MY_GITHUB_ORG`
### Step 4
Go to the Travis settings page for your fork: `https://travis-ci.com/MY_GITHUB_ORG/MirrorUpdater.jl/settings`
### Step 5
In the "General" section of the Travis settings page, turn ON the switch next to "Limit concurrent jobs". Then, enter `1` in the box to the right.
*This step is important. You must limit the concurrent jobs to 1. If you do not, then you will probably trigger the API rate limits for GitHub, GitLab, and/or Bitbucket, which will cause your Travis jobs to fail.*
### Step 6
In the "Environment Variables" section of the Travis settings page, [add a new environment variable](https://docs.travis-ci.com/user/environment-variables/#defining-variables-in-repository-settings) with name equal to `GITHUB_BOT_PERSONAL_ACCESS_TOKEN` and value equal to `my-github-bot-personal-access-token`. **Make sure that the "Display value in build log" option is turned OFF.**
### Step 7
In the "Cron Jobs" section of the Travis settings page, [create a new cron job for your fork](https://docs.travis-ci.com/user/cron-jobs/#adding-cron-jobs). For "Branch", select `master`. For "Interval", select `weekly`. For "Options", select `Do not run if there has been a build in the last 24h`.
**Congratulations, you have finished this section.**
## Running the updater manually
### Step 1
Download the code from your fork:
```bash
git clone https://github.com/MY_GITHUB_ORG/MirrorUpdater.jl
```
### Step 2
`cd` into the `MirrorUpdater.jl` directory:
```bash
cd MirrorUpdater.jl
```
### Step 3
Install the dependencies of the project:
```bash
julia --project -e 'import Pkg; Pkg.resolve();'
```
### Step 4
Build the project:
```bash
julia --project -e 'import Pkg; Pkg.build("MirrorUpdater");'
```
### Step 5
Run the package tests:
```bash
julia --project -e 'import Pkg; Pkg.test("MirrorUpdater");'
```
### Step 6
Set the appropriate environment variables:
```bash
export GITHUB_BOT_PERSONAL_ACCESS_TOKEN="my-github-bot-personal-access-token"
export GITLAB_BOT_PERSONAL_ACCESS_TOKEN="my-gitlab-bot-personal-access-token"
export BITBUCKET_BOT_APP_PASSWORD="my-bitbucket-bot-app-password"
```
### Step 7
Run the updater:
```bash
julia --project run-github-mirror-updater.jl
```
**Congratulations, you have finished this section.**
## Troubleshooting common issues
| Issue | Solution |
| ----- | -------- |
| You get an error of the form "remote: GitLab: You are not allowed to force push code to a protected branch on this project" when trying to push to a remote of the form `https://MY_GITLAB_BOT_USERNAME:[secure]@gitlab.com/MY_GITLAB_GROUP/EXAMPLE-REPO-NAME` | Go to `https://gitlab.com/MY_GITLAB_GROUP/EXAMPLE-REPO-NAME/settings/repository`, click on the "Expand" button next to "Protected Branches", and unprotect all of the protected branches. |
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | docs | 834 | ---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
**To Reproduce**
Steps to reproduce the behavior:
1. Go to '...'
2. Click on '....'
3. Scroll down to '....'
4. See error
**Expected behavior**
A clear and concise description of what you expected to happen.
**Screenshots**
If applicable, add screenshots to help explain your problem.
**Desktop (please complete the following information):**
- OS: [e.g. iOS]
- Browser [e.g. chrome, safari]
- Version [e.g. 22]
**Smartphone (please complete the following information):**
- Device: [e.g. iPhone6]
- OS: [e.g. iOS8.1]
- Browser [e.g. stock browser, safari]
- Version [e.g. 22]
**Additional context**
Add any other context about the problem here.
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 0.3.0 | 814430aa6b98205cb6367658dcd07c983af5c62b | docs | 595 | ---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
---
**Is your feature request related to a problem? Please describe.**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
**Describe the solution you'd like**
A clear and concise description of what you want to happen.
**Describe alternatives you've considered**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
| MirrorUpdater | https://github.com/UnofficialJuliaMirror/MirrorUpdater.jl.git |
|
[
"MIT"
] | 1.1.0 | 3d2f9447be6ced54399b819fd30a5ee9e018791c | code | 7704 | module OktaJWTVerifier
using Base64, HTTP, JSON, Dates, JWTs, ExpiringCaches, Logging
export Verifier, verify_access_token!, verify_id_token!
const regx = r"[a-zA-Z0-9-_]+\.[a-zA-Z0-9-_]+\.?([a-zA-Z0-9-_]+)[/a-zA-Z0-9-_]+?$"
function jsonparse(bytes)
try
return JSON.parse(String(bytes))
catch
throw(ArgumentError("failed to parse JSON"))
end
end
struct Verifier
issuer::String
claims_to_validate::Dict{String, String}
discovery_well_known_url::String
jwkset::Union{JWKSet, Nothing}
jwkset_cache::ExpiringCaches.Cache{String, Any}
cache::ExpiringCaches.Cache{String, Any}
metadata_cache::ExpiringCaches.Cache{String, Any}
leeway::Int64
timeout::Dates.Minute
cleanup::Dates.Minute
end
"""
Verifier(issuer::String;
claims_to_validate::Dict{String, String} = Dict{String, String}(),
timeout::Dates.Minute = Dates.Minute(5),
        discovery_well_known_url::String = ".well-known/openid-configuration",
cache::Cache = Cache{String, Any}(timeout),
metadata_cache::Cache = Cache{String, Any}(timeout),
leeway::Int64 = 120,
cleanup::Dates.Minute = Dates.Minute(5)
)
Create a new Verifier for the given issuer. The issuer is the full issuer URL of the
Okta org, e.g. https://dev-123456.okta.com/oauth2/default. The issuer is used to fetch the
metadata for the Okta org, which is cached for the duration of the timeout.
Verifier objects can then be used to verify access tokens and id tokens.
See [`verify_access_token!`](@ref) and [`verify_id_token!`](@ref) for more details.
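# Example
A minimal construction sketch; the issuer URL and audience below are placeholders for values
from your own Okta org:
```julia
v = Verifier(
    "https://{yourOktaDomain}/oauth2/default";
    claims_to_validate = Dict("aud" => "api://default"),
)
```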
"""
function Verifier(issuer::String;
claims_to_validate::Dict{String, String} = Dict{String, String}(),
timeout::Dates.Minute = Dates.Minute(5),
discovery_well_known_url::String = ".well-known/openid-configuration",
cache::Cache = Cache{String, Any}(timeout),
metadata_cache::Cache = Cache{String, Any}(timeout),
leeway::Int64 = 120,
cleanup::Dates.Minute = Dates.Minute(5)
)
return Verifier(issuer, claims_to_validate, discovery_well_known_url, nothing, Cache{String, Any}(timeout), cache, metadata_cache, leeway, timeout, cleanup)
end
struct Jwt
claims::Dict{String, Any}
end
# http get to metadata url to get the jwks_uri
function fetch_metadata(url::String)
local resp
try
resp = HTTP.get(url)
catch e
@error "failed to fetch metadata" exception=(e, catch_backtrace())
throw(ArgumentError("Request for metadata $url was not HTTP 2xx OK"))
end
return jsonparse(resp.body)
end
function get_metadata(j::Verifier)
metadata_url = joinpath(j.issuer, j.discovery_well_known_url)
return get!(j.metadata_cache, metadata_url) do
fetch_metadata(metadata_url)
end
end
const DOWNLOADER = Ref{Any}(nothing)
function decode(v::Verifier, jwt::String, jwkuri::String)
jwkset = get!(v.jwkset_cache, jwkuri) do
jks = JWKSet(jwkuri)
refresh!(jks; downloader=DOWNLOADER[])
return jks
end
token = JWT(; jwt)
validate!(token, jwkset)
return claims(token)
end
function decode_jwt(j::Verifier, jwt::String)
metadata = get_metadata(j)
jwkuri = get(metadata, "jwks_uri", "")
jwkuri == "" && throw(ArgumentError("failed to decode JWT: missing 'jwks_uri' from metadata"))
return decode(j, jwt, jwkuri)
end
"""
verify_access_token!(j::Verifier, jwt::String)
Verify the given access token using the given Verifier. The Verifier must have been
created with the same issuer as the access token. The access token must be a valid JWT
and must have been issued by the same issuer as the Verifier. The access token must also
be valid according to the claims_to_validate passed to the Verifier constructor.
Returns a Jwt object containing the claims of the access token.
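# Example
A usage sketch; it assumes `v` is a `Verifier` whose `claims_to_validate` contains at least
an `"aud"` entry, and that `access_token_string` holds the raw JWT issued by the same Okta org:
```julia
jwt = verify_access_token!(v, access_token_string)
jwt.claims["sub"]  # all token claims are available in the `claims` Dict
```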
"""
function verify_access_token!(j::Verifier, jwt::String)
is_valid_jwt(jwt) || throw(ArgumentError("token is not valid: $jwt"))
myJwt = Jwt(decode_jwt(j, jwt))
validate_iss!(j, myJwt.claims["iss"])
validate_audience!(j, myJwt.claims["aud"])
haskey(myJwt.claims, "cid") && validate_client_id!(j, myJwt.claims["cid"])
validate_exp!(j, myJwt.claims["exp"])
validate_iat!(j, myJwt.claims["iat"])
return myJwt
end
"""
verify_id_token!(j::Verifier, jwt::String)
Verify the given id token using the given Verifier. The Verifier must have been
created with the same issuer as the id token. The id token must be a valid JWT
and must have been issued by the same issuer as the Verifier. The id token must also
be valid according to the claims_to_validate passed to the Verifier constructor.
Returns a Jwt object containing the claims of the id token.
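# Example
A usage sketch; the issuer, audience, and nonce below are placeholders, and `id_token_string`
holds the raw JWT. Note that id-token verification also compares the token's `nonce` claim
against `claims_to_validate["nonce"]`, so supply the nonce used in the original authorize request:
```julia
v = Verifier(
    "https://{yourOktaDomain}/oauth2/default";
    claims_to_validate = Dict("aud" => "{clientId}", "nonce" => "{nonce}"),
)
jwt = verify_id_token!(v, id_token_string)
```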
"""
function verify_id_token!(j::Verifier, jwt::String)
is_valid_jwt(jwt) || throw(ArgumentError("token is not valid: $jwt"))
myJwt = Jwt(decode_jwt(j, jwt))
validate_iss!(j, myJwt.claims["iss"])
validate_audience!(j, myJwt.claims["aud"])
validate_client_id!(j, myJwt.claims["cid"])
validate_exp!(j, myJwt.claims["exp"])
validate_iat!(j, myJwt.claims["iat"])
validate_nonce!(j, myJwt.claims["nonce"])
return myJwt
end
function validate_nonce!(j::Verifier, nonce::String)
if get(j.claims_to_validate, "nonce", "") != nonce
throw(ArgumentError("nonce does not match"))
end
end
function validate_audience!(j::Verifier, aud::Union{String, Vector, Dict})
if aud isa String
aud == j.claims_to_validate["aud"] || throw(ArgumentError("audience does not match"))
elseif aud isa Vector
any(==(j.claims_to_validate["aud"]), aud) || throw(ArgumentError("audience does not match"))
elseif aud isa Dict
any(==(j.claims_to_validate["aud"]), values(aud)) || throw(ArgumentError("audience does not match"))
else
throw(ArgumentError("unknown audience type; unable to validate"))
end
end
function validate_client_id!(j::Verifier, cid::String)
if haskey(j.claims_to_validate, "cid")
v = j.claims_to_validate["cid"]
if v isa String
v == cid || throw(ArgumentError("client id does not match"))
elseif v isa Vector
any(==(cid), v) || throw(ArgumentError("client id does not match"))
else
throw(ArgumentError("unknown client id type"))
end
end
end
function validate_exp!(j::Verifier, exp::Int64)
now = Dates.datetime2unix(Dates.now(Dates.UTC) - Dates.Second(j.leeway))
    # the token is treated as expired only if exp is more than [leeway] seconds in the past
exp < now && throw(ArgumentError("token is expired"))
end
function validate_iat!(j::Verifier, iat::Int64)
now = Dates.datetime2unix(Dates.now(Dates.UTC) + Dates.Second(j.leeway))
# if iat is greater than [leeway] seconds in the future, then it's invalid
now < iat && throw(ArgumentError("token issued in the future"))
end
function validate_iss!(j::Verifier, iss::String)
if iss != j.issuer
throw(ArgumentError("issuer does not match"))
end
end
function is_valid_jwt(jwt::String)
jwt == "" && throw(ArgumentError("token is empty"))
match(regx, jwt) !== nothing || throw(ArgumentError("token is not valid: $jwt"))
parts = split(jwt, ".")
header = jsonparse(base64decode(padheader(String(parts[1]))))
haskey(header, "alg") || throw(ArgumentError("the token's header must contain an 'alg'"))
haskey(header, "kid") || throw(ArgumentError("the token's header must contain a 'kid'"))
header["alg"] == "RS256" || throw(ArgumentError("the token's alg must be 'RS256'"))
return true
end
function padheader(header::String)::String
i = length(header) % 4
if i != 0
header *= repeat("=", 4 - i)
end
return header
end
end
| OktaJWTVerifier | https://github.com/JuliaServices/OktaJWTVerifier.jl.git |
|
[
"MIT"
] | 1.1.0 | 3d2f9447be6ced54399b819fd30a5ee9e018791c | code | 2825 | using Test, Dates, OktaJWTVerifier
@testset "OktaJWTVerifier" begin
# validate issuer
v = Verifier("https://golang.oktapreview.com")
@test_throws ArgumentError OktaJWTVerifier.validate_iss!(v, "test")
# validate nonce
v = Verifier("https://golang.oktapreview.com"; claims_to_validate=Dict("nonce" => "abc123"))
@test_throws ArgumentError OktaJWTVerifier.validate_nonce!(v, "test")
# validate audience
v = Verifier("https://golang.oktapreview.com"; claims_to_validate=Dict("aud" => "test"))
@test_throws ArgumentError OktaJWTVerifier.validate_audience!(v, "test2")
# validate cid
v = Verifier("https://golang.oktapreview.com"; claims_to_validate=Dict("cid" => "test"))
@test_throws ArgumentError OktaJWTVerifier.validate_client_id!(v, "test2")
# validate iat
v = Verifier("https://golang.oktapreview.com")
iat = round(Int, Dates.datetime2unix(Dates.now(Dates.UTC) + Dates.Day(1)))
@test_throws ArgumentError OktaJWTVerifier.validate_iat!(v, iat)
# iat within leeway doesn't throw
iat = round(Int, Dates.datetime2unix(Dates.now(Dates.UTC)))
@test !OktaJWTVerifier.validate_iat!(v, iat)
# validate exp
exp = round(Int, Dates.datetime2unix(Dates.now(Dates.UTC) - Dates.Day(1)))
@test_throws ArgumentError OktaJWTVerifier.validate_exp!(v, exp)
# exp within leeway doesn't throw
exp = round(Int, Dates.datetime2unix(Dates.now(Dates.UTC)))
@test !OktaJWTVerifier.validate_exp!(v, exp)
# id token tests
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "test")
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "123456789.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "aa.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "ew0KICAia2lkIjogImFiYzEyMyIsDQogICJhbmQiOiAidGhpcyINCn0.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "ew0KICAiYWxnIjogIlJTMjU2IiwNCiAgImFuZCI6ICJ0aGlzIg0KfQ.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_id_token!(v, "ew0KICAia2lkIjogImFiYzEyMyIsDQogICJhbGciOiAiSFMyNTYiDQp9.aa.aa")
# access token tests
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "test")
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "123456789.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "aa.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "ew0KICAia2lkIjogImFiYzEyMyIsDQogICJhbmQiOiAidGhpcyINCn0.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "ew0KICAiYWxnIjogIlJTMjU2IiwNCiAgImFuZCI6ICJ0aGlzIg0KfQ.aa.aa")
@test_throws ArgumentError OktaJWTVerifier.verify_access_token!(v, "ew0KICAia2lkIjogImFiYzEyMyIsDQogICJhbGciOiAiSFMyNTYiDQp9.aa.aa")
end
| OktaJWTVerifier | https://github.com/JuliaServices/OktaJWTVerifier.jl.git |
|
[
"MIT"
] | 1.1.0 | 3d2f9447be6ced54399b819fd30a5ee9e018791c | docs | 834 | # OktaJWTVerifier.jl
Simple package for verifying Okta JWTs written in pure Julia.
GitHub Actions : [](https://github.com/JuliaServices/OktaJWTVerifier.jl/actions?query=workflow%3ACI+branch%3Amain)
[](http://codecov.io/github/JuliaServices/OktaJWTVerifier.jl?branch=main)
## Usage
Usage is simple: first create a `Verifier` instance, then use it to verify access or ID tokens.
```julia
using OktaJWTVerifier
v = OktaJWTVerifier.Verifier("https://myoktadomain.okta.com/oauth2/default"; claims_to_validate=Dict("aud" => "myoktaaudience"))
verify_access_token!(v, "myoktaaccesstoken")
verify_id_token!(v, "myoktaidtoken")
```
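The claim checks throw an `ArgumentError` when validation fails, so callers that prefer not to let exceptions propagate can wrap verification in a `try`/`catch`. A minimal sketch (the token string is a placeholder):
```julia
try
    jwt = verify_access_token!(v, "myoktaaccesstoken")
    @info "token accepted" claims=jwt.claims
catch e
    e isa ArgumentError || rethrow()
    @warn "token rejected" reason=e.msg
end
```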
| OktaJWTVerifier | https://github.com/JuliaServices/OktaJWTVerifier.jl.git |
|
[
"MIT"
] | 0.2.2 | 1fbf225c0a1c2afa9a0b06ddbc11d89a37dbcc47 | code | 17031 | ### A Pluto.jl notebook ###
# v0.18.2
using Markdown
using InteractiveUtils
# ╔═╡ 1d24602c-a463-11ec-1b01-4b6d9133a279
using PlutoUI, Dates, SQLiteGraph, CSV, DataFrames, EasyConfig
# ╔═╡ 72412469-67d7-42e3-a7b2-ce05e860e302
md"""
# `reddit_notebook.jl`
- This notebook analyzes the data at [https://snap.stanford.edu/data/soc-RedditHyperlinks.html](https://snap.stanford.edu/data/soc-RedditHyperlinks.html)
"""
# ╔═╡ a78fca5f-fa6a-4929-8861-be95c13492fe
PlutoUI.TableOfContents()
# ╔═╡ a5e78f84-b1b4-4e43-afb3-29cd5ec0e8ff
md"## Load Packages"
# ╔═╡ a51e7694-8209-44e9-bff3-ed2fb607c069
md"## Load Data"
# ╔═╡ 439c9e48-f1b1-4d3f-b1ae-e2b8bbe13c84
url = "https://snap.stanford.edu/data/soc-redditHyperlinks-body.tsv"
# ╔═╡ 678cae6f-d2e8-4d20-85b3-0d5080b07a62
path = joinpath(@__DIR__, "reddit.tsv")
# ╔═╡ b38fb416-419d-40e0-9cb1-d16106969d07
rows = CSV.Rows(path, delim='\t');
# ╔═╡ cf6d196e-9f7e-444b-a88c-78e32bf88fba
first(rows).PROPERTIES
# ╔═╡ be695499-5529-4029-a326-26a9391b2069
function parse_properties(props)
data = split(props, ',')
keys = [
"Number of characters"
"Number of characters without counting white space"
"Fraction of alphabetical characters"
"Fraction of digits"
"Fraction of uppercase characters"
"Fraction of white spaces"
"Fraction of special characters, such as comma, exclamation mark, etc."
"Number of words"
"Number of unique works"
"Number of long words (at least 6 characters)"
"Average word length"
"Number of unique stopwords"
"Fraction of stopwords"
"Number of sentences"
"Number of long sentences (at least 10 words)"
"Average number of characters per sentence"
"Average number of words per sentence"
"Automated readability index"
"Positive sentiment calculated by VADER"
"Negative sentiment calculated by VADER"
"Compound sentiment calculated by VADER"
]
for i in 1:length(keys)
keys[i] = replace(keys[i], " " => "_")
end
Config(key => parse(Float64, x) for (key,x) in zip(keys, data))
end
# ╔═╡ a291bf9e-48b2-4673-9c0f-1df440ff2db3
df = let
df = DataFrame(rows)
df.TIMESTAMP = DateTime.(df.TIMESTAMP, dateformat"yyyy-mm-dd H:M:S")
df.LINK_SENTIMENT = parse.(Int, df.LINK_SENTIMENT)
df.PROPERTIES = parse_properties.(df.PROPERTIES)
df
end
# ╔═╡ d99040c0-eb84-4b98-b4c7-971a3681e0a7
subreddits = sort!(unique!(vcat(df.SOURCE_SUBREDDIT, df.TARGET_SUBREDDIT)))
# ╔═╡ 8e6df98b-22ce-4c1e-bf45-5e154725586a
gb = groupby(df, [:SOURCE_SUBREDDIT, :TARGET_SUBREDDIT]);
# ╔═╡ 0763d60f-82e9-44af-a26b-931e6749d4af
df2 = combine(gb, :PROPERTIES => length => "N_LINKS")
# ╔═╡ 5c222537-f561-4b47-acf5-6731a05d16a9
md"## Make Graph"
# ╔═╡ 30450ece-69b6-45d4-b9ee-8b453cb0f93f
db = let
db = DB()
for (i,sub) in enumerate(subreddits)
insert!(db, Node(i, "Subreddit"; name=sub))
end
for row in eachrow(df2)
i = findfirst(==(row.SOURCE_SUBREDDIT), subreddits)
j = findfirst(==(row.TARGET_SUBREDDIT), subreddits)
insert!(db, Edge(i,j,"Has_Link", nlinks=row.N_LINKS))
end
db
end
# ╔═╡ 69819a25-1759-4001-a960-44f0dcf88ed2
db[200]
# ╔═╡ 00000000-0000-0000-0000-000000000001
PLUTO_PROJECT_TOML_CONTENTS = """
[deps]
CSV = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
Dates = "ade2ca70-3891-5945-98fb-dc099432e06a"
EasyConfig = "acab07b0-f158-46d4-8913-50acef6d41fe"
PlutoUI = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
SQLiteGraph = "de2defd0-d76e-464a-9029-1d71e199ae58"
[compat]
CSV = "~0.10.3"
DataFrames = "~1.3.2"
EasyConfig = "~0.1.10"
PlutoUI = "~0.7.37"
SQLiteGraph = "~0.2.0"
"""
# ╔═╡ 00000000-0000-0000-0000-000000000002
PLUTO_MANIFEST_TOML_CONTENTS = """
# This file is machine-generated - editing it directly is not advised
julia_version = "1.7.2"
manifest_format = "2.0"
[[deps.AbstractPlutoDingetjes]]
deps = ["Pkg"]
git-tree-sha1 = "8eaf9f1b4921132a4cff3f36a1d9ba923b14a481"
uuid = "6e696c72-6542-2067-7265-42206c756150"
version = "1.1.4"
[[deps.ArgTools]]
uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f"
[[deps.Artifacts]]
uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
[[deps.Base64]]
uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f"
[[deps.BinaryProvider]]
deps = ["Libdl", "Logging", "SHA"]
git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058"
uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232"
version = "0.5.10"
[[deps.CSV]]
deps = ["CodecZlib", "Dates", "FilePathsBase", "InlineStrings", "Mmap", "Parsers", "PooledArrays", "SentinelArrays", "Tables", "Unicode", "WeakRefStrings"]
git-tree-sha1 = "9310d9495c1eb2e4fa1955dd478660e2ecab1fbb"
uuid = "336ed68f-0bac-5ca0-87d4-7b16caf5d00b"
version = "0.10.3"
[[deps.CodecZlib]]
deps = ["TranscodingStreams", "Zlib_jll"]
git-tree-sha1 = "ded953804d019afa9a3f98981d99b33e3db7b6da"
uuid = "944b1d66-785c-5afd-91f1-9de20f533193"
version = "0.7.0"
[[deps.ColorTypes]]
deps = ["FixedPointNumbers", "Random"]
git-tree-sha1 = "024fe24d83e4a5bf5fc80501a314ce0d1aa35597"
uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f"
version = "0.11.0"
[[deps.Compat]]
deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"]
git-tree-sha1 = "96b0bc6c52df76506efc8a441c6cf1adcb1babc4"
uuid = "34da2185-b29b-5c13-b0c7-acf172513d20"
version = "3.42.0"
[[deps.CompilerSupportLibraries_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae"
[[deps.Crayons]]
git-tree-sha1 = "249fe38abf76d48563e2f4556bebd215aa317e15"
uuid = "a8cc5b0e-0ffa-5ad4-8c14-923d3ee1735f"
version = "4.1.1"
[[deps.DBInterface]]
git-tree-sha1 = "9b0dc525a052b9269ccc5f7f04d5b3639c65bca5"
uuid = "a10d1c49-ce27-4219-8d33-6db1a4562965"
version = "2.5.0"
[[deps.DataAPI]]
git-tree-sha1 = "cc70b17275652eb47bc9e5f81635981f13cea5c8"
uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a"
version = "1.9.0"
[[deps.DataFrames]]
deps = ["Compat", "DataAPI", "Future", "InvertedIndices", "IteratorInterfaceExtensions", "LinearAlgebra", "Markdown", "Missings", "PooledArrays", "PrettyTables", "Printf", "REPL", "Reexport", "SortingAlgorithms", "Statistics", "TableTraits", "Tables", "Unicode"]
git-tree-sha1 = "ae02104e835f219b8930c7664b8012c93475c340"
uuid = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
version = "1.3.2"
[[deps.DataStructures]]
deps = ["Compat", "InteractiveUtils", "OrderedCollections"]
git-tree-sha1 = "3daef5523dd2e769dad2365274f760ff5f282c7d"
uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8"
version = "0.18.11"
[[deps.DataValueInterfaces]]
git-tree-sha1 = "bfc1187b79289637fa0ef6d4436ebdfe6905cbd6"
uuid = "e2d170a0-9d28-54be-80f0-106bbe20a464"
version = "1.0.0"
[[deps.Dates]]
deps = ["Printf"]
uuid = "ade2ca70-3891-5945-98fb-dc099432e06a"
[[deps.DelimitedFiles]]
deps = ["Mmap"]
uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab"
[[deps.Distributed]]
deps = ["Random", "Serialization", "Sockets"]
uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b"
[[deps.Downloads]]
deps = ["ArgTools", "LibCURL", "NetworkOptions"]
uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6"
[[deps.EasyConfig]]
deps = ["JSON3", "OrderedCollections", "StructTypes"]
git-tree-sha1 = "c070b3c48a8ba3c6e6507997f0a7f5ebf85c3600"
uuid = "acab07b0-f158-46d4-8913-50acef6d41fe"
version = "0.1.10"
[[deps.FilePathsBase]]
deps = ["Compat", "Dates", "Mmap", "Printf", "Test", "UUIDs"]
git-tree-sha1 = "04d13bfa8ef11720c24e4d840c0033d145537df7"
uuid = "48062228-2e41-5def-b9a4-89aafe57970f"
version = "0.9.17"
[[deps.FixedPointNumbers]]
deps = ["Statistics"]
git-tree-sha1 = "335bfdceacc84c5cdf16aadc768aa5ddfc5383cc"
uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93"
version = "0.8.4"
[[deps.Formatting]]
deps = ["Printf"]
git-tree-sha1 = "8339d61043228fdd3eb658d86c926cb282ae72a8"
uuid = "59287772-0a20-5a39-b81b-1366585eb4c0"
version = "0.4.2"
[[deps.Future]]
deps = ["Random"]
uuid = "9fa8497b-333b-5362-9e8d-4d0656e87820"
[[deps.Hyperscript]]
deps = ["Test"]
git-tree-sha1 = "8d511d5b81240fc8e6802386302675bdf47737b9"
uuid = "47d2ed2b-36de-50cf-bf87-49c2cf4b8b91"
version = "0.0.4"
[[deps.HypertextLiteral]]
git-tree-sha1 = "2b078b5a615c6c0396c77810d92ee8c6f470d238"
uuid = "ac1192a8-f4b3-4bfe-ba22-af5b92cd3ab2"
version = "0.9.3"
[[deps.IOCapture]]
deps = ["Logging", "Random"]
git-tree-sha1 = "f7be53659ab06ddc986428d3a9dcc95f6fa6705a"
uuid = "b5f81e59-6552-4d32-b1f0-c071b021bf89"
version = "0.2.2"
[[deps.InlineStrings]]
deps = ["Parsers"]
git-tree-sha1 = "61feba885fac3a407465726d0c330b3055df897f"
uuid = "842dd82b-1e85-43dc-bf29-5d0ee9dffc48"
version = "1.1.2"
[[deps.InteractiveUtils]]
deps = ["Markdown"]
uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240"
[[deps.InvertedIndices]]
git-tree-sha1 = "bee5f1ef5bf65df56bdd2e40447590b272a5471f"
uuid = "41ab1584-1d38-5bbf-9106-f11c6c58b48f"
version = "1.1.0"
[[deps.IteratorInterfaceExtensions]]
git-tree-sha1 = "a3f24677c21f5bbe9d2a714f95dcd58337fb2856"
uuid = "82899510-4779-5014-852e-03e436cf321d"
version = "1.0.0"
[[deps.JLLWrappers]]
deps = ["Preferences"]
git-tree-sha1 = "abc9885a7ca2052a736a600f7fa66209f96506e1"
uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
version = "1.4.1"
[[deps.JSON]]
deps = ["Dates", "Mmap", "Parsers", "Unicode"]
git-tree-sha1 = "3c837543ddb02250ef42f4738347454f95079d4e"
uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6"
version = "0.21.3"
[[deps.JSON3]]
deps = ["Dates", "Mmap", "Parsers", "StructTypes", "UUIDs"]
git-tree-sha1 = "8c1f668b24d999fb47baf80436194fdccec65ad2"
uuid = "0f8b85d8-7281-11e9-16c2-39a750bddbf1"
version = "1.9.4"
[[deps.LibCURL]]
deps = ["LibCURL_jll", "MozillaCACerts_jll"]
uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21"
[[deps.LibCURL_jll]]
deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"]
uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0"
[[deps.LibGit2]]
deps = ["Base64", "NetworkOptions", "Printf", "SHA"]
uuid = "76f85450-5226-5b5a-8eaa-529ad045b433"
[[deps.LibSSH2_jll]]
deps = ["Artifacts", "Libdl", "MbedTLS_jll"]
uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8"
[[deps.Libdl]]
uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
[[deps.LinearAlgebra]]
deps = ["Libdl", "libblastrampoline_jll"]
uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
[[deps.Logging]]
uuid = "56ddb016-857b-54e1-b83d-db4d58db5568"
[[deps.Markdown]]
deps = ["Base64"]
uuid = "d6f4376e-aef5-505a-96c1-9c027394607a"
[[deps.MbedTLS_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1"
[[deps.Missings]]
deps = ["DataAPI"]
git-tree-sha1 = "bf210ce90b6c9eed32d25dbcae1ebc565df2687f"
uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28"
version = "1.0.2"
[[deps.Mmap]]
uuid = "a63ad114-7e13-5084-954f-fe012c677804"
[[deps.MozillaCACerts_jll]]
uuid = "14a3606d-f60d-562e-9121-12d972cd8159"
[[deps.NetworkOptions]]
uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908"
[[deps.OpenBLAS_jll]]
deps = ["Artifacts", "CompilerSupportLibraries_jll", "Libdl"]
uuid = "4536629a-c528-5b80-bd46-f80d51c5b363"
[[deps.OrderedCollections]]
git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c"
uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d"
version = "1.4.1"
[[deps.Parsers]]
deps = ["Dates"]
git-tree-sha1 = "85b5da0fa43588c75bb1ff986493443f821c70b7"
uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0"
version = "2.2.3"
[[deps.Pkg]]
deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"]
uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
[[deps.PlutoUI]]
deps = ["AbstractPlutoDingetjes", "Base64", "ColorTypes", "Dates", "Hyperscript", "HypertextLiteral", "IOCapture", "InteractiveUtils", "JSON", "Logging", "Markdown", "Random", "Reexport", "UUIDs"]
git-tree-sha1 = "bf0a1121af131d9974241ba53f601211e9303a9e"
uuid = "7f904dfe-b85e-4ff6-b463-dae2292396a8"
version = "0.7.37"
[[deps.PooledArrays]]
deps = ["DataAPI", "Future"]
git-tree-sha1 = "db3a23166af8aebf4db5ef87ac5b00d36eb771e2"
uuid = "2dfb63ee-cc39-5dd5-95bd-886bf059d720"
version = "1.4.0"
[[deps.Preferences]]
deps = ["TOML"]
git-tree-sha1 = "d3538e7f8a790dc8903519090857ef8e1283eecd"
uuid = "21216c6a-2e73-6563-6e65-726566657250"
version = "1.2.5"
[[deps.PrettyTables]]
deps = ["Crayons", "Formatting", "Markdown", "Reexport", "Tables"]
git-tree-sha1 = "dfb54c4e414caa595a1f2ed759b160f5a3ddcba5"
uuid = "08abe8d2-0d0c-5749-adfa-8a2ac140af0d"
version = "1.3.1"
[[deps.Printf]]
deps = ["Unicode"]
uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7"
[[deps.REPL]]
deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"]
uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb"
[[deps.Random]]
deps = ["SHA", "Serialization"]
uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
[[deps.Reexport]]
git-tree-sha1 = "45e428421666073eab6f2da5c9d310d99bb12f9b"
uuid = "189a3867-3050-52da-a836-e630ba90ab69"
version = "1.2.2"
[[deps.SHA]]
uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce"
[[deps.SQLite]]
deps = ["BinaryProvider", "DBInterface", "Dates", "Libdl", "Random", "SQLite_jll", "Serialization", "Tables", "Test", "WeakRefStrings"]
git-tree-sha1 = "8e14d9b200b975e93a0ae0e5d17dea1c262690ee"
uuid = "0aa819cd-b072-5ff4-a722-6bc24af294d9"
version = "1.4.0"
[[deps.SQLiteGraph]]
deps = ["DBInterface", "DataFrames", "EasyConfig", "JSON3", "SQLite"]
git-tree-sha1 = "004f75e30ea2b98d6ab487a758816ddae8c96cd1"
uuid = "de2defd0-d76e-464a-9029-1d71e199ae58"
version = "0.2.0"
[[deps.SQLite_jll]]
deps = ["Artifacts", "JLLWrappers", "Libdl", "Pkg", "Zlib_jll"]
git-tree-sha1 = "f79c1c58951ea4f5bb63bb96b99bf7f440a3f774"
uuid = "76ed43ae-9a5d-5a62-8c75-30186b810ce8"
version = "3.38.0+0"
[[deps.SentinelArrays]]
deps = ["Dates", "Random"]
git-tree-sha1 = "6a2f7d70512d205ca8c7ee31bfa9f142fe74310c"
uuid = "91c51154-3ec4-41a3-a24f-3f23e20d615c"
version = "1.3.12"
[[deps.Serialization]]
uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b"
[[deps.SharedArrays]]
deps = ["Distributed", "Mmap", "Random", "Serialization"]
uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383"
[[deps.Sockets]]
uuid = "6462fe0b-24de-5631-8697-dd941f90decc"
[[deps.SortingAlgorithms]]
deps = ["DataStructures"]
git-tree-sha1 = "b3363d7460f7d098ca0912c69b082f75625d7508"
uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c"
version = "1.0.1"
[[deps.SparseArrays]]
deps = ["LinearAlgebra", "Random"]
uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
[[deps.Statistics]]
deps = ["LinearAlgebra", "SparseArrays"]
uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
[[deps.StructTypes]]
deps = ["Dates", "UUIDs"]
git-tree-sha1 = "d24a825a95a6d98c385001212dc9020d609f2d4f"
uuid = "856f2bd8-1eba-4b0a-8007-ebc267875bd4"
version = "1.8.1"
[[deps.TOML]]
deps = ["Dates"]
uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
[[deps.TableTraits]]
deps = ["IteratorInterfaceExtensions"]
git-tree-sha1 = "c06b2f539df1c6efa794486abfb6ed2022561a39"
uuid = "3783bdb8-4a98-5b6b-af9a-565f29a5fe9c"
version = "1.0.1"
[[deps.Tables]]
deps = ["DataAPI", "DataValueInterfaces", "IteratorInterfaceExtensions", "LinearAlgebra", "OrderedCollections", "TableTraits", "Test"]
git-tree-sha1 = "5ce79ce186cc678bbb5c5681ca3379d1ddae11a1"
uuid = "bd369af6-aec1-5ad0-b16a-f7cc5008161c"
version = "1.7.0"
[[deps.Tar]]
deps = ["ArgTools", "SHA"]
uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e"
[[deps.Test]]
deps = ["InteractiveUtils", "Logging", "Random", "Serialization"]
uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
[[deps.TranscodingStreams]]
deps = ["Random", "Test"]
git-tree-sha1 = "216b95ea110b5972db65aa90f88d8d89dcb8851c"
uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa"
version = "0.9.6"
[[deps.UUIDs]]
deps = ["Random", "SHA"]
uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4"
[[deps.Unicode]]
uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5"
[[deps.WeakRefStrings]]
deps = ["DataAPI", "InlineStrings", "Parsers"]
git-tree-sha1 = "b1be2855ed9ed8eac54e5caff2afcdb442d52c23"
uuid = "ea10d353-3f73-51f8-a26c-33c1cb351aa5"
version = "1.4.2"
[[deps.Zlib_jll]]
deps = ["Libdl"]
uuid = "83775a58-1f1d-513f-b197-d71354ab007a"
[[deps.libblastrampoline_jll]]
deps = ["Artifacts", "Libdl", "OpenBLAS_jll"]
uuid = "8e850b90-86db-534c-a0d3-1478176c7d93"
[[deps.nghttp2_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d"
[[deps.p7zip_jll]]
deps = ["Artifacts", "Libdl"]
uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0"
"""
# ╔═╡ Cell order:
# ╟─72412469-67d7-42e3-a7b2-ce05e860e302
# ╟─a78fca5f-fa6a-4929-8861-be95c13492fe
# ╟─a5e78f84-b1b4-4e43-afb3-29cd5ec0e8ff
# ╠═1d24602c-a463-11ec-1b01-4b6d9133a279
# ╟─a51e7694-8209-44e9-bff3-ed2fb607c069
# ╠═439c9e48-f1b1-4d3f-b1ae-e2b8bbe13c84
# ╠═678cae6f-d2e8-4d20-85b3-0d5080b07a62
# ╠═b38fb416-419d-40e0-9cb1-d16106969d07
# ╠═cf6d196e-9f7e-444b-a88c-78e32bf88fba
# ╠═be695499-5529-4029-a326-26a9391b2069
# ╠═a291bf9e-48b2-4673-9c0f-1df440ff2db3
# ╠═d99040c0-eb84-4b98-b4c7-971a3681e0a7
# ╠═8e6df98b-22ce-4c1e-bf45-5e154725586a
# ╠═0763d60f-82e9-44af-a26b-931e6749d4af
# ╟─5c222537-f561-4b47-acf5-6731a05d16a9
# ╠═30450ece-69b6-45d4-b9ee-8b453cb0f93f
# ╠═69819a25-1759-4001-a960-44f0dcf88ed2
# ╟─00000000-0000-0000-0000-000000000001
# ╟─00000000-0000-0000-0000-000000000002
| SQLiteGraph | https://github.com/JuliaComputing/SQLiteGraph.jl.git |
|
[
"MIT"
] | 0.2.2 | 1fbf225c0a1c2afa9a0b06ddbc11d89a37dbcc47 | code | 7202 | module SQLiteGraph
using SQLite: SQLite
import DBInterface: execute
using JSON3: JSON3
using EasyConfig
export DB, Node, Edge
#-----------------------------------------------------------------------------# utils
function single_result_execute(db, stmt, args...)
ex = execute(db, stmt, args...)
isempty(ex) ? nothing : first(first(ex))
end
function print_props(io::IO, o::Config)
for (i,(k,v)) in enumerate(pairs(o))
if i < 5
print(io, k, '=', repr(v))
i == length(o) || print(io, ", ")
end
end
length(o) > 5 && print(io, "…")
end
#-----------------------------------------------------------------------------# Model
struct Node
id::Int
labels::Vector{String}
props::Config
end
Node(id::Int, labels::String...; props...) = Node(id, collect(labels), Config(props))
Node(row::SQLite.Row) = Node(row.id, split(row.labels, ';', keepempty=false), JSON3.read(row.props, Config))
function Base.show(io::IO, o::Node)
print(io, "Node($(o.id)")
!isempty(o.labels) && print(io, ", ", join(repr.(o.labels), ", "))
!isempty(o.props) && print(io, "; "); print_props(io, o.props)
print(io, ')')
end
args(n::Node) = (n.id, isempty(n.labels) ? "" : join(n.labels, ';'), JSON3.write(n.props))
struct Edge
source::Int
target::Int
type::String
props::Config
end
Edge(src::Int, tgt::Int, type::String; props...) = Edge(src, tgt, type, Config(props))
Edge(row::SQLite.Row) = Edge(row.source, row.target, row.type, JSON3.read(row.props, Config))
function Base.show(io::IO, o::Edge)
print(io, "Edge($(o.source), $(o.target), ", repr(o.type))
!isempty(o.props) && print(io, "; "); print_props(io, o.props)
print(io, ')')
end
args(e::Edge) = (e.source, e.target, e.type, JSON3.write(e.props))
#-----------------------------------------------------------------------------# Base methods
Base.:(==)(a::Node, b::Node) = all(getfield(a,f) == getfield(b,f) for f in fieldnames(Node))
Base.:(==)(a::Edge, b::Edge) = all(getfield(a,f) == getfield(b,f) for f in fieldnames(Edge))
Base.pairs(o::T) where {T<: Union{Node, Edge}} = (f => getfield(o,f) for f in fieldnames(T))
Base.NamedTuple(o::Union{Node,Edge}) = NamedTuple(pairs(o))
#-----------------------------------------------------------------------------# DB
struct DB
sqlitedb::SQLite.DB
function DB(file::String = ":memory:")
db = SQLite.DB(file)
foreach(x -> execute(db, x), [
"PRAGMA foreign_keys = ON;",
# nodes
"CREATE TABLE IF NOT EXISTS nodes (
id INTEGER NOT NULL UNIQUE PRIMARY KEY,
labels TEXT NOT NULL,
props TEXT NOT NULL
);",
# edges
"CREATE TABLE IF NOT EXISTS edges (
source INTEGER NOT NULL REFERENCES nodes(id),
target INTEGER NOT NULL REFERENCES nodes(id),
type TEXT NOT NULL,
props TEXT NOT NULL,
PRIMARY KEY (source, target, type)
);",
"CREATE INDEX IF NOT EXISTS source_idx ON edges(source);",
"CREATE INDEX IF NOT EXISTS target_idx ON edges(target);",
"CREATE INDEX IF NOT EXISTS type_idx ON edges(type);",
])
new(db)
end
end
function Base.show(io::IO, db::DB)
print(io, "SQLiteGraph.DB(\"$(db.sqlitedb.file)\") ($(n_nodes(db)) nodes, $(n_edges(db)) edges)")
end
execute(db::DB, args...; kw...) = execute(db.sqlitedb, args...; kw...)
n_nodes(db::DB) = single_result_execute(db, "SELECT Count(*) FROM nodes")
n_edges(db::DB) = single_result_execute(db, "SELECT Count(*) FROM edges")
Base.length(db::DB) = n_nodes(db)
Base.size(db::DB) = (nodes=n_nodes(db), edges=n_edges(db))
Base.lastindex(db::DB) = length(db)
Base.axes(db::DB, i) = size(db)[i]
Broadcast.broadcastable(db::DB) = Ref(db)
#-----------------------------------------------------------------------------# insert!
function Base.insert!(db::DB, node::Node)
execute(db, "INSERT INTO nodes VALUES(?, ?, json(?))", args(node))
db
end
function Base.insert!(db::DB, edge::Edge)
execute(db, "INSERT INTO edges VALUES(?, ?, ?, json(?))", args(edge))
db
end
#-----------------------------------------------------------------------------# replace!
function Base.replace!(db::DB, node::Node)
execute(db, "INSERT INTO nodes VALUES(?, ?, json(?)) ON CONFLICT(id) DO UPDATE SET labels=excluded.labels, props=excluded.props", args(node))
db
end
function Base.replace!(db::DB, edge::Edge)
execute(db, "INSERT INTO edges VALUES(?, ?, ?, json(?)) ON CONFLICT(source,target,type) DO UPDATE SET props=excluded.props", args(edge))
db
end
#-----------------------------------------------------------------------------# query
function query(db::DB, select::String, from::String, whr::String, args=nothing)
stmt = "SELECT $select FROM $from WHERE $whr"
# @info stmt
res = isnothing(args) ? execute(db, stmt) : execute(db, stmt, args)
if isempty(res)
error("No $from found where: $whr")
else
return res
end
end
#-----------------------------------------------------------------------------# getindex (Node)
Base.getindex(db::DB, i::Integer) = Node(first(query(db, "*", "nodes", "id=$i")))
Base.getindex(db::DB, ::Colon) = (Node(row) for row in query(db, "*", "nodes", "TRUE"))
#-----------------------------------------------------------------------------# getindex (Edge)
# all specified
function Base.getindex(db::DB, i::Integer, j::Integer, type::AbstractString)
Edge(first(query(db, "*", "edges", "source=$i AND target=$j AND type LIKE '$type'")))
end
# one colon
function Base.getindex(db::DB, i::Integer, j::Integer, ::Colon)
(Edge(row) for row in query(db, "*", "edges", "source=$i AND target=$j"))
end
function Base.getindex(db::DB, i::Integer, ::Colon, type::AbstractString)
(Edge(row) for row in query(db, "*", "edges", "source=$i AND type LIKE '$type'"))
end
function Base.getindex(db::DB, ::Colon, j::Integer, type::AbstractString)
(Edge(row) for row in query(db, "*", "edges", "target=$j AND type LIKE '$type'"))
end
# two colons
function Base.getindex(db::DB, i::Integer, ::Colon, ::Colon)
(Edge(row) for row in query(db, "*", "edges", "source=$i"))
end
function Base.getindex(db::DB, i::Colon, j::Integer, ::Colon)
(Edge(row) for row in query(db, "*", "edges", "target=$j"))
end
function Base.getindex(db::DB, ::Colon, ::Colon, type::AbstractString)
(Edge(row) for row in query(db, "*", "edges", "type LIKE '$type'"))
end
# all colons
Base.getindex(db::DB, ::Colon, ::Colon, ::Colon) = (Edge(row) for row in query(db,"*", "edges", "TRUE"))
#-----------------------------------------------------------------------------# adjacency_matrix
"""
adjacency_matrix(db, type)
Create the adjacency matrix for a given edge `type`. If `A[i,j] == true`, there exists an
edge from node `i` to node `j` with type `type`.
"""
function adjacency_matrix(db::DB, type)
n = n_nodes(db)
out = falses(n, n)
for row in execute(db, "SELECT DISTINCT source, target FROM edges WHERE type=?;", (type,))
out[row.source, row.target] = true
end
out
end
end
| SQLiteGraph | https://github.com/JuliaComputing/SQLiteGraph.jl.git |
|
[
"MIT"
] | 0.2.2 | 1fbf225c0a1c2afa9a0b06ddbc11d89a37dbcc47 | code | 2207 | using SQLiteGraph
using Test
using JSON3
#-----------------------------------------------------------------------------# setup
db = DB()
#-----------------------------------------------------------------------------# Nodes
@testset "Nodes" begin
@testset "Round Trips" begin
for n in [
Node(1),
Node(2, "lab"),
Node(3, "lab1", "lab2"),
Node(4, "lab"; x=1),
Node(5, "lab1", "lab2"; x=1, y=2)
]
insert!(db, n)
@test db[n.id] == n
@test_throws Exception insert!(db, n)
end
end
@testset "replace!" begin
replace!(db, Node(1, "lab"))
@test db[1].labels == ["lab"]
end
@testset "simple query" begin
q = db[:]
for (i,n) in enumerate(q)
@test n.id == i
end
@test length(collect(db[:])) == 5
end
end
#-----------------------------------------------------------------------------# Edges
@testset "Edges" begin
@testset "Round Trips" begin
for e in [
Edge(1,2,"type"),
Edge(1,3,"type"; x=1),
Edge(1,4,"type 2"; x=1,y=2,z=3)
]
insert!(db, e)
@test db[e.source, e.target, e.type] == e
@test_throws Exception insert!(db, e)
end
end
@testset "replace!" begin
replace!(db, Edge(1,2,"type"; x=1))
@test db[1,2,"type"].props.x == 1
end
@testset "simple query" begin
@test db[1,2,"type"] == Edge(1,2,"type"; x=1)
@test length(collect(db[1,2,:])) == 1
@test length(collect(db[1,:,"type"])) == 2
@test length(collect(db[:,2,"type"])) == 1
@test length(collect(db[:,:,"type"])) == 2
@test length(collect(db[:, 4, :])) == 1
@test length(collect(db[:,:,"type"])) == 2
@test length(collect(db[:,:,:])) == 3
end
@testset "Adj Matrix" begin
m = SQLiteGraph.adjacency_matrix(db, "type")
@test sum(m) == 2
@test m[1, 2]
@test m[1, 3]
m2 = SQLiteGraph.adjacency_matrix(db, "type 2")
@test sum(m2) == 1
@test m2[1, 4]
end
end
| SQLiteGraph | https://github.com/JuliaComputing/SQLiteGraph.jl.git |
|
[
"MIT"
] | 0.2.2 | 1fbf225c0a1c2afa9a0b06ddbc11d89a37dbcc47 | docs | 3121 | [](https://github.com/joshday/SQLiteGraph.jl/actions?query=workflow%3ACI+branch%3Amain)
[](https://codecov.io/gh/joshday/SQLiteGraph.jl)
<h1 align="center">SQLiteGraph</h1>
A Graph Database for Julia, built on top of [SQLite.jl](https://github.com/JuliaDatabases/SQLite.jl).
<br><br>
## Definitions
SQLiteGraph.jl uses the [Property Graph Model of the Cypher Query Language (PDF)](https://s3.amazonaws.com/artifacts.opencypher.org/openCypher9.pdf).
- A **_Node_** describes a discrete object in a domain.
- Nodes can have 0+ **_labels_** that classify what kind of node they are.
- An **_Edge_** describes a directional relationship between nodes.
- An edge must have a **_type_** that classifies the relationship.
- Both edges and nodes can have additional key-value **_properties_** that provide further information.
<br><br>
## Edges and Nodes
- Nodes and Edges have a simple representation:
```julia
struct Node
id::Int
labels::Vector{String}
props::EasyConfig.Config
end
struct Edge
source::Int
target::Int
type::String
props::EasyConfig.Config
end
```
- With simple constructors:
```julia
Node(id, labels...; props...)
Edge(source_id, target_id, type; props...)
```
<br><br>
## Adding Elements to the Graph
```julia
using SQLiteGraph
db = DB()
insert!(db, Node(1, "Person", "Actor"; name="Tom Hanks"))
insert!(db, Node(2, "Movie"; title="Forest Gump"))
insert!(db, Edge(1, 2, "Acts In"; awards=["Best Actor in a Leading Role"]))
```
<br><br>
## Editing Elements
`insert!` will not replace an existing node or edge. Instead, use `replace!`.
```julia
replace!(db, Node(2, "Movie"; title="Forest Gump", genre="Drama"))
```
<br><br>
## Simple Queries
- Use `getindex` to access elements.
- If `:` is used as an index, an iterator is returned.
```julia
db[2] # Node(2, "Movie"; title="Forest Gump", genre="Drama")
for node in db[:]
println(node)
end
# (Pretend the graph is populated with many more items. The following return iterators.)
db[1, :, "Acts In"] # All movies that Tom Hanks acts in
db[:, 2, "Acts In"] # All actors in "Forest Gump"
db[1, 2, :] # All relationships between "Tom Hanks" and "Forest Gump"
db[:, :, :] # All edges
```
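The package also includes an unexported `adjacency_matrix(db, type)` helper that materializes the edges of a given type as a Boolean matrix. A small sketch using the graph built above:
```julia
A = SQLiteGraph.adjacency_matrix(db, "Acts In")
A[1, 2]  # true: node 1 ("Tom Hanks") has an "Acts In" edge to node 2 ("Forest Gump")
```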
<br><br>
## ✨ Attribution ✨
SQLiteGraph is **STRONGLY** influenced by [https://github.com/dpapathanasiou/simple-graph](https://github.com/dpapathanasiou/simple-graph).
<br><br>
## Under the Hood Details
- Nodes and edges are saved in the `nodes` and `edges` tables, respectively.
- `nodes`
- `id` (`INTEGER`): unique identifier of a node
- `labels` (`TEXT`): stored as `;`-delimited (thus `;` cannot be used in a label)
- `props` (`TEXT`): stored as `JSON3.write(props)`
- `edges`
- `source` (`INTEGER`): id of "from" node (`nodes(id)` is a foreign key)
- `target` (`INTEGER`): id of "to" node (`nodes(id)` is a foreign key)
- `type` (`TEXT`): the "class" of the edge/relationship
- `props` (`TEXT`)
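Because the storage is plain SQLite, the tables can also be inspected directly. A minimal sketch (assuming the `db` from the examples above; `db.sqlitedb` is the underlying `SQLite.DB` handle):
```julia
using DBInterface
# list every node id with its labels, straight from the nodes table
for row in DBInterface.execute(db.sqlitedb, "SELECT id, labels FROM nodes")
    println(row.id, " => ", row.labels)
end
```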
| SQLiteGraph | https://github.com/JuliaComputing/SQLiteGraph.jl.git |
|
[
"MIT"
] | 0.2.3 | 6484a27c35ecc680948c7dc7435c97f12c2bfaf7 | code | 689 | using FuncPipelines
using Documenter
DocMeta.setdocmeta!(FuncPipelines, :DocTestSetup, :(using FuncPipelines); recursive=true)
makedocs(;
modules=[FuncPipelines],
authors="chengchingwen <[email protected]> and contributors",
repo="https://github.com/chengchingwen/FuncPipelines.jl/blob/{commit}{path}#{line}",
sitename="FuncPipelines.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://chengchingwen.github.io/FuncPipelines.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
)
deploydocs(;
repo="github.com/chengchingwen/FuncPipelines.jl",
devbranch="main",
)
| FuncPipelines | https://github.com/chengchingwen/FuncPipelines.jl.git |
|
[
"MIT"
] | 0.2.3 | 6484a27c35ecc680948c7dc7435c97f12c2bfaf7 | code | 168 | module FuncPipelines
export Pipelines, Pipeline, PipeGet, PipeVar
include("./utils.jl")
include("./pipeline.jl")
include("./show.jl")
include("./manipulate.jl")
end
| FuncPipelines | https://github.com/chengchingwen/FuncPipelines.jl.git |
|
[
"MIT"
] | 0.2.3 | 6484a27c35ecc680948c7dc7435c97f12c2bfaf7 | code | 1906 | @static if VERSION < v"1.7"
function __replace(f::Function, ps::Tuple, count)
if count == 0 || isempty(ps)
return ps
else
p1 = first(ps)
y = f(p1)
return (y, __replace(f, Base.tail(ps), count - !==(p1, y))...)
end
end
Base.replace(f::Function, ps::Pipelines; count::Integer = typemax(Int)) = Pipelines(__replace(f, ps.pipes, Base.check_count(count)))
else
Base.replace(f::Function, ps::Pipelines; count::Integer = typemax(Int)) = Pipelines(replace(f, ps.pipes; count))
end
"""
replace(f::Function, ps::Pipelines; [count::Integer])
Return a new `Pipelines` where each `Pipeline` p in `ps` is replaced by `f(p)`.
If `count` is specified, then replace at most `count` values in total (a replacement is counted when `f(p) !== p`).
"""
Base.replace(f::Function, ps::Pipelines; count::Integer = typemax(Int))
"""
replace(f::Function, p::Pipeline)
Replace the function in `p`, keeping the same target name and applied arguments.
"""
function Base.replace(f::Base.Callable, p::Pipeline)
name = target_name(p)
g = p.f
if g isa ApplyN
n = _nth(g)
if n == 1
h = ApplyN{1}(f)
else
if g.f isa ApplySyms
h = ApplyN{2}(ApplySyms{_syms(g.f)}(f))
else
h = ApplyN{2}(f)
end
end
else
h = f
end
return Pipeline{name}(h)
end
"""
Base.setindex(ps::Pipelines, p::Pipeline, i::Integer)
Replace the `i`-th pipeline in `ps` with `p`.
"""
Base.setindex(ps::Pipelines, p::Pipeline, i::Integer) = Pipelines(Base.setindex(ps.pipes, p, i))
"""
get_pipeline_func(p::Pipeline)
Get the underlying function in the pipeline.
"""
function get_pipeline_func(p::Pipeline)
f = p.f
!(f isa ApplyN) && return f
_nth(f) != 2 && return f.f
g = f.f
return g isa ApplySyms ? g.f : g
end
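# Sketch (assumes the two-argument constructor `Pipeline{name}(f, n)` from pipeline.jl,
# which wraps `f` in `ApplyN{n}`):
#
#   p = Pipeline{:out}(identity, 1)    # pipeline applying `identity` to the 1st argument
#   get_pipeline_func(p) === identity  # unwraps the ApplyN wrapper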
| FuncPipelines | https://github.com/chengchingwen/FuncPipelines.jl.git |