## Pesto.jl v0.1.6 (MIT license): source and documentation files from https://github.com/kopperud/Pesto.jl.git
#import Base.Multimedia.display
function Base.Multimedia.display(phy::phylo)
    n = phy.Nnode
    ntip = length(phy.tip_label)

    println("Rooted phylogenetic tree with $ntip tips and $n internal nodes.\n")
    println("Tip labels:")
    print("\t")
    print(join(phy.tip_label[1:min(ntip, 6)], ", "))
    if ntip > 6
        print(", ...")
    end
end
function Base.Multimedia.display(data::SSEdata)
    n = data.Nnode
    ntip = length(data.tiplab)
    ρ = data.ρ

    println("Rooted phylogenetic tree with $ntip tips and $n internal nodes, and sampling fraction ρ = $ρ.\n")
    println("Tip labels:")
    print("\t")
    print(join(data.tiplab[1:min(ntip, 6)], ", "))
    if ntip > 6
        print(", ...")
    end
end
function contains_polytomies(phy::phylo)
    edge = phy.edge

    ## count the degree of each node (the number of edges attached to it)
    degree = zeros(Int64, maximum(edge))
    for row in eachrow(edge)
        anc, dec = row
        degree[anc] += 1
        degree[dec] += 1
    end

    ## in a strictly binary tree, no node is attached to more than three edges
    return(any(degree .> 3))
end
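## Example (hypothetical usage; `primates.tre` is the example tree bundled with Pesto):
##   phy = readtree(Pesto.path("primates.tre"))
##   contains_polytomies(phy)  ## false for a strictly binary tree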
export makebins
function makebins(N0, model, lower, upper; filter = "", nbins = 18)
    λ = model.λ
    μ = model.μ
    K = length(λ)

    N = deepcopy(N0)

    ## pairwise differences in speciation rate
    Δλ = λ * ones(K)' .- ones(K) * λ'
    ## pairwise differences in extinction rate
    Δμ = μ * ones(K)' .- ones(K) * μ'
    ## pairwise differences in net diversification rate
    r = λ .- μ
    Δr = r * ones(K)' .- ones(K) * r'
    ## pairwise differences in relative extinction rate
    ϵ = μ ./ λ
    Δϵ = ϵ * ones(K)' .- ones(K) * ϵ'

    ## optionally zero out the shift counts where the focal rate changed
    if filter == "speciation"
        nonzero = Δλ .!= 0
        N[nonzero] .= 0
    elseif filter == "extinction"
        nonzero = Δμ .!= 0
        N[nonzero] .= 0
    elseif filter == "speciation+extinction"
        nonzero = (Δμ .!= 0) .& (Δλ .!= 0)
        N[nonzero] .= 0
    end

    borders = collect(range(lower, upper; length = nbins+1))
    mids = [(borders[i]+borders[i+1])/2 for i in 1:nbins]
    bins = zeros(nbins, 4)

    for (j, Δx) in enumerate([Δλ, Δμ, Δr, Δϵ])
        for i in 1:nbins
            in_bin = (Δx .> borders[i]) .& (Δx .<= borders[i+1])
            not_zero = N .> 0
            idx = in_bin .& not_zero
            bins[i,j] = sum(N[idx])
        end
    end
    return (mids, bins)
end
export make_SSEdata, make_quantiles, make_SSEdata2, allpairwise, lrange, make_descendants, make_ancestors
export make_descendants_nodes
function descendant_nodes(node, data)
    desc_edge_idxs = findall(data.edges[:,1] .== node)
    desc = data.edges[desc_edge_idxs,:]
    res = desc[:,2]
    return(res)
end
@doc raw"""
make_ancestors(data)
Returns a dictionary that maps each node index to the index of its parent edge (the edge leading to that node).
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
data = make_SSEdata(phy, ρ)
make_ancestors(data)
```
with result
```julia
Dict{Int64, Int64} with 464 entries:
56 => 115
35 => 71
425 => 379
⋮ => ⋮
```
"""
function make_ancestors(data::SSEdata)
ntip = length(data.tiplab)
rootnode = ntip + 1
maxnode = maximum(data.edges)
ancestors = Dict(node => 0 for node in 1:maxnode if node != rootnode)
for (i, row) in enumerate(eachrow(data.edges))
anc, dec = row
ancestors[dec] = i
end
return(ancestors)
end
@doc raw"""
make_descendants(data)
Returns a dictionary that maps each internal node index to the indices of its descendant edges.
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
data = make_SSEdata(phy, ρ)
make_descendants(data)
```
with result
```julia
Dict{Int64, Vector{Any}} with 232 entries:
402 => [330, 331]
413 => [357, 360]
425 => [380, 381]
⋮ => ⋮
```
"""
function make_descendants(data::SSEdata)
ntip = length(data.tiplab)
rootnode = ntip + 1
maxnode = maximum(data.edges)
descendants = Dict(node => [] for node in rootnode:maxnode)
for (i, row) in enumerate(eachrow(data.edges))
anc, dec = row
if anc > ntip
append!(descendants[anc], i)
end
end
return(descendants)
end
function make_descendants(edges::Array{Int64,2})
ntip = size(edges)[1]÷2 + 1
rootnode = ntip + 1
maxnode = maximum(edges)
descendants = Dict(node => [] for node in rootnode:maxnode)
for (i, row) in enumerate(eachrow(edges))
anc, dec = row
if anc > ntip
append!(descendants[anc], i)
end
end
return(descendants)
end
@doc raw"""
make_descendants_nodes(data)
Returns a dictionary that maps each internal node index to the indices of its descendant nodes.
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
data = make_SSEdata(phy, ρ)
make_descendants_nodes(data)
```
"""
function make_descendants_nodes(data::SSEdata)
ntip = length(data.tiplab)
rootnode = ntip + 1
maxnode = maximum(data.edges)
descendants = Dict(node => [] for node in rootnode:maxnode)
for row in eachrow(data.edges)
anc, dec = row
if anc > ntip
append!(descendants[anc], dec)
end
end
return(descendants)
end
function parental_node(node, data)
parental_edge_idx = findall(data.edges[:,2] .== node)
parent_node = data.edges[parental_edge_idx,1][1]
return(parent_node)
end
function make_quantiles(d, k)
quantiles = zeros(k)
step = 0.5
for i in 1:k
p = (i-step)/k
quantiles[i] = Distributions.quantile(d, p)
end
return(quantiles)
end
function make_quantiles2(d, k)
ps = [(i-0.5)/k for i in 1:k]
quantiles = Distributions.quantile.(d, ps)
return(quantiles)
end
function make_quantiles3(d, k)
ps = collect(range(0.0, 1.0; length = k+2))
ps = ps[2:(length(ps)-1)]
quantiles = Distributions.quantile.(d, ps)
return(quantiles)
end
function SSEdata(phy::phylo, ρ::Float64)
make_SSEdata(phy, ρ)
end
function make_SSEdata(phy::phylo, datafile::String, ρ::Float64; include_traits = true)
    if contains_polytomies(phy)
        error("Your tree is not a binary tree (it has hard polytomies). This program does not support trees with hard polytomies.")
    end

    if include_traits
        df = CSV.File(datafile)
        trait_data = Dict(taxon => string(state) for (taxon, state) in zip(df.Taxon, df.state))
    else
        trait_data = Dict(taxon => "?" for taxon in phy.tip_label)
    end

    node_depth = phy.node_depths
    tiplab = phy.tip_label
    branching_times = phy.branching_times

    state_space = sort(unique(values(trait_data)))
    edges = convert.(Int64, phy.edge)
    el = phy.edge_length
    po = phy.po
    Nnode = phy.Nnode

    data = SSEdata(state_space, trait_data, edges, tiplab, node_depth, ρ, el, branching_times, po, Nnode)
    return(data)
end
@doc raw"""
make_SSEdata(phy, ρ)
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
data = make_SSEdata(phy, ρ)
```
"""
function make_SSEdata(phy::phylo, ρ::Float64)
trait_data = Dict(taxon => "?" for taxon in phy.tip_label)
node_depth = phy.node_depths
tiplab = phy.tip_label
branching_times = phy.branching_times
state_space = NaN
edges = convert.(Int64, phy.edge)
el = phy.edge_length
po = phy.po
if any(el .< 0)
error("Tree includes negative branch lengths.")
end
Nnode = phy.Nnode
data = SSEdata(state_space, trait_data, edges, tiplab, node_depth, ρ, el, branching_times, po, Nnode)
return(data)
end
function partition_postorder_indices(data)
ancestor_node = Dict(val => key for (key, val) in eachrow(data.edges))
d = Dict(node => 0 for node in data.edges[:,2])
parents = collect(1:length(data.tiplab))
res = [parents]
root_node = length(data.tiplab)+1
for i in 1:maximum(data.edges)
for node in parents
parent = ancestor_node[node]
if parent != root_node
d[parent] += 1
end
end
# find the twos
parents = Int64[]
for (key, val) in d
if val == 2
append!(parents, key)
delete!(d, key)
end
end
if isempty(parents)
break
end
append!(res, [parents])
end
return(res)
end
@doc raw"""
allpairwise(λ, μ)
Example:
```julia
using Pesto
lambda = [0.2, 0.3]
mu = [0.05, 0.10, 0.15, 0.20]
λ, μ = allpairwise(lambda, mu)
```
with result
```julia
([0.2, 0.3, 0.2, 0.3, 0.2, 0.3, 0.2, 0.3], [0.05, 0.05, 0.1, 0.1, 0.15, 0.15, 0.2, 0.2])
```
"""
function allpairwise(xs, ys)
nx = length(xs)
ny = length(ys)
k = nx * ny
λ = zeros(Base.eltype(xs), k)
μ = zeros(Base.eltype(ys), k)
for (i, (x, y)) in enumerate(Iterators.product(xs, ys))
λ[i] = x
μ[i] = y
end
return(λ, μ)
end
@doc raw"""
lrange(from, to, length)
Similar to `range`, but with logarithmic (proportionally equal) spacing between points.
Example:
```julia
using Pesto
lrange(0.001, 100.0, 6)
```
with result
```julia
6-element Vector{Float64}:
0.0010000000000000002
0.010000000000000004
0.10000000000000002
1.0000000000000004
10.000000000000002
100.00000000000004
```
"""
function lrange(from::Float64, to::Float64, length::Int64 = 6)
exp.(collect(range(log(from), log(to); length = length)))
end
export lp, ψ, Econstant, estimate_constant_bdp
@doc raw"""
lp(λ, μ, data)
From Louca and Pennell 2020 (Nature), eq. S28
```math
L = \frac{\rho^{n+1} \psi(t_1)} {\lambda (1 - E(t_1))^2} \times \prod_{i=1}^n \lambda \, \psi(t_i) \\
E(t) = 1 - \frac{\exp(\lambda - \mu)t}{\frac{1}{\rho} + \frac{\lambda}{\lambda -\mu} \Big ( \exp((\lambda - \mu)t) - 1 \Big)} \\
\psi(t) = \frac{e^{t(\lambda - \mu)}}{ [ 1 + \frac{\rho \lambda}{\lambda - \mu}(e^{t(\lambda - \mu)} - 1)]^{2}}
```
Logged:
```math
\log(L) = (n+1) \log(\rho) + \log(\psi(t_1)) - \log(\lambda) - 2 \log(1 - E(t_1)) + \sum_{i=1}^n \log(\lambda) + \log(\psi(t_i))
```
Example:
```julia
λ = 1.0
μ = 0.5
phy = readtree(Pesto.path("bears.tre"))
ρ = 1.0
data = make_SSEdata(phy, ρ)
lp(λ, μ, data)
```
"""
function lp(λ, μ, data::SSEdata)
    ρ = data.ρ
    ts = data.branching_times
    n = length(ts)

    logL = (n+1) * log(ρ) + log(ψ(ts[1], λ, μ, ρ))
    logL += - log(λ) - 2*log(1 - Econstant(ts[1], λ, μ, ρ))
    for i in 1:n
        logL += log(λ) + log(ψ(ts[i], λ, μ, ρ))
    end
    return(logL)
end
@doc raw"""
Equation S5 in Morlon et al. 2011 [PNAS]
```math
\psi(s, t) = e^{(\lambda - \mu)(t - s)} [ 1 + \frac{\frac{\lambda}{\lambda - \mu}(e^{t(\lambda - \mu)} - e^{s(\lambda-\mu)})}{\frac{1}{\rho} + \frac{\lambda}{\lambda - \mu} \times (e^{s(\lambda-\mu)}-1)}]^{-2}
```
We use the simplified form where `s = 0`:
```math
\psi(t) = \frac{e^{t(\lambda - \mu)}}{ [ 1 + \frac{\rho \lambda}{\lambda - \mu}(e^{t(\lambda - \mu)} - 1)]^{2}}
```
Example:
```julia
ρ = 1.0
λ = 1.0
μ = 0.5
t = 0.1
ψ(t, λ, μ, ρ)
```
"""
function ψ(t, λ, μ, ρ)
    num = exp(t * (λ - μ))
    denom = 1 + ((ρ * λ) / (λ - μ)) * (exp(t * (λ - μ)) - 1)
    res = num / (denom * denom)
    return res
end
@doc raw"""
from Morlon et al. 2011 [PNAS], eq. S4
```math
E(t) = 1 - \frac{\exp(t(\lambda - \mu))}{\frac{1}{\rho} + \frac{\lambda}{\lambda -\mu} \Big ( \exp((\lambda - \mu)t) - 1 \Big)}
```
"""
function Econstant(t, λ, μ, ρ)
    num = exp((λ - μ) * t)
    denom = (1 / ρ) + (λ / (λ - μ)) * (exp((λ - μ)*t) - 1)
    res = 1 - num/denom
    return res
end
@doc raw"""
estimate_constant_bdp(data::SSEdata[; xinit = [0.11, 0.09], lower = [1e-8, 1e-8], upper = [20.0, 20.0]])
Estimates the speciation and extinction rate under the reconstructed birth-death process with time-homogeneous rates.
Example:
```julia
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.67
data = make_SSEdata(phy, ρ)
λml, μml = estimate_constant_bdp(data)
```
"""
function estimate_constant_bdp(data::SSEdata; xinit = [0.11, 0.09], lower = [0.00000001, 0.00000001], upper = [20.0, 20.0])
## ML estimates of parameters
f(x) = -lp(x[1], x[2], data) ## function to minimize
inner_optimizer = Optim.GradientDescent()
optres = Optim.optimize(f, lower, upper, xinit, Optim.Fminbox(inner_optimizer))
λml, μml = optres.minimizer
return(λml, μml)
end
## Probability that a lineage at time `t` is not represented in the reconstructed tree
## * This equation does not depend on the topology, so we solve for it first
function extinction_ode(dE, E, p, t)
λ, μ, η, K = p
dE[:] .= μ .- (λ .+ μ .+ η) .* E .+ λ .* E.^2 .+ (η/(K-1)) .* (sum(E) .- E)
end
function extinction_ode_tv(dE, E, p, t)
λ, μ, η, K = p
dE[:] .= μ(t) .- (λ(t) .+ μ(t) .+ η(t)) .* E .+ λ(t) .* E.^2 .+ (η(t)/(K-1)) .* (sum(E) .- E)
end
function extinction_prob(model::SSEconstant)
return(extinction_ode)
end
function extinction_prob(model::SSEtimevarying)
return(extinction_ode_tv)
end
## Probability of observing the branch at time `t`
## * We solve this equation in the postorder traversal
function backward_ode(dD, D, p, t)
λ, μ, η, K, E = p
Et = E(t)
dD[:] .= - (λ .+ μ .+ η) .* D .+ 2 .* λ .* D .* Et .+ (η/(K-1)) .* (sum(D) .- D)
end
function backward_ode_tv(dD, D, p, t)
λ, μ, η, K, E = p
Et = E(t)
dD[:] .= - (λ(t) .+ μ(t) .+ η(t)) .* D .+ 2 .* λ(t) .* D .* Et .+ (η(t)/(K-1)) .* (sum(D) .- D)
end
function backward_prob(model::SSEconstant)
return(backward_ode)
end
function backward_prob(model::SSEtimevarying)
return(backward_ode_tv)
end
## This ODE is the previous one times minus one
## * We solve this equation in the preorder traversal, albeit with different starting values for each branch
function forward_ode(dF, F, p, t)
λ, μ, η, K, E = p
Et = E(t)
dF[:] .= (-1) .* ( - (λ .+ μ .+ η) .* F .+ 2 .* λ .* F .* Et .+ (η/(K-1)) .* (sum(F) .- F))
end
function forward_ode_tv(dF, F, p, t)
λ, μ, η, K, E = p
Et = E(t)
dF[:] .= (-1) .* ( - (λ(t) .+ μ(t) .+ η(t)) .* F .+ 2 .* λ(t) .* F .* Et .+ (η(t)/(K-1)) .* (sum(F) .- F))
end
function forward_prob(model::SSEconstant)
return(forward_ode)
end
function forward_prob(model::SSEtimevarying)
return(forward_ode_tv)
end
## This is the ODE to solve for the number of rate shifts
function number_of_shifts!(dN, N, p, t)
η, K, S, D = p
Dt = D(t)
St = S(t)
r = -(η/(K-1.0))
LoopVectorization.@turbo for i in 1:K, j in 1:K
dN[i,j] = r * St[j] * Dt[i] / Dt[j]
end
## set the diagonal to zero afterwards, since LoopVectorization
## cannot handle an if statement inside the loop
LoopVectorization.@turbo for i in 1:K
dN[i,i] = 0.0
end
end
function number_of_shifts_tv!(dN, N, p, t)
η, K, S, D = p
Dt = D(t)
St = S(t)
r = -(η(t)/(K-1.0))
LoopVectorization.@turbo for i in 1:K, j in 1:K
dN[i,j] = r * St[j] * Dt[i] / Dt[j]
end
LoopVectorization.@turbo for i in 1:K
dN[i,i] = 0.0
end
end
function shift_problem(model::SSEconstant)
return(number_of_shifts!)
end
function shift_problem(model::SSEtimevarying)
return(number_of_shifts_tv!)
end
## wrapper for the full analysis
export pesto
function pesto(data; n = 6, sd = 0.587)
    λml, μml = estimate_constant_bdp(data)

    dλ = Distributions.LogNormal(log(λml), sd)
    dμ = Distributions.LogNormal(log(μml), sd)

    λquantiles = make_quantiles(dλ, n)
    μquantiles = make_quantiles(dμ, n)
    λ, μ = allpairwise(λquantiles, μquantiles)

    η = optimize_eta(λ, μ, data)
    model = SSEconstant(λ, μ, η)

    rates = birth_death_shift(model, data)
    return(model, rates)
end
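## Example (as in the README; ρ = 0.635 is the primates sampling fraction):
##   phy = readtree(Pesto.path("primates.tre"))
##   data = SSEdata(phy, 0.635)
##   model, rates = pesto(data)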
export backwards_forwards_pass
function backwards_forwards_pass(model, data; alg = OrdinaryDiffEq.Tsit5())
E = extinction_probability(model, data)
Ds, sf = postorder(model, data, E; alg = alg)
Fs = preorder(model, data, E, Ds; alg = alg)
return(Ds, Fs)
end
## Wrapper function
export birth_death_shift
export plottree
@doc raw"""
birth_death_shift(model, data)
Calculates average branch rates under the birth-death-shift model with a finite state space.
Example:
```julia
using Pesto
phy = readtree(Pesto.path("bears.tre"))
ρ = 1.0
data = make_SSEdata(phy, "", ρ; include_traits = false)
λ = [0.1, 0.2]
μ = [0.05, 0.15]
η = 0.05
model = SSEconstant(λ, μ, η)
res = birth_death_shift(model, data)
```
"""
function birth_death_shift(model, data; nshifts = true, shift_bayes_factor = true)
    Ds, Fs = backwards_forwards_pass(model, data)
    Ss = ancestral_state_probabilities(data, Ds, Fs)

    rates = tree_rates(data, model, Fs, Ss)

    if nshifts
        nshift = compute_nshifts(model, data, Ds, Ss; ape_order = false)
        push!(nshift, 0.0)
        rates[!,"nshift"] = nshift
    end

    if shift_bayes_factor
        bf = posterior_prior_shift_odds(model, data)
        push!(bf, NaN)
        rates[!,"shift_bf"] = bf
        rates[!,"shift_bf_log"] = log10.(bf)
    end

    return(rates)
end
export extinction_probability
function extinction_probability(model::SSE, data::SSEdata)
alg = OrdinaryDiffEq.Tsit5()
K = number_of_states(model)
pE = (model.λ, model.μ, model.η, K)
tree_height = maximum(data.node_depth)
tspan = (0.0, tree_height)
E0 = repeat([1.0 - data.ρ], K)
ode = extinction_prob(model)
pr = OrdinaryDiffEq.ODEProblem(ode, E0, tspan, pE);
## use low tolerance because we only solve E once, so we can afford it
E = OrdinaryDiffEq.solve(pr, alg, abstol = 1e-10, reltol = 1e-10)
return(E)
end
export logL_root
function number_of_states(model::SSEtimevarying)
x = model.λ(0.0)
n = length(x)
return(n)
end
function number_of_states(model::SSEconstant)
n = length(model.λ)
return(n)
end
function get_speciation_rates(model::SSEconstant, t::Float64)
return(model.λ)
end
function get_speciation_rates(model::SSEtimevarying, t::Float64)
return(model.λ(t))
end
function logL_root(model::SSE, data::SSEdata)
    E = extinction_probability(model, data)
    D_ends, sf = postorder_nosave(model, data, E)

    root_index = length(data.tiplab)+1
    root_age = data.node_depth[root_index]
    left_edge, right_edge = findall(data.edges[:,1] .== root_index)
    D_left = D_ends[left_edge,:]
    D_right = D_ends[right_edge,:]
    D = D_left .* D_right

    n = number_of_states(model)
    freqs = repeat([1.0 / n], n)

    ## condition the probability density on the survival of both root
    ## lineages: to observe a reconstructed tree in the first place,
    ## at least two lineages must have survived to the present
    nonextinct = (1.0 .- E(root_age)).^2
    D = D ./ nonextinct

    prob = sum(freqs .* D)
    logL = log(prob) + sum(sf)
    return(logL)
end
function sselp(η, λ, μ, data)
model = SSEconstant(λ, μ, η)
logL_root(model, data)
end
function sselp_tv(η, λ, μ, data)
model = SSEtimevarying(λ, μ, t -> η)
logL_root(model, data)
end
## number of shifts in the phylogeny
export compute_nshifts
export state_shifts
function state_shifts(model::SSE, data::SSEdata; ape_order = true)
Ds, Fs = backwards_forwards_pass(model, data);
Ss = ancestral_state_probabilities(data, Ds, Fs);
state_shifts(model, data, Ds, Ss; ape_order = ape_order)
end
function state_shifts(model::SSE, data::SSEdata, Ds, Ss; alg = OrdinaryDiffEq.Tsit5(), ape_order = true)
nbranches = size(data.edges)[1]
K = number_of_states(model)
nshifts = zeros(nbranches, K, K)
ode = shift_problem(model)
Threads.@threads for edge_idx in 1:nbranches
a = Ds[edge_idx].t[end]
b = Ds[edge_idx].t[1]
tspan = (a,b)
N0 = zeros(K,K)
p = (model.η, K, Ss[edge_idx], Ds[edge_idx])
prob = OrdinaryDiffEq.ODEProblem(ode, N0, tspan, p)
sol = OrdinaryDiffEq.solve(prob, alg, isoutofdomain = notneg)
nshifts[edge_idx,:,:] = sol[end]
end
if ape_order
## reorder to ape node indices
ancestors = make_ancestors(data)
node_nshifts = zeros(maximum(data.edges), K, K)
for i in 1:maximum(data.edges)
if i == length(data.tiplab)+1
node_nshifts[i,:,:] .= 0.0
else
edge_idx = ancestors[i]
node_val = nshifts[edge_idx,:,:]
node_nshifts[i,:,:] = node_val
end
end
return(node_nshifts)
else
return(nshifts)
end
end
function compute_nshifts(model, data; ape_order = true)
Ds, Fs = backwards_forwards_pass(model, data);
Ss = ancestral_state_probabilities(data, Ds, Fs);
compute_nshifts(model, data, Ds, Ss; ape_order = ape_order)
end
function compute_nshifts(model, data, Ds, Ss; ape_order = true)
nshifts = state_shifts(model, data, Ds, Ss; ape_order = ape_order)
res = sum(nshifts, dims = 2:3)[:,1,1]
return(res)
end
export optimize_eta
@doc raw"""
optimize_eta(λ, μ, data)
Finds the maximum-likelihood parameter value for η (the transition rate) under the birth-death-shift model with a finite state space, conditional on λ and μ.
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
data = make_SSEdata(phy, ρ)
λ = [0.1, 0.2, 0.3, 0.1, 0.2, 0.3, 0.1, 0.2, 0.3]
μ = [0.09, 0.09, 0.09, 0.19, 0.19, 0.19, 0.29, 0.29, 0.29]
ηml = optimize_eta(λ, μ, data)
#model = SSEconstant(λ, μ, ηml)
```
"""
function optimize_eta(λ::Vector{Float64}, μ::Vector{Float64}, data; lower = -Inf, upper = Inf, xinit = -Inf)
    if !isfinite(xinit)
        xinit = minimum([
            0.1 / sum(data.branch_lengths),
            maximum(λ) / 4
        ])
    end

    if !isfinite(lower)
        lower = 1.0e-10
    end

    if !isfinite(upper)
        upper = maximum(λ) / 2
    end

    ## find the maximum-likelihood estimate of eta, the transition rate
    f(η) = -sselp(η[1], λ, μ, data)

    ## define the gradient function with respect to η
    g!(G, η) = begin
        G[1] = ForwardDiff.derivative(f, η[1])
    end

    inner_optimizer = Optim.GradientDescent()
    opts = Optim.Options(x_tol = 0.1, f_tol = 0.1, g_tol = 0.1, show_trace = false)
    result = Optim.optimize(f, g!, [lower], [upper], [xinit], Optim.Fminbox(inner_optimizer), opts)

    ηml = result.minimizer[1]
    return(ηml)
end
function optimize_eta(λ::Function, μ::Function, data; lower = -Inf, upper = Inf, xinit = -Inf)
    ## the rates are functions of time here; evaluate at the present (t = 0.0)
    ## to get a scale, since `maximum` of a function is undefined
    λ0 = maximum(λ(0.0))

    if !isfinite(xinit)
        xinit = minimum([
            0.1 / sum(data.branch_lengths),
            λ0 / 4
        ])
    end

    if !isfinite(lower)
        lower = 1.0e-10
    end

    if !isfinite(upper)
        upper = λ0 / 2
    end

    ## find the maximum-likelihood estimate of eta, the transition rate
    f(η) = -sselp_tv(η[1], λ, μ, data)

    ## define the gradient function with respect to η
    g!(G, η) = begin
        G[1] = ForwardDiff.derivative(f, η[1])
    end

    inner_optimizer = Optim.GradientDescent()
    opts = Optim.Options(x_tol = 0.1, f_tol = 0.1, g_tol = 0.1, show_trace = false)
    result = Optim.optimize(f, g!, [lower], [upper], [xinit], Optim.Fminbox(inner_optimizer), opts)

    ηml = result.minimizer[1]
    return(ηml)
end
export postorder
function postorder(model::SSE, data::SSEdata, E; alg = OrdinaryDiffEq.Tsit5())
## Pre-compute descendants in hashtable
descendants = make_descendants(data)
n = number_of_states(model)
## Postorder traversal: computing the branch probabilities through time
nrows = size(data.edges, 1)
Ntip = length(data.tiplab)
elt = eltype(model)
## Storing the Ds
Ds = Dict()
## Storing the solution at the end of the branch
D_ends = zeros(elt, nrows, n)
## Storing the scaling factors
sf = zeros(elt, nrows)
pD = (model.λ, model.μ, model.η, n, E)
u0 = ones(elt, n)
tspan = (0.0, 1.0)
ode = backward_prob(model)
prob = OrdinaryDiffEq.ODEProblem(ode, u0, tspan, pD)
for m in data.po
anc, dec = data.edges[m,:]
if dec < Ntip+1
species = data.tiplab[dec]
trait_value = data.trait_data[species]
if trait_value == "?" ## If we don't know or didn't observe the trait
D = repeat([1.0], n) .* data.ρ
else ## If we observed the trait and measured it
trait_idx = convert.(Float64, trait_value .== data.state_space)
D = trait_idx .* data.ρ
end
u0 = elt.(D)
node_age = data.node_depth[dec]
parent_node_age = data.node_depth[anc]
tspan = (node_age, parent_node_age)
prob = OrdinaryDiffEq.remake(prob, u0 = u0, tspan = tspan)
sol = OrdinaryDiffEq.solve(prob, alg, isoutofdomain = notneg)
Ds[m] = sol
sol = sol[end]
k = sum(sol)
sol = sol ./ k
D_ends[m,:] = sol
sf[m] = log(k)
end
end
for m in data.po
anc, dec = data.edges[m,:]
if dec > Ntip
left_edge, right_edge = descendants[dec]
node_age = data.node_depth[dec]
D_left = D_ends[left_edge,:]
D_right = D_ends[right_edge,:]
λt = get_speciation_rates(model, node_age)
D = D_left .* D_right .* λt
u0 = D
parent_node_age = data.node_depth[anc]
tspan = (node_age, parent_node_age)
prob = OrdinaryDiffEq.remake(prob, u0 = u0, tspan = tspan)
sol = OrdinaryDiffEq.solve(prob, alg, isoutofdomain = notneg)
Ds[m] = sol
sol = sol[end]
k = sum(sol)
sol = sol ./ k
D_ends[m,:] = sol
if k > 0.0
sf[m] += log(k)
end
end
end
return(Ds, sf)
end
export postorder_nosave
notneg(u,p,t) = any(x->x<0,u)
function eltype(model::SSEconstant)
return(typeof(model.η))
end
function eltype(model::SSEtimevarying)
return(typeof(model.η(0.0)))
end
"""
postorder_nosave(model::SSE, data::SSEdata, E, alg = OrdinaryDiffEq.Tsit5())

Like `postorder`, but stores only the branch-end probabilities `D` and the log
scaling factors, rather than the full solutions through time.
"""
function postorder_nosave(model::SSE, data::SSEdata, E, alg = OrdinaryDiffEq.Tsit5())
## Pre-compute descendants in hashtable
descendants = make_descendants(data)
ancestors = make_ancestors(data)
K = number_of_states(model)
## Postorder traversal: computing the branch probabilities through time
nrows = size(data.edges, 1)
Ntip = length(data.tiplab)
## Storing the solution at the end of the branch
elt = eltype(model)
D_ends = zeros(elt, nrows, K)
## Storing the scaling factors
sf = zeros(elt, nrows)
pD = (model.λ, model.μ, model.η, K, E)
u0 = ones(elt, K)
tspan = (0.0, 1.0)
ode = backward_prob(model)
prob = OrdinaryDiffEq.ODEProblem(ode, u0, tspan, pD)
for m in data.po
anc, dec = data.edges[m,:]
node_age = data.node_depth[dec]
parent_node_age = data.node_depth[anc]
tspan = (node_age, parent_node_age)
if dec < Ntip+1
species = data.tiplab[dec]
trait_value = data.trait_data[species]
if trait_value == "?" ## If we don't know or didn't observe the trait
D = ones(elt, K) .* data.ρ
else ## If we observed the trait and measured it
trait_idx = convert.(elt, trait_value .== data.state_space)
D = trait_idx .* data.ρ
end
u0 = elt.(D)
else
left_edge, right_edge = descendants[dec]
D_left = D_ends[left_edge,:]
D_right = D_ends[right_edge,:]
λt = get_speciation_rates(model, node_age)
D = D_left .* D_right .* λt
u0 = D
end
prob = OrdinaryDiffEq.remake(prob, u0 = u0, tspan = tspan)
sol = OrdinaryDiffEq.solve(prob, alg, isoutofdomain = notneg, save_everystep = false)
sol = sol[end]
c = sum(sol)
sol = sol ./ c
D_ends[m,:] = sol
if c > 0.0
sf[m] += log(c)
end
end
return(D_ends, sf)
end
export preorder
function preorder(model::SSE, data::SSEdata, E, Ds; alg = OrdinaryDiffEq.Tsit5())
## Precompute ancestor edges
ancestors = make_ancestors(data)
descendants = make_descendants(data)
## Preorder pass, compute `F(t)`
K = number_of_states(model)
elt = eltype(model)
root_node = length(data.tiplab)+1
nrows = size(data.edges, 1)
## Store the whole `F(t)` per branch
Fs = Dict()
pF = (model.λ, model.μ, model.η, K, E)
ode = forward_prob(model)
tspan = (0.0, 1.0)
u0 = ones(elt, K) ## placeholder; remade with the branch-specific u0 and tspan below
prob = OrdinaryDiffEq.ODEProblem(ode, u0, tspan, pF)
for m in reverse(data.po)
anc = data.edges[m,1]
dec = data.edges[m,2]
## if root
if anc == root_node
F_parent = ones(elt, K)
left_edge, right_edge = descendants[root_node]
root_age = maximum(data.node_depth)
λroot = get_speciation_rates(model, root_age)
D_parent = Ds[left_edge][end] .* Ds[right_edge][end] .* λroot
else
parent_edge = ancestors[anc]
F_parent = Fs[parent_edge][end]
D_parent = Ds[parent_edge][1]
end
Dm = Ds[m][end]
F_start = D_parent .* F_parent ./ Dm
F_start = F_start ./ sum(F_start) ## Normalize, because these numbers can get very tiny (1E-10)
node_age = data.node_depth[dec]
parent_node = parental_node(dec, data)
parent_node_age = data.node_depth[parent_node]
tspan = (parent_node_age, node_age)
u0 = F_start
prob = OrdinaryDiffEq.remake(prob, u0 = u0, tspan = tspan)
sol = OrdinaryDiffEq.solve(prob, alg, isoutofdomain = notneg)
Fs[m] = sol
end
return(Fs)
end
export posterior_shift_prob
export prior_shift_prob
export posterior_prior_shift_odds
function Qmatrix(model)
K = length(model.λ)
Q = zeros(K,K)
Q .= model.η / (K-1)
for i in 1:K
Q[i,i] = - model.η
end
return(Q)
end
function Amatrix(model, E, Q, t)
A = LinearAlgebra.diagm(- model.λ .- model.μ .+ 2 .* model.λ .* E(t)) .+ Q
return(A)
end
function P(model, t, Δt, D, E, Q)
K = length(model.λ)
Dt = D(t)
A = Amatrix(model, E, Q, t)
P1 = (LinearAlgebra.I(K) .- Δt .* A) .* (Dt * ones(K)')
colSums = ones(K)*ones(K)' * P1
res = P1 ./ colSums
return(res)
end
function posterior_shift_prob_difference_eq(model, data; n_knots = 20)
E = extinction_probability(model, data);
## there is no point in factoring this out, because the rest of the function is much slower
Ds, Fs = backwards_forwards_pass(model, data);
Ss = ancestral_state_probabilities(data, Ds, Fs);
Q = Qmatrix(model)
K = length(model.λ)
n_edges = length(data.branch_lengths)
prob_no_shift = zeros(n_edges, n_knots-1)
times = [
collect(range(Ds[i].t[1], Ds[i].t[end]; length = n_knots)) for i in 1:n_edges
]
Threads.@threads for edge_index in 1:n_edges
t0 = Ds[edge_index].t[1]
t1 = Ds[edge_index].t[end]
span = t0 - t1
Δt = span/(n_knots-1)
S = Ss[edge_index]
ts = times[edge_index]
for i in 1:(n_knots-1)
t = ts[i]
St = S(t)
Sm = ones(K) * transpose(St)
P1 = P(model, t, Δt, Ds[edge_index], E, Q)
P_ij = (1 .- LinearAlgebra.I(K)) .* P1 .* Sm
P_no_shift = 1 .- sum(P_ij)
prob_no_shift[edge_index,i] = P_no_shift
end
end
shift_prob = 1 .- prod(prob_no_shift, dims = 2)[:,1]
return(shift_prob)
end
function no_shifts_prob(dlnX, lnX, p, t)
D, S, η, K = p
Dt = D(t)
dlnX[1] = sum((η/(K-1)) .* S(t) .* (sum(Dt) .- Dt) ./ Dt)
end
function no_shifts_prob_tv(dlnX, lnX, p, t)
D, S, η, K = p
Dt = D(t)
dlnX[1] = sum((η(t)/(K-1)) .* S(t) .* (sum(Dt) .- Dt) ./ Dt)
end
function no_shifts_problem(model::SSEconstant)
return(no_shifts_prob)
end
function no_shifts_problem(model::SSEtimevarying)
return(no_shifts_prob_tv)
end
function posterior_shift_prob(model::SSE, data::SSEdata)
alg = OrdinaryDiffEq.Tsit5()
## there is no point in factoring this out, because the rest of the function is much slower
Ds, Fs = backwards_forwards_pass(model, data);
Ss = ancestral_state_probabilities(data, Ds, Fs);
n_edges = length(data.branch_lengths)
K = number_of_states(model)
lnX = zeros(n_edges)
ode = no_shifts_problem(model)
Threads.@threads for edge_index in 1:n_edges
S = Ss[edge_index]
D = Ds[edge_index]
t0 = Ds[edge_index].t[1]
t1 = Ds[edge_index].t[end]
tspan = (t1, t0)
p = (D, S, model.η, K)
u0 = zeros(1)
prob = OrdinaryDiffEq.ODEProblem(ode, u0, tspan, p)
sol = OrdinaryDiffEq.solve(prob, alg, save_everystep = false)
lnX[edge_index] = sol[end][1]
end
prob_atleast_one_shift = 1.0 .- exp.(lnX)
return(prob_atleast_one_shift)
end
## https://en.wikipedia.org/wiki/Poisson_distribution
function poisson_pmf(model::SSEconstant, t0::Float64, t1::Float64, n::Int64)
η = model.η
time = t1 - t0
r = η * time
res = (r^n) * exp(-r) / factorial(n)
end
function poisson_zero(model::SSEconstant, t0::Float64, t1::Float64)
η = model.η
time = t1 - t0
r = η * time
res = exp(-r)
end
# https://gtribello.github.io/mathNET/resources/jim-chap22.pdf
function poisson_zero(model::SSEtimevarying, t0::Float64, t1::Float64)
x, w = FastGaussQuadrature.gausslegendre(10)
η_int = quadrature(model.η, t0, t1, x, w)
res = exp(-η_int)
end
function prior_shift_prob(model, data)
n_edges = length(data.branch_lengths)
prob_no_shift = zeros(n_edges)
for edge_index in 1:n_edges
node_index = data.edges[edge_index,2]
bl = data.branch_lengths[edge_index]
t0 = data.node_depth[node_index]
t1 = t0 + bl
prob_no_shift[edge_index] = poisson_zero(model, t0, t1)
end
prob_shift = 1.0 .- prob_no_shift
return(prob_shift)
end
# Shi, J. J., & Rabosky, D. L. (2015). Speciation dynamics during the global radiation of extant bats. Evolution, 69(6), 1528-1545.
function posterior_prior_shift_odds(model, data)
prior_atleast_one_shift = Pesto.prior_shift_prob(model, data)
prior_no_shifts = 1.0 .- prior_atleast_one_shift
posterior_atleast_one_shift = posterior_shift_prob(model, data)
posterior_no_shifts = 1.0 .- posterior_atleast_one_shift
odds = (posterior_atleast_one_shift ./ prior_atleast_one_shift) ./ (posterior_no_shifts ./ prior_no_shifts)
return(odds)
end
export tip_rates
function tip_rates(model::SSEconstant, data::SSEdata)
    Ds, Fs = backwards_forwards_pass(model, data)
    Ss = ancestral_state_probabilities(data, Ds, Fs)

    ntips = length(data.tiplab)
    ancestors = make_ancestors(data)

    res = zeros(ntips, 4)
    rates = (
        model.λ,
        model.μ,
        model.λ .- model.μ,
        model.μ ./ model.λ
    )

    for i in 1:ntips
        edge_index = ancestors[i]
        for (j, rate) in enumerate(rates)
            t = Ds[edge_index].t[1]
            S = Ss[edge_index](t)
            res[i,j] = rate' * S
        end
    end

    names = ["lambda", "mu", "netdiv", "relext"]
    df = DataFrames.DataFrame(res, names)
    df[!,"species"] = data.tiplab

    return(df)
end
export tree_rates
export ancestral_state_probabilities
#https://en.wikipedia.org/wiki/Gaussian_quadrature
function quadrature(f, t0, t1, x, w)
## change of interval from t0 => t1 to -1 => 1
g(x) = ((t1 - t0) / 2.0) * f(((t1 - t0)/2.0)*x + (t1 + t0)/2.0)
I = LinearAlgebra.dot(w, g.(x))
return(I)
end
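## Illustrative check (not part of the package): ∫ sin(t) dt from 0 to π equals 2
##   x, w = FastGaussQuadrature.gausslegendre(10)
##   quadrature(sin, 0.0, π, x, w)  ## ≈ 2.0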
function meanbranch(f, t0, t1, x, w)
# I is the integral
I = quadrature(f, t0, t1, x, w)
# divide by time interval, since we want the average rate
res = I / (t1 - t0)
return(res)
end
function tree_rates(data, model; n = 10)
Ds, Fs = backwards_forwards_pass(model, data);
Ss = ancestral_state_probabilities(data, Ds, Fs);
tree_rates(data, model, Fs, Ss; n = n)
end
function tree_rates(data::SSEdata, model::SSEconstant, Fs, Ss; n = 10)
rates = zeros(size(data.edges)[1], 8)
x, w = FastGaussQuadrature.gausslegendre(n)
Threads.@threads for i = 1:size(data.edges)[1]
t0, t1 = extrema(Fs[i].t)
rates[i,1] = meanbranch(t -> LinearAlgebra.dot(model.λ, Ss[i](t)), t0, t1, x, w)
rates[i,2] = meanbranch(t -> LinearAlgebra.dot(model.μ, Ss[i](t)), t0, t1, x, w)
rates[i,3] = meanbranch(t -> LinearAlgebra.dot(model.λ .- model.μ, Ss[i](t)), t0, t1, x, w)
rates[i,4] = meanbranch(t -> LinearAlgebra.dot(model.μ ./ model.λ, Ss[i](t)), t0, t1, x, w)
## difference from oldest to youngest point on branch
## t0 is youngest, t1 is oldest
rates[i,5] = LinearAlgebra.dot(model.λ, Ss[i](t0)) - LinearAlgebra.dot(model.λ, Ss[i](t1))
rates[i,6] = LinearAlgebra.dot(model.μ, Ss[i](t0)) - LinearAlgebra.dot(model.μ, Ss[i](t1))
rates[i,7] = LinearAlgebra.dot(model.λ .- model.μ, Ss[i](t0)) - LinearAlgebra.dot(model.λ .- model.μ, Ss[i](t1))
rates[i,8] = LinearAlgebra.dot(model.μ ./ model.λ, Ss[i](t0)) - LinearAlgebra.dot(model.μ ./ model.λ, Ss[i](t1))
end
node = data.edges[:,2]
edge = 1:size(data.edges)[1]
names = ["mean_lambda", "mean_mu", "mean_netdiv", "mean_relext",
"delta_lambda", "delta_mu", "delta_netdiv", "delta_relext"]
df = DataFrames.DataFrame(rates, names)
df[!, "node"] = node
df[!, "edge"] = edge
root_index = length(data.tiplab)+1
push!(df, [NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, root_index, 0])
return(df)
end
function tree_rates(data::SSEdata, model::SSEtimevarying, Fs, Ss; n = 10)
rates = zeros(size(data.edges)[1], 8)
x, w = FastGaussQuadrature.gausslegendre(n)
Threads.@threads for i = 1:size(data.edges)[1]
t0, t1 = extrema(Fs[i].t)
rates[i,1] = meanbranch(t -> LinearAlgebra.dot(model.λ(t), Ss[i](t)), t0, t1, x, w)
rates[i,2] = meanbranch(t -> LinearAlgebra.dot(model.μ(t), Ss[i](t)), t0, t1, x, w)
rates[i,3] = meanbranch(t -> LinearAlgebra.dot(model.λ(t) .- model.μ(t), Ss[i](t)), t0, t1, x, w)
rates[i,4] = meanbranch(t -> LinearAlgebra.dot(model.μ(t) ./ model.λ(t), Ss[i](t)), t0, t1, x, w)
## difference from oldest to youngest point on branch
## t0 is youngest, t1 is oldest
rates[i,5] = LinearAlgebra.dot(model.λ(t0), Ss[i](t0)) - LinearAlgebra.dot(model.λ(t1), Ss[i](t1))
rates[i,6] = LinearAlgebra.dot(model.μ(t0), Ss[i](t0)) - LinearAlgebra.dot(model.μ(t1), Ss[i](t1))
rates[i,7] = LinearAlgebra.dot(model.λ(t0) .- model.μ(t0), Ss[i](t0)) - LinearAlgebra.dot(model.λ(t1) .- model.μ(t1), Ss[i](t1))
rates[i,8] = LinearAlgebra.dot(model.μ(t0) ./ model.λ(t0), Ss[i](t0)) - LinearAlgebra.dot(model.μ(t1) ./ model.λ(t1), Ss[i](t1))
end
node = data.edges[:,2]
edge = 1:size(data.edges)[1]
names = ["mean_lambda", "mean_mu", "mean_netdiv", "mean_relext",
"delta_lambda", "delta_mu", "delta_netdiv", "delta_relext"]
df = DataFrames.DataFrame(rates, names)
df[!, "node"] = node
df[!, "edge"] = edge
root_index = length(data.tiplab)+1
push!(df, [NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN, root_index, 0])
return(df)
end
function ancestral_state_probabilities(data::SSEdata, Ds, Fs)
Ss = Dict()
for edge_idx in 1:(maximum(data.edges)-1)
Ss[edge_idx] = t -> Fs[edge_idx](t) .* Ds[edge_idx](t) ./ (sum(Fs[edge_idx](t) .* Ds[edge_idx](t)))
end
return (Ss)
end
function ancestral_state_probabilities(Ds, Fs)
Ss = Dict()
for edge_idx in collect(keys(Ds))
Ss[edge_idx] = t -> Fs[edge_idx](t) .* Ds[edge_idx](t) ./ (sum(Fs[edge_idx](t) .* Ds[edge_idx](t)))
end
return (Ss)
end
## read newick
function readfile(filename)
io = open(filename, "r")
s = read(io, String)
close(io)
return(s)
end
function readnewick(filename)
s = readfile(filename)
s = replace(s, r"^_+|_+$" => "")
s = replace(s, r"[ \t]" => "")
s = s[findfirst('(', s):end]
s = s[1:findfirst(';', s)]
s = stripcomments(s)
tokens = tokenize(s)
ntip = sum(1 for token in tokens if token == "(") +1
nroot = ntip+1
idx = [1,1]
edges = zeros(Int64, 2*(ntip-1), 2)
edges[1,1] = nroot
el = zeros(size(edges)[1])
tiplabs = String[]
tokens2 = tokens[2:end-2]
left, right = partition(tokens2)
for side in [left, right]
if length(side) == 1
terminaledge!(edges, el, tiplabs, side, idx, nroot)
else
internaledge!(edges, el, tiplabs, side, idx, nroot)
end
end
node_depths = get_node_depths(edges, el)
Nnode = length(tiplabs)-1
po = postorder(edges)
branching_times = get_branching_times(node_depths, ntip)
phylo(
edges,
el,
Nnode,
tiplabs,
node_depths,
branching_times,
po
)
end
function get_branching_times(node_depths, n_tips)
branching_times = sort(node_depths[(n_tips+1):end], rev = true)
return(branching_times)
end
function postorder!(po, edges, n_tips, descendants, edge_index)
dec = edges[edge_index,2]
if dec > n_tips
left,right = descendants[dec]
postorder!(po, edges, n_tips, descendants, left) ## left subtree
postorder!(po, edges, n_tips, descendants, right) ## right subtree
end
append!(po, edge_index)
end
function postorder(edges)
n_tips = (size(edges)[1]+2) ÷ 2
po = Int64[]
descendants = make_descendants(edges)
root_node = n_tips+1
left, right = descendants[root_node]
postorder!(po, edges, n_tips, descendants, left)
postorder!(po, edges, n_tips, descendants, right)
return(po)
end
function nd!(node_depths, t, edges, el, n_tips, descendants, edge_index)
dec = edges[edge_index,2]
t += el[edge_index]
node_depths[dec] = t
## if internal node
if dec > n_tips
left, right = descendants[dec]
nd!(node_depths, t, edges, el, n_tips, descendants, left) ## left subtree
nd!(node_depths, t, edges, el, n_tips, descendants, right) ## right subtree
end
## if is tip, do nothing
end
function get_node_depths(edges, el)
n_tips = (size(edges)[1]+2) ÷ 2
n_nodes = size(edges)[1]+1
node_depths = zeros(Float64, n_nodes)
root_node = n_tips+1
descendants = make_descendants(edges)
left, right = descendants[root_node]
t = 0.0 ## t is not mutable/not a reference
nd!(node_depths, t, edges, el, n_tips, descendants, left)
nd!(node_depths, t, edges, el, n_tips, descendants, right)
th = maximum(node_depths) ## tree height
node_depths = th .- node_depths ## in units of time before the present
return(node_depths)
end
function stripcomments(s)
res = replace(s, r"\[.*?\]" => "")
return(res)
end
function tokenize(s)
tokens = String[]
## strip everything between square brackets
s = stripcomments(s)
len = length(s)
single_tokens = Set([')', '(', ',', ';'])
i = 1
while i <= len
if s[i] ∈ single_tokens
token = string(s[i])
append!(tokens, [token])
i += 1
else
l = Int64[]
firstcomma = findfirst(',', @view s[i:end])
firstclose = findfirst(')', @view s[i:end])
if !isnothing(firstcomma)
append!(l, firstcomma-1)
end
if !isnothing(firstclose)
append!(l, firstclose-1)
end
if !isempty(l)
close_idx = minimum(l)
token = @view s[i:close_idx+i-1]
append!(tokens, [token])
i += length(token)
else
i += 1
end
end
end
return(tokens)
end
function parse_brlen(s)
res = parse(Float64, split(s, ':')[end])
return(res)
end
function parse_tiplab(s)
res = split(s, ':')[1]
return(res)
end
function findsplit(tokens)
    ps = 0

    for (i, token) in enumerate(tokens)
        if token == "("
            ps += 1
        elseif token == ")"
            ps -= 1
        end

        if (token == ",") && (ps == 0)
            return(i)
        end
    end
    error("split not found")
end
function partition(tokens)
comma = findsplit(tokens)
left = @view tokens[1:comma-1]
right = @view tokens[1+comma:end]
return (left, right)
end
function internaledge!(edges, el, tiplabs, tokens, idx, node)
l = parse_brlen(tokens[end])
el[idx[1]] = l
tokens = tokens[2:end-2]
edges[idx[1],1] = node
edges[idx[1],2] = maximum(edges)+1
node = edges[idx[1],2]
idx[1] += 1
left, right = partition(tokens)
for branch in [left, right]
if !isempty(branch)
if branch[end][1] != ':'
terminaledge!(edges, el, tiplabs, branch[1], idx, node)
else
internaledge!(edges, el, tiplabs, branch, idx, node)
end
end
end
end
function terminaledge!(edges, el, tiplabs, s, idx, node)
edges[idx[1],1] = node
edges[idx[1],2] = idx[2]
l = parse_brlen(s)
el[idx[1]] = l
tiplab = String(split(s, ':')[1])
push!(tiplabs, tiplab)
idx[1] += 1
idx[2] += 1
end
export readtree
@doc raw"""
readtree("/path/to/phylo.tre")
reads a NEXUS or Newick file using RCall and the R-package `ape`
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
display(phy)
```
"""
function readtree(treefile::String)
s = readuntil(treefile, "(")
isnexus = contains(s, "#NEXUS")
RCall.@rput isnexus
RCall.@rput treefile
RCall.R"""
library(ape)
if(isnexus){
phy <- read.nexus(treefile)
}else{
phy <- read.tree(treefile)
}
nde <- node.depth.edgelength(phy)
node_depths <- max(nde) - nde
phy$node_depths <- node_depths
phy$branching_times <- branching.times(phy)
phy$tip_label <- phy$tip.label
po <- postorder(phy)
phy$po <- po
class(phy) <- "list"
"""
RCall.@rget phy
if !(phy[:branching_times] isa Vector)
phy[:branching_times] = Float64[phy[:branching_times]]
end
r = phylo(
phy[:edge],
phy[:edge_length],
phy[:Nnode],
phy[:tip_label],
phy[:node_depths],
phy[:branching_times],
phy[:po]
)
return(r)
end
export writenewick
export newick
## write a newick file
@doc raw"""
writenewick(filename, data, rates)
writes a newick file with the rate values as comments
Example:
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
λ = [0.1, 0.2, 0.3, 0.4, 0.20]
μ = [0.05, 0.15, 0.05, 0.15, 0.25]
η = 1 / sum(primates.branch_lengths) ## roughly one expected shift on the whole tree
model = SSEconstant(λ, μ, η)
rates = birth_death_shift(model, primates)
writenewick("/tmp/newick.tre", primates, rates)
```
"""
function writenewick(filename::String, data::SSEdata, rates::DataFrames.DataFrame)
newick_string = newick(data, rates)
open(filename, "w") do io
write(io, newick_string)
write(io, "\n")
end
end
function node_data(rates::DataFrames.DataFrame)
DataFrames.sort!(rates, :node)
rates_plain = DataFrames.select(rates, DataFrames.Not(:node))
res = []
for row in eachrow(rates_plain)
entries = String[]
for (i, name) in enumerate(names(row))
e = name * "=" * string(row[i])
append!(entries, [e])
end
nd = String[]
append!(nd, ["[&"])
for (i, entry) in enumerate(entries)
if i > 1
append!(nd, [","])
end
append!(nd, [entry])
end
append!(nd, ["]"])
append!(res, [*(nd...)])
end
return(res)
end
## create a newick string from the data object
## translated from R-package treeio: https://github.com/YuLab-SMU/treeio/blob/master/R/write-beast.R
function newick(data::SSEdata, rates::DataFrames.DataFrame)
ancestors = make_ancestors(data)
nd = node_data(rates)
ind1 = [ancestors[i] for i in 1:ntip(data)]
ind2 = [ancestors[i] for i in (ntip(data)+2):(nnode(data))]
ind = vcat(ind1, 0, ind2)
kids = make_descendants_nodes(data)
root = getRoot(data.edges)
desc = kids[root]
s = []
append!(s, "(")
n = ntip(data)
for j in desc
if j > n
addinternal!(s, kids, nd, data, ind, j)
else
addterminal!(s, data, nd, ind[j])
end
if j != desc[length(desc)]
append!(s, ",")
end
end
append!(s, "):0.0;")
newick = *(s...)
return(newick)
end
function addinternal!(s, kids, nd, data, ind, i)
append!(s, "(")
desc = kids[i]
for j in desc
if j in data.edges[:,1]
addinternal!(s, kids, nd, data, ind, j)
else
addterminal!(s, data, nd, ind[j])
end
if j != desc[length(desc)]
append!(s, ",")
end
end
append!(s, ")")
append!(s, nd[i])
append!(s, ":")
append!(s, string(data.branch_lengths[ind[i]]))
end
function addterminal!(s, data, nd, i)
ii = data.edges[i,2]
tl = data.tiplab[ii]
append!(s, tl)
append!(s, nd[ii])
append!(s, ":")
append!(s, string(data.branch_lengths[i]))
end
function nnode(data)
return (size(data.edges)[1]+1)
end
function ntip(data)
return(length(data.tiplab))
end
function getRoot(edges)
descendants = Set(edges[:,2])
for node in edges[:,1]
if node ∉ descendants
return(node)
end
end
error("root not found")
end
export plot_preorder!
export treeplot
function plot_preorder!(
h::Array{Float64,2},
t::Float64,
i::Vector{Int64},
r::Vector{Float64},
data::SSEdata,
edge_index::Int64
)
node_index = data.edges[edge_index,2]
bl = data.branch_lengths[edge_index]
h[edge_index,1:2] = [t, t + bl]
# if is internal
if node_index > data.Nnode+1
left, right = findall(data.edges[:,1] .== node_index)
plot_preorder!(h, t+bl, i, r, data, left)
plot_preorder!(h, t+bl, i, r, data, right)
h[edge_index,3] = (h[left,3] + h[right,3]) / 2
# if is tip
else
h[edge_index,3] = i[1]
i[1] += 1
end
h[edge_index,4] = r[edge_index]
end
export coordinates
function coordinates(
data::SSEdata
)
r = zeros(size(data.edges)[1])
coordinates(data, r)
end
function coordinates(
data::SSEdata,
r::Vector{Float64}
)
n_species = length(data.tiplab)
root_index = n_species+1
left, right = findall(data.edges[:,1] .== root_index)
n_edges = length(data.branch_lengths)
h = zeros(n_edges, 4)
i = [1]
t = 0.0
plot_preorder!(h, t, i, r, data, left)
plot_preorder!(h, t, i, r, data, right)
return(h)
end
## Define empty stub functions here, so that the plotting extension
## module can add its own methods to `Pesto.treeplot` and `Pesto.treeplot!`
function treeplot()
end
function treeplot!()
end
import RCall: sexp, protect, unprotect, setclass!, RClass, sexpclass
RCall.sexpclass(::SSEdata) = RClass{:phylo}
function sexp(::Type{RClass{:phylo}}, f::SSEdata)
phy = protect(sexp(Dict(
"edge" => f.edges,
"Nnode" => f.Nnode,
"tip.label" => f.tiplab,
"edge.length" => f.branch_lengths
)))
setclass!(phy, sexp("phylo"))
unprotect(1)
phy
end
# function sexp(::Type{RClass{:treedata}}, f::SSEresult)
# td = protect(sexp(
# Dict(
# "phy" => Dict(
# "edge" => f.phy["edge"],
# "Nnode" => f.phy["Nnode"],
# "tip.label" => f.phy["tip.label"],
# "edge.length" => f.phy["edge.length"]
# ),
# "data" => f.rates
# )))
# #setclass!(td, sexp("treedata"))
# RCall.R"""
# td <- new("treedata", data = )
# """
# unprotect(1)
# # x = rcopy(reval("""
# #setClass("Foo", representation(x = "numeric"))
# # foo <- new("Foo", x = 20)
# # """))
# td
# end
using Pesto
using Test
@testset "Pesto.jl" begin
# Write your tests here.
@test true
end
## Pesto.jl: Phylogenetic Estimation of Shifts in the Tempo of Origination
[](https://kopperud.github.io/Pesto.jl/stable)
[](https://kopperud.github.io/Pesto.jl/dev)
The program fits state-dependent speciation and extinction (SSE; Maddison et al. 2007) models to reconstructed phylogenetic trees. We use these models to infer when, and on which branches, there were shifts in the tempo of diversification. The method is equivalent to the one presented by Höhna et al. (2019), but much faster: the runtime of Pesto scales linearly with the number of tips in the phylogeny, so it can be run on huge phylogenies (> 30k taxa) without much trouble on a standard laptop computer.
## Installation
```julia
import Pkg
Pkg.add("Pesto")
```
## Example rate analysis
```julia
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635 ## taxon sampling fraction
primates = SSEdata(phy, ρ)
model, rates = pesto(primates)
```
If plotted with the `ggtree` R package, the output will look something like the following figure. In the primates phylogeny, it is clear that there was one large shift in speciation rate on the branch leading to the Old World Monkeys clade.

See the website at [https://kopperud.github.io/Pesto.jl/dev](https://kopperud.github.io/Pesto.jl/dev) for the documentation, and for an explanation of how the model is set up.
## References
* Maddison, W. P., Midford, P. E., & Otto, S. P. (2007). Estimating a binary character's effect on speciation and extinction. Systematic biology, 56(5), 701-710.
* Höhna, S., Freyman, W. A., Nolen, Z., Huelsenbeck, J. P., May, M. R., & Moore, B. R. (2019). A Bayesian approach for estimating branch-specific speciation and extinction rates. BioRxiv, 555805.
# Functions
List of available functions
```@docs
birth_death_shift(model, data)
```
```@docs
Econstant(t, λ, µ, ρ)
```
```@docs
ψ(t, λ, µ, ρ)
```
```@docs
lp(λ::Vector{Float64}, μ::Vector{Float64}, data::SSEdata)
```
```@docs
estimate_constant_bdp(data::SSEdata)
```
```@docs
optimize_eta(λ::Vector{Float64}, µ::Vector{Float64}, data::SSEdata)
```
```@docs
make_descendants(data::SSEdata)
```
```@docs
make_ancestors(data::SSEdata)
```
```@docs
lrange(from::Float64, to::Float64, length::Int64)
```
```@docs
allpairwise(λ, µ)
```
```@docs
writenewick(fpath::String, data::SSEdata, rates::DataFrames.DataFrame)
```
```@docs
make_SSEdata(phy::Pesto.phylo, ρ::Float64)
```
```@docs
make_descendants_nodes(data::SSEdata)
```
```@docs
readtree(path::String)
```
```@docs
postorder_nosave(model::SSEconstant, data::SSEdata, E)
```
## Background
Pesto is an acronym for "Phylogenetic Estimation of Shifts in the Tempo of Origination".
Broadly speaking, it is a method for detecting shifts in the process of diversification that led to the biodiversity present today.
In order to study diversification, we are interested in two kinds of events: speciation and extinction.
Since these events are difficult to observe, we use a simple model, the birth-death process, to model what happened in the past.
On this page, there is some background material that introduces the mathematical models that we use.
## The birth-death model
Under the standard birth-death model, the tempo at which species speciate and go extinct is controlled by two parameters (Nee et al. 1994):
* The speciation rate ($\lambda$)
* The extinction rate ($\mu$)
In its simplest form, the rates of the birth-death process are the same across different lineages in the tree, and across time.
By simulating under the birth-death process and pruning the extinct lineages from the tree, one gets a **reconstructed phylogenetic tree** as a result (i.e., it is ultrametric: all tips end at the same time point).
## The birth-death-shift model
The question we are interested in is whether the process of diversification changed throughout the phylogenetic tree.
In other words, was there a shift or not, and if so, how large was the shift?
To answer this, we employ a variant of the state-dependent birth-death model (first presented by Maddison et al. 2007).
This is also called the birth-death-shift model, or the lineage-specific birth-death model (Höhna et al. 2019).
The birth-death-shift model has three parameters:
* The state-dependent speciation rate ($\lambda_i$)
* The state-dependent extinction rate ($\mu_i$)
* The common shift rate ($\eta$).
```@raw html
<center><img src="assets/bdshift.png" alt="bdshift" width="300" height="300"></center>
```
When a shift event occurs, the speciation and extinction rates shift from the previous state (say $\lambda_1,\mu_1$) to a new state with different rates ($\lambda_2,\mu_2$). A rate shift to any other rate category occurs with rate $\eta$, and a rate shift from state $i$ to a specific other state $j$ occurs with rate $\eta/(K-1)$. The figure above depicts a three-state model; however, depending on how the model is set up, there can be any number of states $K$.
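For example, with $K = 3$ rate categories the shift process corresponds to the transition rate matrix below, with $\eta/(K-1) = \eta/2$ on the off-diagonal (this is the same matrix that the internal `Qmatrix` helper constructs):
```math
Q = \begin{pmatrix}
-\eta & \eta/2 & \eta/2 \\
\eta/2 & -\eta & \eta/2 \\
\eta/2 & \eta/2 & -\eta
\end{pmatrix}
```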
## The likelihood
The probability of observing the tree is the same as for the binary-state speciation and extinction model (BiSSE, Maddison et al. 2007), and the multi-state speciation and extinction model (MuSSE, FitzJohn 2012). We first calculate the probability of going extinct before the present, if a lineage was alive at some age `t` in the past:
```math
\frac{dE_{i}(t)}{dt} = \mu_i - (\lambda_i + \mu_i + \eta) E_{i}(t) + \lambda_i E_{i}(t)^2 + \frac{\eta}{K-1} \sum_{j \neq i}^K E_{j}(t)
```
The initial state for $E_i(t)$ is equal to $1-\rho$ for all states, where $\rho$ is the taxon sampling fraction (we assume uniform taxon sampling probability).
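As an illustration, the extinction probability can be solved numerically. The sketch below assumes a two-state model ($K = 2$) with made-up rates, complete sampling, and an arbitrary tree height of 30; it mirrors the `extinction_ode` solver that Pesto uses internally.
```julia
using OrdinaryDiffEq

λ = [0.1, 0.2]              ## illustrative speciation rates
μ = [0.05, 0.15]            ## illustrative extinction rates
η = 0.05                    ## shift rate
K = 2                       ## number of rate categories
ρ = 1.0                     ## complete taxon sampling

function extinction_ode(dE, E, p, t)
    λ, μ, η, K = p
    dE[:] .= μ .- (λ .+ μ .+ η) .* E .+ λ .* E.^2 .+ (η/(K-1)) .* (sum(E) .- E)
end

E0 = repeat([1.0 - ρ], K)   ## initial condition at the present
tspan = (0.0, 30.0)         ## solve from the present back to age 30
prob = ODEProblem(extinction_ode, E0, tspan, (λ, μ, η, K))
E = solve(prob, Tsit5(), abstol = 1e-10, reltol = 1e-10)
E(15.0)                     ## extinction probabilities at age 15
```
Pesto solves this system once with strict tolerances (`abstol = reltol = 1e-10`), since every downstream computation depends on `E(t)`.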
Next, we calculate the probability that a lineage alive at age `t` was observed in the reconstructed tree:
```math
\frac{dD_{M,i}(t)}{dt} = - (\lambda_i + \mu_i + \eta) D_{M,i}(t) + 2 \lambda_i D_{M,i}(t) E_i(t) + \frac{\eta}{K-1} \sum_{j \neq i}^K D_{M,j}(t)
```
We solve $D_{M,i}(t)$ for each branch `M` and each state `i`, in a postorder tree traversal.
At the tips, the initial state is $D_{M,i}(t)=\rho$ for all states.
At the branching events, the initial state for the parent branch `P` is assigned $D_{P,i}(t) := \lambda_i \times D_{L,i}(t) \times D_{R,i}(t)$ where `L` and `R` are the left and right descendant branches.
Continuing this toward the root of the tree, we calculate the likelihood as follows:
```math
L = \frac{1}{K}\sum_{i=1}^K \Big [ \frac{D_{L,i}(t) \times D_{R,i}(t)}{(1 - E_i(t))^2} \Big ]
```
where `t` is the age of the most recent common ancestor.
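In `Pesto`, this quantity is computed by `logL_root`. A minimal sketch, with arbitrary rates rather than estimates:
```julia
using Pesto

phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635                      ## taxon sampling fraction
data = make_SSEdata(phy, ρ)

λ = [0.1, 0.2]                 ## illustrative speciation rates
μ = [0.05, 0.15]               ## illustrative extinction rates
η = 0.05                       ## shift rate
model = SSEconstant(λ, μ, η)

logL_root(model, data)         ## log-likelihood of the tree under the model
```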
## References
* Nee, S., May, R. M., & Harvey, P. H. (1994). The reconstructed evolutionary process. Philosophical Transactions of the Royal Society of London. Series B: Biological Sciences, 344(1309), 305-311.
* Maddison, W. P., Midford, P. E., & Otto, S. P. (2007). Estimating a binary character's effect on speciation and extinction. Systematic biology, 56(5), 701-710.
* FitzJohn, R. G. (2012). Diversitree: comparative phylogenetic analyses of diversification in R. Methods in Ecology and Evolution, 3(6), 1084-1092.
* Höhna, S., Freyman, W. A., Nolen, Z., Huelsenbeck, J. P., May, M. R., & Moore, B. R. (2019). A Bayesian approach for estimating branch-specific speciation and extinction rates. BioRxiv, 555805.
# Installation
## Installing Julia
Julia is a high-level programming language similar to R, Matlab or Python.
However, it is also a high-performance language.
Julia programs can be about as fast as those written in compiled languages like [C/C++/Fortran](https://julialang.org/benchmarks/).
To install Julia, follow the instructions on the [official website](https://julialang.org/downloads/). If you are new to Julia, there are several [community-made tutorials](https://julialang.org/learning/tutorials/). There is also an extensive [official manual](https://docs.julialang.org/en/v1/manual/getting-started/), useful for looking up specifics.
## Editors
There are several options for how to work with Julia:
* **(Recommended)** [Visual Studio Code](https://code.visualstudio.com) is an integrated developer environment (IDE), which has both a file editor and a console for entering commands. If you have used RStudio before, Visual Studio Code will be very similar.
* You can also run Julia in a [Jupyter](http://jupyter.org) notebook, as is often done with Python projects.
* Alternatively, you can edit script files with your editor of choice (for example notepad/vim), and either copy-paste lines of code into the Julia console or use the `include("script.jl")` command.
## Installing Pesto.jl (stable)
To install the latest release of `Pesto.jl`, enter this in Julia:
```julia
import Pkg
Pkg.add("Pesto")
```
The package manager (`Pkg`) will automatically resolve and install any necessary dependencies.
## Installing Pesto.jl (dev)
If you want to use the developmental version of `Pesto.jl`, you can install it using a git repository URL. This can be done as follows:
```julia
import Pkg
Pkg.add(url="https://github.com/kopperud/Pesto.jl")
```
## Loading Pesto.jl
Pesto can be loaded like so:
```julia
using Pesto
```
Since Julia is a JIT (just-in-time) compiled language, any code must be compiled before it can be run. There is also a pre-compilation step the first time a module is loaded, so we have to wait a short while. Once the module has finished pre-compiling, you are ready! | Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 4386 | # [Significant shifts](@id bayesfactor)
In this vignette we will assess the support for a branch having at least one rate shift versus the null hypothesis of no shifts. We test this using Bayes factors, i.e. the relative support for one or more shifts versus none.
### Tree file
First, we load the necessary modules and read in the tree file.
```@setup bayes
using Pesto
ρ = 0.635
include("../../src/primates.jl")
```
```julia bayes
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
```
### Model setup
Let's use the standard `Pesto` analysis with empirical Bayes for the speciation and extinction rates, and maximum-likelihood shift rate.
```@example bayes
model, rates = pesto(primates; n = 6)
nothing # hide
```
## Bayes factors
A Bayes factor summarizes the evidence in favor of one hypothesis over another (Kass & Raftery 1995). In `Pesto`, we use Bayes factors to assess the evidence for the hypothesis that there was at least one diversification rate shift, versus the hypothesis that there were no rate shifts, per branch. The standard equation for a Bayes factor is as follows (see also Shi & Rabosky 2015)
```math
\text{Bayes factor} = \frac{\frac{P_M(\geq \text{1 shifts})}{\pi_M(\geq \text{1 shifts})}}{\frac{P_M(0 \text{ shifts})}{\pi_M(0 \text{ shifts})}}
```
where $P_M(\geq \text{1 shifts})$ is the posterior probability of at least one shift, $P_M(0 \text{ shifts})$ is the posterior probability of no shifts, $\pi_M(\geq \text{1 shifts})$ is the prior probability of at least one shift, and $\pi_M(0 \text{ shifts})$ is the prior probability of no shifts.
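As a minimal sketch of this formula, with made-up per-branch probabilities (in practice, `Pesto` reports the resulting Bayes factors in the `shift_bf` column of the `rates` data frame):
```julia
posterior_no_shift = 0.2   # hypothetical posterior probability of zero shifts
prior_no_shift = 0.9       # hypothetical prior probability of zero shifts

posterior_odds = (1 - posterior_no_shift) / posterior_no_shift
prior_odds = (1 - prior_no_shift) / prior_no_shift
bayes_factor = posterior_odds / prior_odds   # ≈ 36
log10(bayes_factor)                          # ≈ 1.56, i.e. strong support
```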
## Plotting Bayes factors
We can for example plot the Bayes factor directly on the tree. Since the Bayes factor can vary considerably, we instead plot the log-transformed Bayes factors, which are more concentrated around 0. A log Bayes factor with a value of 0 means that the prior and posterior support for the shift hypotheses are equal. A value much larger than 0 means that there is support for one or more shifts. A value of less than 0 means that there is more support for 0 rate shifts.
```@example bayes
using Makie, CairoMakie
min, max = extrema(rates[1:end-1,"shift_bf_log"])
cmap = Makie.cgrad([:gray, :black, :purple])
treeplot(primates, rates, "shift_bf_log"; cmap = cmap)
```
## Plotting supported branches
Alternatively, we can assess which branches have strong support for there being at least one shift. We first set the cutoff at a Bayes factor of 10, corresponding to strong support (Kass & Raftery 1995; see the table below). Next, we can compute which branches have strong support for at least one diversification rate shift versus the null hypothesis of zero shifts.
| Bayes factor | log10 Bayes factor | Level of support |
| -------------- | ------------------ | -------------------- |
| 0 to 3.2 | 0 to 0.5 | Not worth mentioning |
| 3.2 to 10 | 0.5 to 1 | Substantial |
| 10 to 100 | 1 to 2 | Strong |
| >100 | >2 | Decisive |
If we inspect the data frame with the branch-specific outputs, we can see specifically which branches have strong support.
```@example bayes
using DataFrames
cutoff = 10
filter(:shift_bf => x -> x > cutoff, rates)
```
The result is that one branch has strong support for there being at least one shift: the branch leading to the Old World Monkeys (Bayes factor of 30.7).
In order to plot these, we can create a dummy variable and use it in the tree plotting function.
```@example bayes
rates[!,:strong_support] = Float64.(rates[!,:shift_bf] .> cutoff)
cmap = Makie.cgrad([:black, :red])
treeplot(primates, rates, "strong_support"; cmap = cmap)
```
Both the phylogeny plot and the filtered data frame show only one branch with strong support (Bayes factor > 10) for one or more shifts. The estimated number of diversification rate shifts ($\hat{N}$) on this branch is also substantially larger than zero, at almost one expected shift.
### References
* Kass, R. E., & Raftery, A. E. (1995). Bayes factors. Journal of the american statistical association, 90(430), 773-795.
* Shi, J. J., & Rabosky, D. L. (2015). Speciation dynamics during the global radiation of extant bats. Evolution, 69(6), 1528-1545.
| Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 2825 | # [Extended rate analysis](@id extended)
In this vignette, we will do the same as in the simple analysis, but we explain how the model is set up in more detail.
## Tree file
First, we load the necessary modules and read in the tree file.
```@setup extended
using Pesto
ρ = 0.635
include("../../src/primates.jl")
```
```julia extended
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
```
## Model choice
Next, we set up the SSE model, including its dimensionality and hyperparameters. For this model, we will draw the speciation rate (λ) and extinction rate (µ) from LogNormal distributions. We pick the median of the LogNormal distributions such that they correspond to the maximum-likelihood estimates of the constant-rate birth-death model. This is also called the "empirical Bayes" approach, where we use the data twice. We pick the log-sd as `H = 0.587`, which corresponds to a LogNormal distribution whose 2.5%–97.5% quantile range spans one order of magnitude.
```@example extended
λml, μml = estimate_constant_bdp(primates)
H = 0.587
n = 6
using Distributions
dλ = LogNormal(log(λml), H)
dμ = LogNormal(log(µml), H)
λquantiles = make_quantiles(dλ, n)
µquantiles = make_quantiles(dμ, n)
λ, μ = allpairwise(λquantiles, µquantiles)
nothing # hide
```
The scatter plot of `λ` on the x-axis, and `µ` on the y-axis looks like the figure below (blue dots), with the quantiles of the LogNormal distributions on the margin.

Next, we estimate the rate shift parameter η under the SSE model, conditional on λ and µ.
```@example extended
η = optimize_eta(λ, µ, primates)
```
The units of $\eta$ are rate shift events per lineage per unit time. Multiplying the tree length (the sum of all branch lengths) by $\eta$ gives the expected number of rate shifts under the prior:
```@example extended
sum(primates.branch_lengths) * η
```
This allows us to set up the SSE model object:
```@example extended
model = SSEconstant(λ, μ, η)
nothing # hide
```
With the model and data objects we can for example calculate the log likelihood
```@example extended
logL_root(model, primates)
```
## Branch rates and shifts
Or we can compute both the postorder and preorder pass, and get the expected speciation and extinction rates per branch. The result is a data frame object, and we print the first five rows:
```@example extended
rates = birth_death_shift(model, primates)
rates[1:5,:]
```
## Tree plots
As before, we can use `Makie` to make some quick tree plots. Here we are plotting the average net-diversification rate per branch, with a two-color scheme going from black to green.
```@example extended
using Makie, CairoMakie
cmap = Makie.cgrad([:black, :green])
treeplot(primates, rates, "mean_netdiv"; cmap = cmap)
``` | Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 4248 | # [Number of rate shifts](@id shifts)
In this vignette we go a little more in-depth and explain how the number of rate shifts ($\hat{N}$) is estimated.
### Tree file
First, we load the necessary modules and read in the tree file.
```@setup shift
using Pesto
using CairoMakie
ρ = 0.635
include("../../src/primates.jl")
```
```julia shift
using Pesto
using CairoMakie
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
```
### Model setup
In this vignette, we pick the rate values by hand, and we don't use so many, in order to illustrate how the calculations work.
```@example shift
tree_length = sum(primates.branch_lengths)
λ = [0.1, 0.2, 0.3, 0.4, 0.20]
μ = [0.05, 0.15, 0.05, 0.15, 0.25]
η = 1 / tree_length
model = SSEconstant(λ, μ, η)
nothing # hide
```
## Number of total rate shifts
If we want to see the total number of rate shifts per branch, we can use the function `birth_death_shift` for the standard inference:
```@example shift
rates = birth_death_shift(model, primates)
rates[1:5,:]
```
We can use `Makie` to plot the number of accumulated diversification rate shifts on the branches of the phylogeny
```@example shift
using Makie, CairoMakie
cmap = Makie.cgrad([:black, :red])
treeplot(primates, rates, "nshift"; cmap = cmap)
```
## Number of rate shifts
The total number of rate shifts, as shown above, is a summary of the more detailed shift estimates that `Pesto` can infer.
The number of rate shifts from state `j` to state `i` accumulated over the branch length (from old to young) is described by the following differential equation
```math
\frac{d\hat{N}_{M,ij}}{dt} = S_{M,j}(t) \frac{-\eta}{K-1} \frac{D_{M,i}(t)}{D_{M,j}(t)} \text{ if } j \neq i
```
with initial condition $\hat{N}_{ij}(t_0) = 0$. In Pesto, we would compute this using
```@example shift
nshift = state_shifts(model, primates; ape_order = false)
nothing; # hide
```
The returned object `nshift` is a three-dimensional array. The first dimension corresponds to the branch index (`M` in the equation above), the second dimension represents the arrival state (`i`), and the third dimension represents the departure state (`j`). If `ape_order = true`, then the first dimension is reordered such that the indices correspond to the node indices in the tree.
If we sum over second and third dimension, we get the number of rate shifts per branch:
```@example shift
sum(nshift, dims = 2:3)[:,1,1]
```
If instead we sum over the first dimension, we get a breakdown over which rate transitions were more frequent:
```@example shift
Nmatrix = sum(nshift, dims = 1)[1,:,:]
```
In this case, the most frequent rate shift was from state `2` to state `4`, with $\hat{N} = 0.95$ expected shifts. Going from state `2` to state `4` under this model means an increase of $0.4-0.2=0.2$ in speciation rate units. This can for example be visualized using a histogram:
```@example shift
mids, bins = makebins(Nmatrix, model, -0.35, 0.35; nbins = 7)
f = Makie.Figure(resolution = (500, 300))
ax = Makie.Axis(f[1,1],
ylabel = "Number of rate shifts",
xlabel = "Change in speciation rate (λi - λj)",
xticks = round.(mids; digits = 2),
xgridvisible = false,
ygridvisible = false)
Makie.barplot!(ax, mids, bins[:,1], color = :maroon)
f
```
Most of the rate shift events represent a shift from a smaller to a larger speciation rate (i.e. $\lambda_i - \lambda_j > 0$), however some rate shifts are in the other direction ($\lambda_i - \lambda_j < 0$). There are also a few rate shift events where the speciation rate does not change ($\lambda_i - \lambda_j = 0$). In these events, it is the extinction rate that changes, and not the speciation rate. If we are interested in the question, "what is the overall magnitude of shifts?", we can calculate the mean shift magnitude (weighted by their frequencies):
```math
\frac{1}{\sum_{i,j}\hat{N}_{ij}} \sum_{i,j} \hat{N}_{ij} (\lambda_i - \lambda_j).
```
In Julia we can calculate it like so
```@example shift
shifts_weighted = Nmatrix .* (λ .- λ')
mean_magnitude = sum(shifts_weighted) / sum(Nmatrix)
```
meaning that the overall shift magnitude for the primates tree under this model was an increase of 0.098 speciation rate units. | Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 2290 | # [Simple analysis](@id simple)
Here is an example of an analysis of branch-specific rates under the birth-death-shift model.
## Tree file
First, we load the necessary modules and read in the tree file.
```@setup simple
using Pesto
ρ = 0.635
include("../../src/primates.jl")
```
```julia simple
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
```
## Analysis
A simple analysis can be done like so:
```@example simple
model, rates = pesto(primates)
nothing # hide
```
To see how this analysis is set up, see the next section ([Extended analysis](@ref extended)).
## Tree plots
If we want to plot the results immediately, we will need to use the `Makie` dependency. `Makie` has several backends, but we will use `CairoMakie` for now, as it is designed to plot high-quality 2-dimensional figures.
```@example simple
using Makie, CairoMakie
treeplot(primates, rates)
```
If you want to instead plot the number of shifts, and for example use a different color gradient, you can enter the following
```@example simple
cmap = Makie.cgrad([:black, :red])
f = treeplot(primates, rates, "nshift"; cmap = cmap)
```
The figure can be saved using the following code
```julia
Makie.save("path/to/figure.pdf", f)
```
See section [Plot with ggtree](@ref ggtree) for more instructions on how to make more customized and publication-quality figures.
## Save to file
A summary of the results of the analysis is stored in the `rates` object, which is a data frame. We can print the first five rows to get an idea of it:
```@example simple
rates[1:5,:]
```
Each row in the data frame represents a branch in the tree, indexed by the edge index `edge`.
Alternatively, you can index the rows using the `node` index.
It is possible to save the data frame as-is to a file, using `using CSV; CSV.write("rates.csv", rates)`. We can also represent the output as an extended newick string:
```@example simple
newick(primates, rates)
```
If we want to save the newick string to a file, we can use the `writenewick` function
```julia
writenewick("primates_analysis.tre", primates, rates)
```
This tree file can be loaded in other programs such as `R` and can be plotted using standard packages like `ape` and `ggtree` (see section [Plot with ggtree](@ref ggtree)). | Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 2041 | # [Tip rates](@id tiprates)
In this vignette we will calculate the diversification rates at the tips of the phylogeny.
### Tree file
First, we load the necessary modules and read in the tree file.
```@setup tips
using Pesto
using CairoMakie
ρ = 0.635
include("../../src/primates.jl")
```
```julia tips
using Pesto
using CairoMakie
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
```
### Model setup
Let's use the standard `Pesto.jl` model again, with LogNormal quantiles and maximum-likelihood shift rate ($\eta$).
```@example tips
model, rates = pesto(primates)
nothing # hide
```
## Tip rates
In order to calculate the values of the rates at the present, we need to evalute the following expressions
```math
\begin{aligned}
\lambda_\text{tip} &= \mathbf{S}(t=0)^\top \boldsymbol{\lambda}\\
\mu_\text{tip} &= \mathbf{S}(t=0)^\top \boldsymbol{\mu}\\
r_\text{tip} &= \mathbf{S}(t=0)^\top (\boldsymbol{\lambda}-\boldsymbol{\mu})\\
\epsilon_\text{tip} &= \mathbf{S}(t=0)^\top (\boldsymbol{\mu} \oslash \boldsymbol{\lambda}),
\end{aligned}
```
where $\mathbf{S}(t=0)$ are the posterior state probabilities at time $t=0$, i.e. the present. We can compute the tip rates conveniently with the `tip_rates()` function, which gives a `DataFrame` as a result.
```@example tips
df = tip_rates(model, primates)
df[1:5,:]
```
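Equivalently, each tip rate is a dot product of the posterior state probabilities with the corresponding per-state rates. A minimal sketch with hypothetical placeholder vectors:
```julia
S = [0.1, 0.7, 0.2]               # hypothetical posterior state probabilities at t = 0
λ = [0.1, 0.2, 0.3]; μ = [0.05, 0.15, 0.1]

λ_tip = sum(S .* λ)               # speciation
μ_tip = sum(S .* μ)               # extinction
netdiv_tip = sum(S .* (λ .- μ))   # net diversification
relext_tip = sum(S .* (μ ./ λ))   # relative extinction
```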
## Distribution
If we plot the tip rates as a histogram, we can see that the primates tip rates are bimodally distributed. The high-rate species are the Old World Monkeys, and the low-rate species are everything else in the tree.
```@example tips
f = Figure(resolution = (300, 600))
colors = [:blue, :red, :green, :orange]
for (i, rate) in enumerate([:lambda, :mu, :netdiv, :relext])
ax = Axis(f[i,1],
xgridvisible = false,
ygridvisible = false,
xlabel = string("Tip rate (", rate, ")"),
ylabel = "Frequency")
hist!(ax, df[!,rate], bins=30, color = colors[i])
end
rowgap!(f.layout, 5.0)
f
```
| Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.1.6 | 98e186b38d34174a6d8fc45f6906d6c007c7a4ad | docs | 1936 | # [Plot with ggtree](@id ggtree)
Here is an example of how the results can be plotted using `ggtree` in `R`.
## Tree file
First, we load the necessary modules, read in the tree file, and do a quick analysis.
```@setup simple
using Pesto
ρ = 0.635
include("../../src/primates.jl")
model, rates = pesto(primates)
```
```julia simple
using Pesto
phy = readtree(Pesto.path("primates.tre"))
ρ = 0.635
primates = SSEdata(phy, ρ)
model, rates = pesto(primates)
nothing # hide
```
## Tree plots in Makie
If we want to plot the results immediately, we will need to use the `Makie` dependency. `Makie` has several backends, but we will use `CairoMakie` for now, as it is designed to plot high-quality 2-dimensional figures.
```@example simple
using Makie, CairoMakie
treeplot(primates, rates)
```
## Tree plots in ggtree
The plotting functionality included in `Pesto` is intended as an interactive tool, and it is rather rudimentary. If you wish to make a publication-quality plot, we recommend using a more established, well-tested, and feature-rich library designed for this purpose. A good choice is the `ggtree` library in `R`, although other libraries could be used as well.
In `Pesto`, we provide functionality to save the results as a Newick string with metadata for each node. If we want to save the newick string to a file, we can use the `writenewick` function
```julia
writenewick("primates_analysis.tre", primates, rates)
```
This tree file can be loaded in other programs such as `R` and can be opened using standard packages like `ape` and `treeio` or any other software that can handle extended Newick trees.
```R
library(treeio)
phy <- treeio::read.beast.newick("primates_analysis.tre")
library(ggplot2)
library(ggtree)
p1 <- ggtree(phy, aes(color = mean_lambda)) +
geom_tiplab(size=2) +
labs(color = "Mean speciation rate")
```
 | Pesto | https://github.com/kopperud/Pesto.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 222 | using Documenter
using EDF
makedocs(modules=[EDF],
sitename="EDF.jl",
authors="Beacon Biosignals, Inc.",
pages=["API" => "index.md"])
deploydocs(repo="github.com/beacon-biosignals/EDF.jl.git")
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 119 | module EDF
using BitIntegers, Dates, Printf
include("types.jl")
include("read.jl")
include("write.jl")
end # module
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 11408 | #####
##### parsing utilities
#####
function Base.tryparse(::Type{PatientID}, raw::AbstractString)
metadata = split(raw, ' '; keepempty=false)
length(metadata) == 4 || return nothing
code_raw, sex_raw, dob_raw, name_raw = metadata
length(sex_raw) == 1 || return nothing
code = edf_unknown(code_raw)
sex = edf_unknown(first, sex_raw)
dob = edf_unknown(parse_date, dob_raw)
dob === nothing && return nothing
name = edf_unknown(name_raw)
return PatientID(code, sex, dob, name)
end
function Base.tryparse(::Type{RecordingID}, raw::AbstractString)
startswith(raw, "Startdate") || return nothing
metadata = split(chop(raw; head=9, tail=0), ' '; keepempty=false)
length(metadata) == 4 || return nothing
start_raw, admin_raw, tech_raw, equip_raw = metadata
startdate = edf_unknown(parse_date, start_raw)
startdate === nothing && return nothing
admincode = edf_unknown(admin_raw)
technician = edf_unknown(tech_raw)
equipment = edf_unknown(equip_raw)
return RecordingID(startdate, admincode, technician, equipment)
end
# EDF does not specify what to do if the month or day is not in a valid range
# so if it is not we snap month or day to "01" and try again
function parse_header_date(date_str::AbstractString)
m = match(r"^(\d{2})\.(\d{2})\.(\d{2}) (\d{2})\.(\d{2})\.(\d{2})$", date_str)
if m === nothing
throw(ArgumentError("Malformed date string: expected 'dd.mm.yy HH.MM.SS', " *
"got '$date_str'"))
end
day, month, year, hour, minute, second = parse.(Int, m.captures)
if year <= 84
year += 2000
else
year += 1900
end
# FIXME: EDF "avoids" the Y2K problem by punting it to the year 2084, after which
# we ignore the above entirely and use `recording_id.startdate`. We could add a
# check here on `year(today())`, but that will be dead code for the next 60+ years.
month = clamp(month, 1, 12)
day = clamp(day, 1, daysinmonth(year, month))
hour = clamp(hour, 0, 23)
minute = clamp(minute, 0, 59)
second = clamp(second, 0, 59)
return DateTime(year, month, day, hour, minute, second)
end
parse_float(raw::AbstractString) = something(tryparse(Float32, raw), NaN32)
parse_date(raw::AbstractString) = tryparse(Date, raw, dateformat"d-u-y")
"""
edf_unknown([f,] field::String)
Check whether the given text is simply "X", which per the EDF+ specification means that the
value is unknown, not applicable, or must be made anonymous, and if so return `missing`,
otherwise return the result of applying `f` to the given text. If unspecified, `f` defaults
to `identity`.
"""
edf_unknown(f, field::AbstractString) = field == "X" ? missing : f(field)
edf_unknown(field::AbstractString) = edf_unknown(identity, field)
#####
##### header reading utilities
#####
function read_file_header(io::IO)
version = strip(String(Base.read(io, 8)))
patient_id_raw = strip(String(Base.read(io, 80)))
patient_id = something(tryparse(PatientID, patient_id_raw), String(patient_id_raw))
recording_id_raw = strip(String(Base.read(io, 80)))
recording_id = something(tryparse(RecordingID, recording_id_raw), String(recording_id_raw))
start_raw = Base.read(io, 8)
push!(start_raw, UInt8(' '))
append!(start_raw, Base.read(io, 8)) # Add the time
# Parsing the date per the given format will validate EDF+ item 2
start = parse_header_date(String(start_raw))
# NOTE: These 8 bytes are supposed to define the byte count of the header,
# which in reality is trivially computable from constants defined by the
# specification + directly available information in the header already. I'm
# not sure why the EDF standard requires that it be written out at all; AFAICT
# it only serves as a potential bug source for readers/writers that might write
# the incorrect value here. Since we don't actually use this value anywhere in
# our read/write process, we skip it here.
skip(io, 8)
reserved = String(Base.read(io, 44))
is_contiguous = !startswith(reserved, "EDF+D")
record_count = parse(Int, String(Base.read(io, 8)))
seconds_per_record = parse(Float64, String(Base.read(io, 8)))
signal_count = parse(Int, String(Base.read(io, 4)))
return FileHeader(version, patient_id, recording_id, start, is_contiguous,
record_count, seconds_per_record), signal_count
end
function read_signal_headers(io::IO, signal_count)
fields = [String(Base.read(io, size)) for signal in 1:signal_count, (_, size) in SIGNAL_HEADER_FIELDS]
signal_headers = [SignalHeader(strip(fields[i, 1]), strip(fields[i, 2]),
strip(fields[i, 3]), parse_float(fields[i, 4]),
parse_float(fields[i, 5]), parse_float(fields[i, 6]),
parse_float(fields[i, 7]), strip(fields[i, 8]),
parse(Int16, fields[i, 9])) for i in 1:size(fields, 1)]
skip(io, 32 * signal_count) # reserved
return signal_headers
end
#####
##### signal reading utilities
#####
# NOTE: The fast-path in `Base.read!` that uses `unsafe_read` will read too much when
# the element type is `Int24`, since it will try to include the alignment padding for
# each value read and will thus read too much. To get around this, we'll fall back to
# a naive implementation when the size of the element type doesn't match its aligned
# size. (See also `write_from`)
function read_to!(io::IO, x::AbstractArray{T}) where {T}
if sizeof(T) == Base.aligned_sizeof(T)
Base.read!(io, x)
else
@inbounds for i in eachindex(x)
x[i] = Base.read(io, T)
end
end
return x
end
function read_signals!(file::File)
for record_index in 1:file.header.record_count, signal in file.signals
read_signal_record!(file, signal, record_index)
end
return nothing
end
function read_signal_record!(file::File, signal::Signal, record_index::Int)
if isempty(signal.samples)
resize!(signal.samples, file.header.record_count * signal.header.samples_per_record)
end
record_start = 1 + (record_index - 1) * signal.header.samples_per_record
record_stop = record_index * signal.header.samples_per_record
read_to!(file.io, view(signal.samples, record_start:record_stop))
return nothing
end
function read_signal_record!(file::File, signal::AnnotationsSignal, record_index::Int)
bytes_per_sample = sizeof(sample_type(file))
io_for_record = IOBuffer(Base.read(file.io, bytes_per_sample * signal.samples_per_record))
tals_for_record = TimestampedAnnotationList[]
while !eof(io_for_record) && Base.peek(io_for_record) != 0x00
push!(tals_for_record, read_tal(io_for_record))
end
push!(signal.records, tals_for_record)
return nothing
end
function read_tal(io::IO)
sign = read_tal_onset_sign(io)
bytes = readuntil(io, 0x14)
timestamp = split(String(bytes), '\x15'; keepempty=false)
onset_in_seconds = flipsign(parse(Float64, timestamp[1]), sign)
duration_in_seconds = length(timestamp) == 2 ? parse(Float64, timestamp[2]) : nothing
annotations = convert(Vector{String}, split(String(readuntil(io, 0x00)), '\x14'; keepempty=true))
isempty(last(annotations)) && pop!(annotations)
return TimestampedAnnotationList(onset_in_seconds, duration_in_seconds, annotations)
end
function read_tal_onset_sign(io::IO)
sign = Base.read(io, UInt8)
sign === 0x2b && return 1
sign === 0x2d && return -1
error("starting byte of a TAL must be '+' or '-'; found $sign")
end
#####
##### API functions
#####
"""
EDF.File(io::IO)
Return an `EDF.File` instance that wraps the given `io`, as well as EDF-formatted
file, signal, and annotation headers that are read from `io`. This constructor
only reads headers, not the subsequent sample data; to read the subsequent sample
data from `io` into the returned `EDF.File`, call `EDF.read!(file)`.
"""
function File(io::IO)
file_header, signal_count = read_file_header(io)
T = sample_type(file_header)
signals = Union{Signal{T},AnnotationsSignal}[]
for header in read_signal_headers(io, signal_count)
if header.label in ANNOTATIONS_SIGNAL_LABEL
push!(signals, AnnotationsSignal(header))
else
push!(signals, Signal{T}(header, T[]))
end
end
file_size = _size(io)
if file_size > 0
bytes_left = file_size - position(io)
total_expected_samples = sum(signals) do signal
if signal isa Signal
return signal.header.samples_per_record
else
return signal.samples_per_record
end
end
readable_records = div(div(bytes_left, sizeof(T)), total_expected_samples)
if file_header.record_count > readable_records
@warn("Number of data records in file header does not match file size. " *
"Skipping $(file_header.record_count - readable_records) truncated " *
"data record(s).")
file_header = FileHeader(file_header.version,
file_header.patient,
file_header.recording,
file_header.start,
file_header.is_contiguous,
readable_records,
file_header.seconds_per_record)
end
end
return File(io, file_header, signals)
end
_size(io::IOStream) = filesize(io)
_size(io::IOBuffer) = io.size
# NOTE: We're using -1 here as a type-stable way of denoting an unknown size.
# Also note that some `IO` types may have a specific method for `stat` which would be
# convenient except that it's difficult to determine since `stat` has an untyped method
# that won't work for `IO` types despite `applicable`/`hasmethod` thinking it applies.
# Instead, we'll check for the availability of seeking-related methods and try to use
# those to determine the size, returning -1 if the requisite methods don't apply.
function _size(io::IO)
applicable(position, io) || return -1
here = position(io)
applicable(seek, io, here) && applicable(seekend, io) || return -1
seekend(io)
nbytes = position(io)
seek(io, here)
return nbytes
end
"""
EDF.is_bdf(file)
Return `true` if `file` is a BDF (BioSemi Data Format) file, otherwise `false`.
"""
is_bdf(file::File) = is_bdf(file.header)
is_bdf(header::FileHeader) = header.version == "\xffBIOSEMI"
"""
EDF.sample_type(file::EDF.File{T})
Return the encoded type `T` of the samples stored in `file`.
"""
sample_type(file::File{T}) where {T} = T
sample_type(header::FileHeader) = is_bdf(header) ? BDF_SAMPLE_TYPE : EDF_SAMPLE_TYPE
"""
EDF.read!(file::File)
Read all EDF sample and annotation data from `file.io` into `file.signals` and
`file.annotations`, returning `file`. If `eof(file.io)`, return `file` unmodified.
"""
function read!(file::File)
isopen(file.io) && !eof(file.io) && read_signals!(file)
return file
end
"""
EDF.read(io::IO)
Return `EDF.read!(EDF.File(io))`.
See also: [`EDF.File`](@ref), [`EDF.read!`](@ref)
"""
read(io::IO) = read!(File(io))
"""
EDF.read(path)
Return `open(EDF.read, path)`.
"""
read(path) = open(read, path)
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 8666 | BitIntegers.@define_integers 24
const EDF_SAMPLE_TYPE = Int16
const BDF_SAMPLE_TYPE = Int24
const SUPPORTED_SAMPLE_TYPES = Union{EDF_SAMPLE_TYPE,BDF_SAMPLE_TYPE}
#####
##### `EDF.Signal`
#####
const SIGNAL_HEADER_FIELDS = [(:label, 16),
(:transducer_type, 80),
(:physical_dimension, 8),
(:physical_minimum, 8),
(:physical_maximum, 8),
(:digital_minimum, 8),
(:digital_maximum, 8),
(:prefilter, 80),
(:samples_per_record, 8)]
"""
EDF.SignalHeader
Type representing the header for a single EDF signal.
# Fields
* `label::String`: the signal's type/sensor label, see https://www.edfplus.info/specs/edftexts.html#label
* `transducer_type::String`: non-standardized transducer type information
* `physical_dimension::String`: see https://www.edfplus.info/specs/edftexts.html#physidim
* `physical_minimum::Float32`: physical minimum value of the signal
* `physical_maximum::Float32`: physical maximum value of the signal
* `digital_minimum::Float32`: minimum value of the signal that could occur in a data record
* `digital_maximum::Float32`: maximum value of the signal that could occur in a data record
* `prefilter::String`: non-standardized prefiltering information
* `samples_per_record::Int16`: number of samples in a data record (NOT overall)
"""
struct SignalHeader
label::String
transducer_type::String
physical_dimension::String
physical_minimum::Float32
physical_maximum::Float32
digital_minimum::Float32
digital_maximum::Float32
prefilter::String
samples_per_record::Int16
end
"""
EDF.Signal{T}
Type representing a single EDF signal with sample type `T`.
# Fields
* `header::SignalHeader`
* `samples::Vector{T}`
"""
struct Signal{T}
header::SignalHeader
samples::Vector{T}
end
Signal{T}(header::SignalHeader) where {T} = Signal(header, T[])
Signal(header::SignalHeader) = Signal{EDF_SAMPLE_TYPE}(header)
"""
EDF.decode(signal::Signal)
Return `signal.samples` decoded into the physical units specified by `signal.header`.
"""
function decode(signal::Signal)
dmin, dmax = signal.header.digital_minimum, signal.header.digital_maximum
pmin, pmax = signal.header.physical_minimum, signal.header.physical_maximum
return @. ((signal.samples - dmin) / (dmax - dmin)) * (pmax - pmin) + pmin
end
SignalHeader(signal::Signal) = signal.header
#####
##### `EDF.AnnotationsSignal`
#####
const ANNOTATIONS_SIGNAL_LABEL = ["EDF Annotations", "BDF Annotations"]
"""
EDF.TimestampedAnnotationList
A type representing a time-stamped annotations list (TAL).
Note that this type's constructor may attempt to round given `onset_in_seconds` and
`duration_in_seconds` arguments to their nearest representable values in accordance
with the EDF+ specification, which a) represents these values as ASCII, b) constrains
these values to an 8 character limit, and c) does not allow the use of scientific
notation for these fields.
See EDF+ specification for details.
# Fields
* `onset_in_seconds::Float64`: onset w.r.t. recording start time (may be negative)
* `duration_in_seconds::Union{Float64,Nothing}`: duration of this TAL
* `annotations::Vector{String}`: the annotations associated with this TAL
"""
struct TimestampedAnnotationList
onset_in_seconds::Float64
duration_in_seconds::Union{Float64,Nothing}
annotations::Vector{String}
function TimestampedAnnotationList(onset_in_seconds, duration_in_seconds, annotations)
onset_in_seconds = _nearest_representable_edf_time_value(onset_in_seconds)
duration_in_seconds = _nearest_representable_edf_time_value(duration_in_seconds)
return new(onset_in_seconds, duration_in_seconds, annotations)
end
end
_nearest_representable_edf_time_value(::Nothing) = nothing
function _nearest_representable_edf_time_value(x)
return round(x; digits=(8 - (ndigits(floor(Int, x)) + signbit(x) + isinteger(x))))
end
function Base.:(==)(a::TimestampedAnnotationList, b::TimestampedAnnotationList)
return a.onset_in_seconds == b.onset_in_seconds &&
a.duration_in_seconds == b.duration_in_seconds &&
a.annotations == b.annotations
end
"""
EDF.AnnotationsSignal
Type representing a single EDF Annotations signal.
# Fields
* `samples_per_record::Int16`
* `records::Vector{Vector{TimestampedAnnotationList}}`
"""
struct AnnotationsSignal
samples_per_record::Int16
records::Vector{Vector{TimestampedAnnotationList}}
end
function AnnotationsSignal(header::SignalHeader)
records = Vector{TimestampedAnnotationList}[]
return AnnotationsSignal(header.samples_per_record, records)
end
"""
AnnotationsSignal(records::Vector{Vector{TimestampedAnnotationList}})
Return `AnnotationsSignal(samples_per_record, records)` where `samples_per_record`
is the minimum value required to write out each record completely (i.e. the maximum
required `samples_per_record` across all records).
"""
function AnnotationsSignal(records::Vector{Vector{TimestampedAnnotationList}})
# Actually writing out the TALs in order to calculate the minimum necessary
# `samples_per_record` is super wasteful from a performance perspective, but
# is at least robust/self-consistent with the rest of the package. In theory
# we shouldn't even store this value, and rather always compute it on write,
# but cleanly refactoring the package to do this would be a more involved
# change than is merited at the moment (since normal signals are already
# treated similarly, i.e. the `SignalHeader` is overly trusted).
max_bytes_per_record = maximum(sum(write_tal(IOBuffer(), tal) for tal in record) for record in records)
return AnnotationsSignal(Int16(cld(max_bytes_per_record, 2)), records)
end
function SignalHeader(signal::AnnotationsSignal)
return SignalHeader("EDF Annotations", "", "", -1, 1, -32768, 32767,
"", signal.samples_per_record)
end
#####
##### EDF+ Patient/Recording Metadata
#####
"""
EDF.PatientID
A type representing the local patient identification field of an EDF+ header.
See EDF+ specification for details.
# Fields
* `code::Union{String,Missing}`
* `sex::Union{Char,Missing}` (`'M'`, `'F'`, or `missing`)
* `birthdate::Union{Date,Missing}`
* `name::Union{String,Missing}`
"""
struct PatientID
code::Union{String,Missing}
sex::Union{Char,Missing}
birthdate::Union{Date,Missing}
name::Union{String,Missing}
end
"""
EDF.RecordingID
A type representing the local recording identification field of an EDF+ header.
See EDF+ specification for details.
# Fields
* `startdate::Union{Date,Missing}`
* `admincode::Union{String,Missing}`
* `technician::Union{String,Missing}`
* `equipment::Union{String,Missing}`
"""
struct RecordingID
startdate::Union{Date,Missing}
admincode::Union{String,Missing}
technician::Union{String,Missing}
equipment::Union{String,Missing}
end
#####
##### `EDF.File`
#####
const BYTES_PER_FILE_HEADER = 256
const BYTES_PER_SIGNAL_HEADER = 256
"""
EDF.FileHeader
Type representing the parsed header record of an `EDF.File` (excluding signal headers).
# Fields
* `version::String`: data format version
* `patient::Union{String,PatientID}`: local patient identification
* `recording::Union{String,RecordingID}`: local recording identification
* `start::DateTime`: start date/time of the recording
* `is_contiguous::Bool`: if `true`, data records are contiguous; is `true` for non-EDF+-compliant files
* `record_count::Int`: number of data records in the recording
* `seconds_per_record::Float64`: duration of a data record in seconds
"""
struct FileHeader
version::String
patient::Union{String,PatientID}
recording::Union{String,RecordingID}
start::DateTime
is_contiguous::Bool
record_count::Int
seconds_per_record::Float64
end
"""
EDF.File{T,I<:IO}
Type representing an EDF file with samples encoded as values of type `T`, which is
`Int16` for EDF files and `Int24` (internally defined) for BDF files.
# Fields
* `io::I`
* `header::FileHeader`
* `signals::Vector{Union{Signal{T},AnnotationsSignal}}`
"""
struct File{T<:SUPPORTED_SAMPLE_TYPES,I<:IO}
io::I
header::FileHeader
signals::Vector{Union{Signal{T},AnnotationsSignal}}
end
function Base.show(io::IO, edf::File{T}) where T
print(io, "EDF.File with ", length(edf.signals), ' ', 8 * sizeof(T),
"-bit-encoded signals")
return nothing
end
Base.close(file::File) = close(file.io)
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 6765 | #####
##### utilities
#####
_edf_repr(value::Union{String,Char}) = value
_edf_repr(date::Date) = uppercase(Dates.format(date, dateformat"dd-u-yyyy"))
_edf_repr(date::DateTime) = Dates.format(date, dateformat"dd\.mm\.yyHH\.MM\.SS")
# XXX this is really really hacky and doesn't support use of scientific notation
# where appropriate; keep in mind if you do improve this to support scientific
# notation, that scientific is NOT allowed in EDF annotation onset/duration fields
function _edf_repr(x::Real)
result = missing
if isinteger(x)
str = string(trunc(Int, x))
if length(str) <= 8
result = str
end
else
fpart, ipart = modf(x)
ipart_str = string('-'^signbit(x), Int(abs(ipart))) # handles `-0.0` case
fpart_str = @sprintf "%.7f" abs(fpart)
fpart_str = fpart_str[3:end] # remove leading `0.`
if length(ipart_str) < 7
result = ipart_str * '.' * fpart_str[1:(7 - length(ipart_str))]
elseif length(ipart_str) <= 8
result = ipart_str
end
end
if !ismissing(result)
if all(c -> c in ('0', '.', '-'), result)
x == 0 && return result
else
return result
end
end
error("failed to fit number into EDF's 8 ASCII character limit: $x")
end
_edf_metadata_repr(::Missing) = 'X'
_edf_metadata_repr(x) = _edf_repr(x)
function _edf_repr(metadata::T) where T<:Union{PatientID,RecordingID}
header = T <: RecordingID ? String["Startdate"] : String[]
return join([header; [_edf_metadata_repr(getfield(metadata, name)) for name in fieldnames(T)]], ' ')
end
function edf_write(io::IO, value, byte_limit::Integer)
edf_value = _edf_repr(value)
sizeof(edf_value) > byte_limit && error("EDF value exceeded byte limit (of $byte_limit bytes) while writing: $value")
bytes_written = Base.write(io, edf_value)
while bytes_written < byte_limit
bytes_written += Base.write(io, UInt8(' '))
end
return bytes_written
end
# NOTE: The fast-path in `Base.write` that uses `unsafe_write` will include alignment
# padding bytes, which is fine for `Int16` but causes `Int24` to write an extra byte
# for each value. To get around this, we'll fall back to a naive implementation when
# the size of the element type doesn't match its aligned size. (See also `read_to!`)
function write_from(io::IO, x::AbstractArray{T}) where {T}
if sizeof(T) == Base.aligned_sizeof(T)
return Base.write(io, x)
else
n = 0
for xi in x
n += Base.write(io, xi)
end
return n
end
end
#####
##### `write_header`
#####
function write_header(io::IO, file::File)
length(file.signals) <= 9999 || error("EDF does not allow files with more than 9999 signals")
expected_bytes_written = BYTES_PER_FILE_HEADER + BYTES_PER_SIGNAL_HEADER * length(file.signals)
bytes_written = 0
bytes_written += edf_write(io, file.header.version, 8)
bytes_written += edf_write(io, file.header.patient, 80)
bytes_written += edf_write(io, file.header.recording, 80)
bytes_written += edf_write(io, file.header.start, 16)
bytes_written += edf_write(io, expected_bytes_written, 8)
bytes_written += edf_write(io, file.header.is_contiguous ? "EDF+C" : "EDF+D", 44)
bytes_written += edf_write(io, file.header.record_count, 8)
bytes_written += edf_write(io, file.header.seconds_per_record, 8)
bytes_written += edf_write(io, length(file.signals), 4)
signal_headers = SignalHeader.(file.signals)
for (field_name, byte_limit) in SIGNAL_HEADER_FIELDS
for signal_header in signal_headers
field = getfield(signal_header, field_name)
bytes_written += edf_write(io, field, byte_limit)
end
end
bytes_written += edf_write(io, ' ', 32 * length(file.signals))
@assert bytes_written == expected_bytes_written
return bytes_written
end
#####
##### `write_signals`
#####
function write_signals(io::IO, file::File)
bytes_written = 0
for record_index in 1:file.header.record_count
for signal in file.signals
bytes_written += write_signal_record(io, signal, record_index)
end
end
return bytes_written
end
function write_signal_record(io::IO, signal::Signal, record_index::Int)
record_start = 1 + (record_index - 1) * signal.header.samples_per_record
record_stop = record_index * signal.header.samples_per_record
record_stop = min(record_stop, length(signal.samples))
return write_from(io, view(signal.samples, record_start:record_stop))
end
function write_signal_record(io::IO, signal::AnnotationsSignal, record_index::Int)
bytes_written = 0
for tal in signal.records[record_index]
bytes_written += write_tal(io, tal)
end
bytes_per_record = 2 * signal.samples_per_record
while bytes_written < bytes_per_record
bytes_written += Base.write(io, 0x00)
end
return bytes_written
end
function write_tal(io::IO, tal::TimestampedAnnotationList)
bytes_written = 0
if !signbit(tal.onset_in_seconds) # otherwise, the `-` will already be in number string
bytes_written += Base.write(io, '+')
end
bytes_written += Base.write(io, _edf_repr(tal.onset_in_seconds))
if tal.duration_in_seconds !== nothing
bytes_written += Base.write(io, 0x15)
bytes_written += Base.write(io, _edf_repr(tal.duration_in_seconds))
end
if isempty(tal.annotations)
bytes_written += Base.write(io, 0x14)
bytes_written += Base.write(io, 0x14)
else
for annotation in tal.annotations
bytes_written += Base.write(io, 0x14)
bytes_written += Base.write(io, annotation)
bytes_written += Base.write(io, 0x14)
end
end
bytes_written += Base.write(io, 0x00)
return bytes_written
end
#####
##### API functions
#####
"""
EDF.write(io::IO, file::EDF.File)
EDF.write(path::AbstractString, file::EDF.File)
Write `file` to the given output, returning the number of bytes written.
"""
function write(io::IO, file::File)
if !file.header.is_contiguous && !any(s -> s isa AnnotationsSignal, file.signals)
message = """
`file.header.is_contiguous` is `false` but `file.signals` does not contain
an `AnnotationsSignal`; this is required as per the EDF+ specification for
noncontiguous files in order to specify the start time of each data record
(see section 2.2.4 for details).
"""
throw(ArgumentError(message))
end
return write_header(io, file) + write_signals(io, file)
end
write(path::AbstractString, file::File) = Base.open(io -> write(io, file), path, "w")
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | code | 11724 | using EDF
using EDF: TimestampedAnnotationList, PatientID, RecordingID, SignalHeader,
Signal, AnnotationsSignal
using Dates
using FilePathsBase
using Test
#####
##### Testing utilities
#####
function deep_equal(a::T, b::T) where T
nfields = fieldcount(T)
if nfields == 0
return isequal(a, b) # Use `isequal` instead of `==` to handle `missing`
else
for i = 1:nfields
typeof(getfield(a, i)) <: IO && continue # Two different files will have different IO sources
isdefined(a, i) || return !isdefined(b, i) # Call two undefs equal
deep_equal(getfield(a, i), getfield(b, i)) || return false
end
end
return true
end
function deep_equal(a::T, b::T) where T<:AbstractArray
length(a) == length(b) || return false
for (x, y) in zip(a, b)
deep_equal(x, y) || return false
end
return true
end
deep_equal(a::T, b::S) where {T,S} = false
#####
##### Actual tests
#####
const DATADIR = joinpath(@__DIR__, "data")
@testset "Just Do It" begin
# test EDF.read(::AbstractString)
edf = EDF.read(joinpath(DATADIR, "test.edf"))
@test sprint(show, edf) == "EDF.File with 140 16-bit-encoded signals"
@test edf.header.version == "0"
@test edf.header.patient == PatientID(missing, missing, missing, missing)
@test edf.header.recording == RecordingID(Date(2014, 4, 29), missing, missing, missing)
@test edf.header.is_contiguous
@test edf.header.start == DateTime(2014, 4, 29, 22, 19, 44)
@test edf.header.record_count == 6
@test edf.header.seconds_per_record == 1.0
@test edf.signals isa Vector{Union{Signal{Int16},AnnotationsSignal}}
@test length(edf.signals) == 140
for signal in edf.signals
if signal isa EDF.Signal
@test length(signal.samples) == signal.header.samples_per_record * edf.header.record_count
else
@test length(signal.records) == edf.header.record_count
# XXX seems like this test file actually contains nonsensical onset timestamps...
# according to the EDF+ specification, onsets should be relative to the start time of
# the entire file, but it seems like whoever wrote these onsets might have used values
# that were relative to the start of the surrounding data record
expected = [[TimestampedAnnotationList(0.0, nothing, String[""]), TimestampedAnnotationList(0.0, nothing, ["start"])],
[TimestampedAnnotationList(1.0, nothing, String[""]), TimestampedAnnotationList(0.1344, 0.256, ["type A"])],
[TimestampedAnnotationList(2.0, nothing, String[""]), TimestampedAnnotationList(0.3904, 1.0, ["type A"])],
[TimestampedAnnotationList(3.0, nothing, String[""]), TimestampedAnnotationList(2.0, nothing, ["type B"])],
[TimestampedAnnotationList(4.0, nothing, String[""]), TimestampedAnnotationList(2.5, 2.5, ["type A"])],
[TimestampedAnnotationList(5.0, nothing, String[""])]]
@test all(signal.records .== expected)
@test AnnotationsSignal(signal.records).samples_per_record == 16
end
end
# test EDF.write(::IO, ::EDF.File)
io = IOBuffer()
EDF.write(io, edf)
seekstart(io)
file = EDF.File(io)
@test deep_equal(edf.header, file.header)
@test all(isempty(s isa Signal ? s.samples : s.records) for s in file.signals)
EDF.read!(file)
@test deep_equal(edf.signals, file.signals)
@test eof(io)
# ensure that multiple `EDF.read!` calls don't error and have no effect by
# simply rerunning the exact same test as above
EDF.read!(file)
@test deep_equal(edf.signals, file.signals)
# test that EDF.write(::IO, ::EDF.File) errors if file is
# discontiguous w/o an AnnotationsSignal present
bad_file = EDF.File(IOBuffer(),
EDF.FileHeader(file.header.version,
file.header.patient,
file.header.recording,
file.header.start,
false, # is_contiguous
file.header.record_count,
file.header.seconds_per_record),
filter(s -> !(s isa AnnotationsSignal), file.signals))
@test_throws ArgumentError EDF.write(IOBuffer(), bad_file)
# test EDF.write(::AbstractString, ::EDF.File)
mktempdir() do dir
path = joinpath(dir, "tmp.edf")
EDF.write(path, edf)
file = EDF.File(open(path, "r"))
@test deep_equal(edf.header, file.header)
@test all(isempty(s isa Signal ? s.samples : s.records) for s in file.signals)
EDF.read!(file)
@test deep_equal(edf.signals, file.signals)
@test eof(io)
end
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(-0.0023405432)) == "-0.00234"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(0.0023405432)) == "0.002340"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(1.002343)) == "1.002343"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(1011.05432)) == "1011.054"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(-1011.05432)) == "-1011.05"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(-1013441.5)) == "-1013442"
@test EDF._edf_repr(EDF._nearest_representable_edf_time_value(-1013441.3)) == "-1013441"
@test EDF._edf_repr(34577777) == "34577777"
@test EDF._edf_repr(0.0345) == "0.034500"
@test EDF._edf_repr(-0.02) == "-0.02000"
@test EDF._edf_repr(-187.74445) == "-187.744"
@test_throws ErrorException EDF._edf_repr(123456789)
@test_throws ErrorException EDF._edf_repr(-12345678)
@test_throws ErrorException EDF._edf_repr(0.00000000024)
@test_throws ErrorException EDF.edf_write(IOBuffer(), "hahahahaha", 4)
uneven = EDF.read(joinpath(DATADIR, "test_uneven_samp.edf"))
@test sprint(show, uneven) == "EDF.File with 2 16-bit-encoded signals"
@test uneven.header.version == "0"
@test uneven.header.patient == "A 3Hz sinewave and a 0.2Hz block signal, both starting in their positive phase"
@test uneven.header.recording == "110 seconds from 13-JUL-2000 12.05.48hr."
@test uneven.header.is_contiguous
@test uneven.header.start == DateTime(2000, 1, 31, 23, 0, 59)
@test uneven.header.record_count == 11
@test uneven.header.seconds_per_record == 10.0
@test uneven.signals[1].header.samples_per_record != uneven.signals[2].header.samples_per_record
@test length(uneven.signals) == 2
nonint = EDF.read(joinpath(DATADIR, "test_float_extrema.edf"))
signal = nonint.signals[1]
@test signal.header.physical_minimum ≈ -29483.1f0
@test signal.header.physical_maximum ≈ 29483.12f0
@test signal.header.digital_minimum ≈ -32767.0f0
@test signal.header.digital_maximum ≈ 32767.0f0
# Python code for generating the comparison values used here:
# ```
# import mne
# edf = mne.io.read_raw_edf("test/data/test_float_extrema.edf")
# signal = edf.get_data()[0] * 1e6 # The 1e6 converts volts back to microvolts
# with open("test/data/mne_values.csv", "w") as f:
# for x in signal:
# f.write("%s\n" % x)
# ```
mne = map(line->parse(Float32, line), eachline(joinpath(DATADIR, "mne_values.csv")))
for (a, b) in zip(EDF.decode(signal), mne)
@test a ≈ b atol=0.01
end
# Truncated files
dir = mktempdir(; cleanup=true)
for full_file in ["test.edf", "evt.bdf"]
# note that this tests a truncated final record, not an incorrect number of records
truncated_file = joinpath(dir, "test_truncated" * last(splitext(full_file)))
full_edf_bytes = read(joinpath(DATADIR, full_file))
write(truncated_file, full_edf_bytes[1:(end - 1)])
@test_logs((:warn, "Number of data records in file header does not match " *
"file size. Skipping 1 truncated data record(s)."),
EDF.read(truncated_file))
edf = EDF.read(joinpath(DATADIR, full_file))
truncated_edf = EDF.read(truncated_file)
for field in fieldnames(EDF.FileHeader)
a = getfield(edf.header, field)
b = getfield(truncated_edf.header, field)
if field === :record_count
@test b == a - 1
else
@test a == b
end
end
for i in 1:length(edf.signals)
good = edf.signals[i]
bad = truncated_edf.signals[i]
if good isa EDF.Signal
@test deep_equal(good.header, bad.header)
@test good.samples[1:(end - good.header.samples_per_record)] == bad.samples
else
@test good.samples_per_record == bad.samples_per_record
end
end
@test deep_equal(edf.signals[end].records[1:(edf.header.record_count - 1)],
truncated_edf.signals[end].records)
# Ensure that "exotic" IO types work for truncated records if the requisite
# methods exist
fb = FileBuffer(Path(truncated_file))
@test EDF._size(fb) == length(full_edf_bytes) - 1
fb_edf = EDF.read(fb)
@test deep_equal(truncated_edf.header, fb_edf.header)
@test deep_equal(truncated_edf.signals, fb_edf.signals)
end
@test EDF._size(IOBuffer("memes")) == 5
@test EDF._size(Base.DevNull()) == -1
@testset "BDF Files" begin
# The corresponding EDF file was exported by 3rd party software based on the BDF,
# so some differences are inevitable, but we just want to check that the values
# are Close Enough™.
bdf = EDF.read(joinpath(DATADIR, "bdf_test.bdf"))
comp = EDF.read(joinpath(DATADIR, "bdf_test.edf"))
for i in 1:8
bdf_values = EDF.decode(bdf.signals[i])
comp_values = EDF.decode(comp.signals[i])
@test bdf_values ≈ comp_values rtol=0.01
end
# Ensure that BDF files can also be round-tripped
mktempdir() do dir
path = joinpath(dir, "tmp.bdf")
EDF.write(path, bdf)
file = EDF.read(path)
@test deep_equal(bdf, file)
end
@test EDF.sample_type(bdf) == EDF.Int24
@test EDF.sample_type(comp) == Int16
@test EDF.is_bdf(bdf)
@test !EDF.is_bdf(comp)
@test sprint(show, bdf) == "EDF.File with 8 24-bit-encoded signals"
end
@testset "FilePathsBase support" begin
# test EDF.read(::AbstractPath)
edf = EDF.read(Path(joinpath(DATADIR, "test.edf")))
@test sprint(show, edf) == "EDF.File with 140 16-bit-encoded signals"
# emulate EDF.read(::S3Path)
io = FileBuffer(Path(joinpath(DATADIR, "test.edf")))
edf = EDF.File(io)
@test sprint(show, edf) == "EDF.File with 140 16-bit-encoded signals"
end
end
@testset "BDF+ Files" begin
# This is a `BDF+` file containing only trigger information.
# It is similiar to a `EDF Annotations` file except that
# The `ANNOTATIONS_SIGNAL_LABEL` is `BDF Annotations`.
# The test data has 1081 trigger events, and
# has 180 trials in total, and
# The annotation `255` signifies the offset of a trial.
# More information, contact: [email protected]
evt = EDF.read(joinpath(DATADIR, "evt.bdf"))
events = evt.signals[2].records
@test length(events) == 1081
annotations = [event[end].annotations[1] for event in events]
@test count(==("255"), annotations) == 180
end
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | docs | 1451 | # EDF.jl
[](https://github.com/beacon-biosignals/EDF.jl/actions/workflows/ci.yml)
[](https://codecov.io/gh/beacon-biosignals/EDF.jl)
[](https://beacon-biosignals.github.io/EDF.jl/stable)
[](https://beacon-biosignals.github.io/EDF.jl/dev)
Read and write [European Data Format (EDF/EDF+)](https://www.edfplus.info/) and [BioSemi Data Format (BDF)](https://www.biosemi.com/faq/file_format.htm) files in Julia.
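A minimal usage sketch (with `example.edf` as a placeholder path):

```julia
using EDF

file = EDF.read("example.edf")          # read headers and all sample data
for signal in file.signals
    signal isa EDF.Signal || continue   # skip annotation signals
    samples = EDF.decode(signal)        # decode samples to physical units
    println(signal.header.label, ": ", length(samples), " samples")
end
```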
Compared to all features implied by the EDF/EDF+ specifications, this package is currently missing:
- Out-of-core data record streaming; this package (and its type representations, i.e. `EDF.Signal`) generally assumes the user is loading all of a file's sample data into memory at once.
- Specialization for discontinuous EDF+ files ("EDF+D" files).
- Validation/specialization w.r.t. ["canonical/standard EDF texts"](https://www.edfplus.info/specs/edftexts.html)
- Validation-on-write of manually constructed `EDF.File`s
- Support for [the EDF+ `longinteger`/`float` extension](https://www.edfplus.info/specs/edffloat.html)
Where practical, this package chooses field names that are as close to EDF/EDF+ specification terminology as possible.
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.7.4 | 6b53bfd4ae712b3eb826db3536afd624fde8bf85 | docs | 1179 | # EDF.jl
EDF.jl is a Julia package for working with [European Data Format (EDF)](https://edfplus.info)
and [BioSemi Data Format (BDF)](https://www.biosemi.com/faq/file_format.htm) files,
including reading, writing, and an intermediate representation for direct access to data.
## Package API
```@meta
CurrentModule = EDF
```
### Representation of Data
```@docs
EDF.File
EDF.FileHeader
EDF.SignalHeader
EDF.Signal
EDF.AnnotationsSignal
EDF.TimestampedAnnotationList
EDF.PatientID
EDF.RecordingID
EDF.sample_type
EDF.is_bdf
```
The EDF+ specification introduced the notion of discontiguous signals, denoted with a value of "EDF+D" in one of the reserved fields; the `EDF.FileHeader` type notes this in a `Bool` field called `is_contiguous`. EDF.jl always *stores* signal data contiguously, regardless of whether the data records are declared to be contiguous, but, given an `EDF.Signal`, users of the package can construct a lazy iterator over non-overlapping chunks of a `signal::EDF.Signal` via:
```julia
Iterators.partition(signal.samples, signal.header.samples_per_record)
```
### Reading
```@docs
EDF.read
EDF.read!
EDF.decode
```
### Writing
```@docs
EDF.write
```
| EDF | https://github.com/beacon-biosignals/EDF.jl.git |
|
[
"MIT"
] | 0.1.0 | 24c0c59562f7551fe19af89b5ab2881234e4a621 | code | 676 | using TelegraphNoise
using Documenter
DocMeta.setdocmeta!(TelegraphNoise, :DocTestSetup, :(using TelegraphNoise); recursive=true)
makedocs(;
modules=[TelegraphNoise],
authors="W. Joe Meese <[email protected]> and contributors",
repo="https://github.com/meese-wj/TelegraphNoise.jl/blob/{commit}{path}#{line}",
sitename="TelegraphNoise.jl",
format=Documenter.HTML(;
prettyurls=get(ENV, "CI", "false") == "true",
canonical="https://meese-wj.github.io/TelegraphNoise.jl",
assets=String[],
),
pages=[
"Home" => "index.md",
],
)
deploydocs(;
repo="github.com/meese-wj/TelegraphNoise.jl",
devbranch="main",
)
| TelegraphNoise | https://github.com/meese-wj/TelegraphNoise.jl.git |
|
[
"MIT"
] | 0.1.0 | 24c0c59562f7551fe19af89b5ab2881234e4a621 | code | 6477 | @doc raw"""
# Random Telegraph Noise (RTN)
RTN is a type of random signal with two states, on or off, up or down, _etc._ with a mean
_dwell time_ ``T_D`` which characterizes the amount of time the signal spends in each state
before switching.
This module provides an easy framework to generate such signals, as they happen to have
well known analytical properties. For example, the autocovariance of the signal (often
called the _autocorrelation_ in physics literature) goes as
```math
\mathcal{A}(t, t_0; T_D) = \exp\left( -2\,\frac{\vert t - t_0 \vert}{T_D} \right).
```
Therefore, the characteristic _autocorrelation time_ ``\tau = T_D/2``. Importantly, the
expression above shows that these random signals are _stationary_ meaning that correlations
are time-translation invariant. This means a random telgraph signal is well-suited for testing
autocorrelations of random signals, for example those generated by Markov Chain Monte Carlo
methods.
"""
module TelegraphNoise
using Random
import Base
export Telegraph, length, eltype, expd_τ, generate_telegraph, generate_telegraph!, poisson_rand
"""
Telegraph{T}([amplitude = one(T)], dwell_time, signal)
Wrapper that contains the relevant information for a given `Telegraph` signal.
"""
struct Telegraph{T <: AbstractFloat}
amplitude::T
dwell_time::T
signal::Vector{T}
end
function Telegraph(amplitude::Real, dwell_time::Real, signal::Vector)
_check_telegraph_params(amplitude, dwell_time)
return Telegraph( convert(Float64, amplitude), convert(Float64, dwell_time), convert.(Float64, signal) )
end
function _check_telegraph_params(amplitude::Real, dwell_time::Real)
if amplitude <= zero(amplitude)
throw(ArgumentError("\nSignal amplitudes must be positive.\n"))
end
if dwell_time <= zero(dwell_time)
throw(ArgumentError("\nSignal dwell times must be positive.\n"))
end
return nothing
end
function _check_telegraph_params(amplitude::Real, dwell_time::Real, signal_length)
_check_telegraph_params(amplitude, dwell_time)
if signal_length <= zero(Int)
throw(ArgumentError("\nSignal length must be convertible to a positive integer.\n"))
end
return nothing
end
"""
length(tele::Telegraph) → Int
Dispatch `Base.length` for the [`Telegraph`](@ref) object.
# Additional information
* Wrapper around `length(tele.signal)`.
"""
Base.length(tele::Telegraph) = Base.length(tele.signal)
"""
eltype(tele::Telegraph) → Type
Dispatch `Base.eltype` for the [`Telegraph`](@ref) object.
# Additional information
* Wrapper around `Base.eltype(tele.signal)`.
"""
Base.eltype(tele::Telegraph) = eltype(tele.signal)
"""
expd_τ(tele::Telegraph{T}) → T
Return the expected autocorrelation time from a random telegraph signal.
"""
expd_τ(tele::Telegraph) = convert(typeof(tele.dwell_time), 0.5) * tele.dwell_time
Telegraph(dwell_time::T, signal::Vector{T}) where {T <: AbstractFloat} = Telegraph(one(T), dwell_time, signal)
"""
Telegraph{T}([rng = default_rng()], [amplitude = one(T)], dwell_time, signal_length::Int)
Constructor that specifies the length of the signal rather than the signal itself.
"""
Telegraph(rng::AbstractRNG, amplitude::Real, dwell_time::Real, signal_length::Real) = generate_telegraph(rng, dwell_time, signal_length; amplitude = amplitude)
Telegraph(rng::AbstractRNG, dwell_time::Real, signal_length::Real) = generate_telegraph(rng, convert(Float64, dwell_time), signal_length)
Telegraph(amplitude, dwell_time, signal_length::Real) = generate_telegraph(convert(Float64, dwell_time), signal_length; amplitude = convert(Float64, amplitude))
Telegraph(dwell_time::Real, signal_length::Real) = generate_telegraph(convert(Float64, dwell_time), signal_length)
"""
generate_telegraph([rng = default_rng()], dwell_time, signal_length; amplitude = one(T) ) → Telegraph
Function that initializes a random [`Telegraph`](@ref) signal with a
specified `dwell_time` and of a given length `signal_length`.
"""
function generate_telegraph(rng::AbstractRNG, dwell_time::Real, signal_length; amplitude = one(Float64) )
signal_length = convert(Int, signal_length)
_check_telegraph_params(amplitude, dwell_time)
tele = Telegraph(amplitude, dwell_time, zeros(Float64, signal_length))
last_idx = 1
tele.signal[last_idx] = ifelse( rand(rng) < 0.5, tele.amplitude, -tele.amplitude )
while last_idx < signal_length
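# Draw the dwell length of the next run, clamp it so it does not overrun
# the signal, then fill that run with the opposite state.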
stepsize = poisson_rand(rng, dwell_time)
stepsize = ifelse( last_idx + 1 + stepsize > signal_length, signal_length - (last_idx + 1), stepsize )
next_value = ifelse( tele.signal[last_idx] == tele.amplitude, -tele.amplitude, tele.amplitude )
tele.signal[last_idx + 1 : last_idx + 1 + stepsize] .= next_value
last_idx = last_idx + 1 + stepsize
end
return tele
end
generate_telegraph(dwell_time::T, signal_length; amplitude = one(T)) where {T <: Real} = generate_telegraph(Random.default_rng(), dwell_time, signal_length; amplitude = amplitude)
@doc raw"""
poisson_rand([rng = default_rng()], ::Type{T}, dwell_time) → T
Generate a random number of steps in which to stay in the next state.
# Additional information
The probability that an RTN signal will _dwell_ in its current state for a time
``t \in (t_0, t_0 + {\rm d} t)`` is given by
```math
{\rm Pr}\left( t \in (t_0, t_0 + {\rm d} t) \right) = {\rm e}^{-t/T_D} \cdot \frac{{\rm d}t}{T_D}.
```
One then samples from this probability distribution using the _inverse-CDF_ method and obtains
```math
t \approx {\rm floor} \left[ -T_D \ln \left( 1 - u \right) \right],
```
with ``u \in (0, 1)`` being a uniform random `Float` generated by `rand()`. The approximation
is necessary as ``t`` represents a discrete time in a time series.
"""
poisson_rand(::Type{T}, dwell_time) where {T} = _poisson_floor(T, dwell_time)
poisson_rand(rng::AbstractRNG, ::Type{T}, dwell_time) where {T} = _poisson_floor(T, dwell_time, rng)
_poisson_func(dwell_time, rng::AbstractRNG = Random.default_rng()) = -dwell_time * log(1.0 - rand(rng))
_poisson_floor(::Type{T}, dwell_time, rng::AbstractRNG = Random.default_rng()) where {T} = floor(T, _poisson_func(dwell_time, rng))
"""
poisson_rand([rng = default_rng()], dwell_time) → Int
Default implementation of the [`poisson_rand`](@ref) is to return an `Int` for the size of the _dwell_.
"""
poisson_rand(dwell_time) = poisson_rand(Int, dwell_time)
poisson_rand(rng::AbstractRNG, dwell_time) = poisson_rand(rng, Int, dwell_time)
end
| TelegraphNoise | https://github.com/meese-wj/TelegraphNoise.jl.git |
|
[
"MIT"
] | 0.1.0 | 24c0c59562f7551fe19af89b5ab2881234e4a621 | code | 2368 | using TelegraphNoise
using Test
using Random
using StableRNGs # for testing
# For testing purposes.
# The order of the tests cannot be changed without changing the output.
stable_rng = StableRNG(42)
@testset "TelegraphNoise.jl" begin
# 1) Test the expected autocorrelation time
@test let
tele = Telegraph(50.0, [0.])
expd_τ(tele) == 25.0
end
# 2) Test the (dwell_time, signal_length) constructor
@test let
tele = Telegraph(stable_rng, 2.0, 10)
( tele.dwell_time == 2.0 && length(tele.signal) == 10 )
end
# 3) Test whether the only unique values are tele.amplitude and -tele.amplitude
@test let
tele = Telegraph(500., 1e8)
unique_vals = sort(unique(tele.signal))
unique_vals == [-tele.amplitude, tele.amplitude]
end
# 4) Test whether the only unique values are tele.amplitude and -tele.amplitude
@test let
tele = Telegraph(42., 500., 1e8)
unique_vals = sort(unique(tele.signal))
unique_vals == [-tele.amplitude, tele.amplitude]
end
# 5) Test the population of the one state to see if it's about 50%
@test let
tele = Telegraph(500., 1e8)
isapprox( count(x -> x == tele.amplitude, tele.signal) / length(tele), 0.5, atol = 0.03 )
end
# 6) Test the population of the one state to see if it's about 50%
# This test changes the amplitude as well from the default to 25.
@test let
tele = Telegraph(25, 500., 1e8)
isapprox( count(x -> x == tele.amplitude, tele.signal) / length(tele), 0.5, atol = 0.03 )
end
# 7) Test the poisson_rand functionality for integers
@test let
poisson_rand(stable_rng, 50.) == 10 && poisson_rand(stable_rng, Int32, 5000) == 17725
end
# 8) Test that the amplitude is positive
@test_throws ArgumentError let
Telegraph(-25, 500, 1000)
end
# 9) Test that the dwell time is positive
@test_throws ArgumentError let
Telegraph(25, -500, 1000)
end
# 10) Test that the amplitude is positive without generate_telegraph
@test_throws ArgumentError let
Telegraph(-25, 500., zeros(1000))
end
# 11) Test that the dwell time is positive without generate_telegraph
@test_throws ArgumentError let
Telegraph(25, -500., zeros(1000))
end
end
| TelegraphNoise | https://github.com/meese-wj/TelegraphNoise.jl.git |
|
[
"MIT"
] | 0.1.0 | 24c0c59562f7551fe19af89b5ab2881234e4a621 | docs | 2006 | # TelegraphNoise
<!-- [](https://meese-wj.github.io/TelegraphNoise.jl/stable) -->
[](https://meese-wj.github.io/TelegraphNoise.jl/dev)
[](https://github.com/meese-wj/TelegraphNoise.jl/actions/workflows/CI.yml?query=branch%3Amain)
A Julia package for generating random telegraph noise (RTN).
RTN, also known as [_burst noise_](https://en.wikipedia.org/wiki/Burst_noise?oldformat=true) or a _random telegraph signal_, has a set of useful analytical properties which can make it ideal for testing the statistical analyses of time series. For example, in the simplest cases, RTNs have two equally probable states and are characterized by a single time scale, known as the _dwell time_ $T_D$, which represents the average time spent in either state before switching. The probability of the signal inhabiting either state for a time $t \in (t_0, t_0 + {\rm d}t)$ is given by
$$ {\rm Pr}\left( t \in (t_0, t_0 + {\rm d} t) \right) = {\rm e}^{-t/T_D} \cdot \frac{{\rm d}t}{T_D}.$$
One can then [show](https://dsp.stackexchange.com/questions/16596/autocorrelation-of-a-telegraph-process-constant-signal) that the autocovariance $\mathcal{A}$ (autocorrelation for de-meaned signals) goes as
$$ \mathcal{A}(t, t_0; T_D) = \exp\left( -2 \\; \frac{\vert t - t_0 \vert}{T_D} \right),$$
showing the stationarity of these processes [[1]](#1). Furthermore, the _autocorrelation time_ $\tau$ of such a signal then follows exactly as $\tau = T_D /2$.
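A minimal sketch of generating such a signal with this package (the dwell time and signal length below are arbitrary):

```julia
using TelegraphNoise

tele = Telegraph(50.0, 10_000)  # dwell time T_D = 50, signal of length 10_000
expd_τ(tele)                    # expected autocorrelation time: T_D / 2 = 25.0
unique(tele.signal)             # the two states, ±tele.amplitude
```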
## Additional Information
<a id="1">[1]</a>
For an additional derivation, and a great overall introduction to the science of noise analysis in signals, check out D. K. C. MacDonald's [_Noise and Fluctuations_](https://isbnsearch.org/isbn/9780486450292) (orig. 1962, _Dover Publications_ 2006). The derivation is specifically in Appendix IV.
| TelegraphNoise | https://github.com/meese-wj/TelegraphNoise.jl.git |
|
[
"MIT"
] | 0.1.0 | 24c0c59562f7551fe19af89b5ab2881234e4a621 | docs | 206 | ```@meta
CurrentModule = TelegraphNoise
```
# TelegraphNoise
Documentation for [TelegraphNoise](https://github.com/meese-wj/TelegraphNoise.jl).
```@index
```
```@autodocs
Modules = [TelegraphNoise]
```
| TelegraphNoise | https://github.com/meese-wj/TelegraphNoise.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 621 | using Documenter, ANSIColoredPrinters
makedocs(
clean = false,
checkdocs = :exports,
modules=[ANSIColoredPrinters],
format=Documenter.HTML(prettyurls = get(ENV, "CI", nothing) == "true",
assets = ["assets/default.css"]),
sitename="ANSIColoredPrinters",
pages=[
"Introduction" => "index.md",
"Output Formats" => "output-formats.md",
"Supported Codes" => "supported-codes.md",
"Reference" => "reference.md",
]
)
deploydocs(
repo="github.com/JuliaDocs/ANSIColoredPrinters.jl.git",
devbranch = "main",
push_preview = true
)
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 7115 | module ANSIColoredPrinters
import Base: ==, show, showable
export PlainTextPrinter, HTMLPrinter
abstract type AbstractPrinter end
"""
StackModelPrinter
An abstract printer type for stack-based or tree model formats.
"""
abstract type StackModelPrinter <: AbstractPrinter end
"""
FlatModelPrinter
An abstract printer type for non-stack-based formats.
"""
abstract type FlatModelPrinter <: AbstractPrinter end
struct SGRColor
class::String
hex::String
SGRColor(class::AbstractString="", hex::AbstractString="") = new(class, hex)
end
mutable struct SGRContext
fg::SGRColor
bg::SGRColor
flags::BitVector
SGRContext() = new(SGRColor(), SGRColor(), falses(128))
end
include("colors.jl")
include("plain.jl")
include("html.jl")
==(a::SGRContext, b::SGRContext) = a.fg == b.fg && a.bg == b.bg && a.flags == b.flags
isnormal(ctx::SGRContext) = isnormal(ctx.fg) && isnormal(ctx.bg) && !any(ctx.flags)
function reset_color(ctx::SGRContext)
ctx.fg = SGRColor()
ctx.bg = SGRColor()
end
function reset(ctx::SGRContext)
reset_color(ctx)
ctx.flags .= false
end
function reset(printer::AbstractPrinter)
seekstart(printer.buf)
if printer isa StackModelPrinter
while !isempty(printer.stack)
pop!(printer.stack)
end
end
reset(printer.ctx)
reset(printer.prevctx)
end
function copy!(dest::SGRContext, src::SGRContext)
dest.fg = src.fg
dest.bg = src.bg
dest.flags .= src.flags
end
escape_char(printer::AbstractPrinter, c::Char) = nothing
function show_body(io::IO, printer::AbstractPrinter)
reset(printer)
buf = printer.buf
ctx_changed = false
while !eof(buf)
c = read(buf, Char)
if c !== '\e'
if ctx_changed
apply_changes(io, printer)
copy!(printer.prevctx, printer.ctx)
ctx_changed = false
end
ec = escape_char(printer, c)
write(io, ec === nothing ? c : ec)
continue
end
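# An ESC was read; parse the CSI sequence that follows: '[' and then
# parameter bytes, terminated by a final byte >= Char(0x40).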
ansiesc = IOBuffer()
c = read(buf, Char)
c === '[' || continue
while !eof(buf)
c = read(buf, Char)
if '0' <= c <= '9' || c === ';' # strip spaces
write(ansiesc, c)
elseif c >= Char(0x40)
break
end
end
astr = String(take!(ansiesc))
if c === 'm'
while true
astr = parse_sgrcodes(printer.ctx, astr)
isempty(astr) && break
end
end
ctx_changed = printer.prevctx != printer.ctx
end
finalize(io, printer)
end
function parse_sgrcodes(ctx::SGRContext, astr::AbstractString)
if (m = match(r"^0?(?:;|$)", astr)) !== nothing
reset(ctx)
elseif (m = match(r"^22;?", astr)) !== nothing
reset_color(ctx)
ctx.flags[1:2] .= false
elseif (m = match(r"^2([3-57-9]);?", astr)) !== nothing
di = parse(Int, m.captures[1])
ctx.flags[di] = false
ctx.flags[di + (di === 5)] = false
elseif (m = match(r"^39;?", astr)) !== nothing
ctx.fg = SGRColor()
elseif (m = match(r"^49;?", astr)) !== nothing
ctx.bg = SGRColor()
elseif (m = match(r"^([349][0-7]|10[0-7]);?", astr)) !== nothing
set_16colors!(ctx, m.captures[1])
elseif (m = match(r"^([345]8);5;(\d{0,3});?", astr)) !== nothing
d, col = m.captures
d != "58" && set_256colors!(ctx, d, col) # code 58 is not yet supported
elseif (m = match(r"^([345]8);2;(\d{0,3});(\d{0,3});(\d{0,3});?", astr)) !== nothing
d, rs, gs, bs = m.captures
d != "58" && set_24bitcolors!(ctx, d, rs, gs, bs) # code 58 is not yet supported
elseif (m = match(r"^(\d);?", astr)) !== nothing
di = parse(Int, m.captures[1])
if di === 1 || di === 2
ctx.flags[1:2] .= (di === 1, di === 2)
elseif di === 5 || di === 6
ctx.flags[5:6] .= (di === 5, di === 6)
else
ctx.flags[di] = true
end
elseif (m = match(r"^(\d+);?", astr)) !== nothing
# unsupported
else # unknown
return ""
end
return astr[m.offset + lastindex(m.match):end]
end
function apply_changes(io::IO, printer::StackModelPrinter)
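# Diff the previous and current SGR contexts: mark every stacked state whose
# style changed, unwind the stack down to the deepest marked entry, then
# re-open the states that are still active (plus any newly added ones).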
stack = printer.stack
ctx = printer.ctx
prevctx = printer.prevctx
invert = prevctx.flags[7] != ctx.flags[7]
marks = zeros(Bool, length(stack))
nstack = String[]
for di = 1:9
if prevctx.flags[di] != ctx.flags[di]
class = string(di)
marks .|= map(c -> c == class, stack)
ctx.flags[di] && push!(nstack, class)
end
end
if prevctx.fg != ctx.fg || invert
marks .|= map(c -> occursin(r"^(?:3[0-7]|9[0-7]|38_[25])$", c), stack)
isnormal(ctx.fg) || push!(nstack, ctx.fg.class)
end
if prevctx.bg != ctx.bg || invert
marks .|= map(c -> occursin(r"^(?:4[0-7]|10[0-7]|48_[25])$", c), stack)
isnormal(ctx.bg) || push!(nstack, ctx.bg.class)
end
poplevel = findfirst(marks)
if poplevel !== nothing
while length(stack) >= poplevel
end_current_state(io, printer)
class = pop!(stack)
pop!(marks) || push!(nstack, class)
end
end
while !isempty(nstack)
class = pop!(nstack)
push!(stack, class)
start_new_state(io, printer)
end
end
function apply_changes(io::IO, printer::FlatModelPrinter)
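# Emit the minimal list of SGR codes that transforms the previous context
# into the current one; a bare reset ("\e[m") is emitted when the new
# context is back to normal.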
ansicodes = Int[]
prevctx = printer.prevctx
ctx = printer.ctx
prevflags = prevctx.flags
flags = ctx.flags
prevctx == ctx && return
if isnormal(ctx)
change_state(io, printer, ansicodes)
return
end
if prevflags[1] & !flags[1] || prevflags[2] & !flags[2]
if !flags[1] && !flags[2]
push!(ansicodes, 22)
parse_sgrcodes(prevctx, "22")
end
end
if prevflags[5] & !flags[5] || prevflags[6] & !flags[6]
if !flags[5] && !flags[6]
push!(ansicodes, 25)
parse_sgrcodes(prevctx, "25")
end
end
prevctx.fg != ctx.fg && append!(ansicodes, isnormal(ctx.fg) ? (39,) : codes(ctx.fg))
prevctx.bg != ctx.bg && append!(ansicodes, isnormal(ctx.bg) ? (49,) : codes(ctx.bg))
for i in 1:length(flags)
prevflags[i] === flags[i] && continue
if 1 <= i <= 2 || 5 <= i <= 6
flags[i] && push!(ansicodes, i)
elseif i <= 9
push!(ansicodes, flags[i] ? i : i + 20)
end
end
isempty(ansicodes) || change_state(io, printer, ansicodes)
end
function finalize(io::IO, printer::StackModelPrinter)
while !isempty(printer.stack) # force closing
end_current_state(io, printer)
pop!(printer.stack)
end
end
function finalize(io::IO, printer::FlatModelPrinter)
reset(printer.ctx)
printer.prevctx != printer.ctx && apply_changes(io, printer)
end
function Base.show(io::IO, ::MIME"text/plain", printer::AbstractPrinter)
reset(printer)
if get(io, :color, false)::Bool
write(io, printer.buf)
end
end
end # module
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 3188 |
isnormal(c::SGRColor) = isempty(c.class)
is216color(c::SGRColor) = is216color(c.hex)
function is216color(hex::AbstractString)
hex == "000" || hex == "fff" || occursin(r"(?:00|5f|87|af|d7|ff){3}$", hex)
end
function codes(c::SGRColor)
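    # Map an SGRColor back to the SGR code tuple that produces it: plain
    # 16-color classes give a single code, "38_5"/"48_5" classes give
    # 256-color codes (n;5;idx), and "38_2"/"48_2" give 24-bit (n;2;r;g;b).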
m = match(r"^([345]8)_([25])$", c.class)
m === nothing && return (parse(Int, c.class),)
code, sub = m.captures
codei = parse(Int, code)
if sub == "5"
if is216color(c)
h = parse(UInt32, c.hex, base=16)
h === 0x00000fff && return (codei, 5, 231)
r = (h >> 0x10) % UInt8 ÷ 0x30
g = (h >> 0x08) % UInt8 ÷ 0x30
b = (h >> 0x00) % UInt8 ÷ 0x30
return (codei, 5, (r * 0x24 + g * 0x6 + b) + 16)
else
h = parse(UInt8, c.hex[1:2], base=16)
g = (h - 0x8) ÷ 0xa
return (codei, 5, g + 232)
end
else
if length(c.hex) == 3
h = parse(UInt16, c.hex, base=16)
r = (h >> 0x8)
g = (h >> 0x4) & 0xf
b = h & 0xf
return (codei, 2, r * 17, g * 17, b * 17)
else
h = parse(UInt32, c.hex, base=16)
r = (h >> 0x10)
g = (h >> 0x8) & 0xff
b = h & 0xff
return (codei, 2, Int(r), Int(g), Int(b))
end
end
end
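# Compress an RGB triple into CSS-style hex: 3 digits when every channel has
# two identical nibbles (e.g. 0xee -> "e"), otherwise the full 6 digits.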
function short_hex(r::UInt8, g::UInt8, b::UInt8)
rgb6 = UInt32(r) << 0x10 + UInt32(g) << 0x8 + b
rgb6 === (rgb6 & 0x0f0f0f) * 0x11 || return string(rgb6, pad=6, base=16)
string(UInt16(r >> 0x4) << 0x8 + UInt16(g >> 0x4) << 0x4 + b >> 0x4, pad=3, base=16)
end
function set_16colors!(ctx::SGRContext, d::AbstractString)
if d[1] === '3' || d[1] === '9'
ctx.fg = SGRColor(d)
else
ctx.bg = SGRColor(d)
end
end
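# The six per-channel intensity levels of the xterm 216-color (6x6x6) cube.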
const SCALE_216 = UInt8[0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff]
function set_256colors!(ctx::SGRContext, d::AbstractString, color::AbstractString)
fore = d[1] === '3'
hex = ""
colorid = isempty(color) ? 0x0 : parse(UInt8, color)
if colorid < 0x8
class = d[1] * string(colorid)
elseif colorid < 0x10
class = (fore ? "9" : "10") * string(colorid - 0x8)
else
if colorid < 0xe8
c = colorid - 0x10
r = SCALE_216[c ÷ 0x24 + 1]
g = SCALE_216[c ÷ 0x6 % 0x6 + 1]
b = SCALE_216[c % 0x6 + 1]
hex = short_hex(r, g, b)
else
g = (colorid - 0xe8) * 0xa + 0x8
hex = short_hex(g, g, g)
end
class = d * "_5"
end
if fore
ctx.fg = SGRColor(class, hex)
else
ctx.bg = SGRColor(class, hex)
end
end
function set_24bitcolors!(ctx::SGRContext, d::AbstractString,
r::AbstractString, g::AbstractString, b::AbstractString)
r8 = isempty(r) ? 0x0 : parse(UInt8, r)
g8 = isempty(g) ? 0x0 : parse(UInt8, g)
b8 = isempty(b) ? 0x0 : parse(UInt8, b)
hex = short_hex(r8, g8, b8)
if is216color(hex) || (r8 === g8 === b8 && (r8 - 8) % 10 == 0)
class = d * "_5"
else
class = d * "_2"
end
if d == "38"
ctx.fg = SGRColor(class, hex)
else
ctx.bg = SGRColor(class, hex)
end
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 3295 |
struct HTMLPrinter <: StackModelPrinter
buf::IO
stack::Vector{String}
prevctx::SGRContext
ctx::SGRContext
root_class::String
root_tag::String
callback::Any
function HTMLPrinter(buf::IO;
root_class::AbstractString = "",
root_tag::AbstractString = "pre",
callback::Any = nothing)
new(buf, String[], SGRContext(), SGRContext(),
String(root_class), String(root_tag), callback)
end
end
"""
HTMLPrinter(buf::IO; root_class="", root_tag="pre", callback=nothing)
Creates a printer for `MIME"text/html"` output.
# Arguments
- `buf`: A source `IO` object containing text with ANSI escape codes.
- `root_class`: The `class` attribute value for the root element.
- `root_tag`: The tag name for the root element.
- `callback`: A callback method (see below).
# Callback method
callback(io::IO, printer::HTMLPrinter, tag::String, attrs::Dict{Symbol, String})
The `callback` method will be called just before writing HTML tags.
## Callback arguments
- `io`: The destination `IO` object.
- `printer`: The `HTMLPrinter` in use.
- `tag`: The HTML tag to be written. For closing tags, they have the prefix "/".
- `attrs`: A dictionary consisting of pairs of a `Symbol` for the attributes
(e.g. `:class`, `:style`) and the `String` for its value.
## Callback return value
If the return value is `nothing`, the printer writes the HTML tag to the `io`
according to the `tag` and the `attrs` after the call. If the return value is
not `nothing`, this default writing will be prevented.
"""
function HTMLPrinter end
Base.showable(::MIME"text/html", printer::HTMLPrinter) = isreadable(printer.buf)
const HTML_ESC_CHARS = Dict{Char, String}(
'\'' => "&#39;",
'\"' => "&quot;",
'<' => "&lt;",
'>' => "&gt;",
'&' => "&amp;",
)
escape_char(::HTMLPrinter, c::Char) = get(HTML_ESC_CHARS, c, nothing)
function Base.show(io::IO, ::MIME"text/html", printer::HTMLPrinter)
tag = printer.root_tag
attrs = Dict{Symbol, String}()
isempty(printer.root_class) || push!(attrs, :class => printer.root_class)
write_htmltag(io, printer, printer.root_tag, attrs)
show_body(io, printer)
write_htmltag(io, printer, "/" * printer.root_tag)
end
function start_new_state(io::IO, printer::HTMLPrinter)
class = printer.stack[end]
ctx = printer.ctx
attrs = Dict{Symbol, String}(:class => "sgr" * class)
if occursin(r"^38_[25]$", class)
push!(attrs, :style => "color:#" * ctx.fg.hex)
elseif occursin(r"^48_[25]$", class)
push!(attrs, :style => "background:#" * ctx.bg.hex)
end
write_htmltag(io, printer, "span", attrs)
end
function end_current_state(io::IO, printer::HTMLPrinter)
write_htmltag(io, printer, "/span", )
end
function write_htmltag(io::IO, printer::HTMLPrinter,
tag::String, attrs::Dict{Symbol, String} = Dict{Symbol, String}())
if printer.callback !== nothing
result = printer.callback(io, printer, tag, attrs)
result === nothing || return
end
write(io, "<", tag)
for k in sort!(collect(keys(attrs)))
v = attrs[k]
isempty(v) && continue
write(io, " ", k, "=\"", v, "\"")
end
write(io, ">")
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 839 |
struct PlainTextPrinter <: FlatModelPrinter
buf::IO
prevctx::SGRContext
ctx::SGRContext
function PlainTextPrinter(buf::IO)
new(buf, SGRContext(), SGRContext())
end
end
"""
PlainTextPrinter(buf::IO)
Creates a printer for `MIME"text/plain"` output.
# Arguments
- `buf`: A source `IO` object containing text with ANSI escape codes.
"""
function PlainTextPrinter end
Base.showable(::MIME"text/plain", printer::PlainTextPrinter) = isreadable(printer.buf)
function Base.show(io::IO, ::MIME"text/plain", printer::PlainTextPrinter)
show_body(io, printer)
end
function change_state(io::IO, printer::PlainTextPrinter, ansicodes::Vector{Int})
get(io, :color, false) || return
if isempty(ansicodes)
print(io, "\e[m")
else
print(io, "\e[", join(ansicodes, ';'), 'm')
end
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 4841 | using Test, ANSIColoredPrinters
@testset "isnormal" begin
n = ANSIColoredPrinters.SGRColor()
@test ANSIColoredPrinters.isnormal(n)
c16 = ANSIColoredPrinters.SGRColor("30")
@test !ANSIColoredPrinters.isnormal(c16)
c256 = ANSIColoredPrinters.SGRColor("38_5", "000")
@test !ANSIColoredPrinters.isnormal(c256)
end
@testset "is216color" begin
c000 = ANSIColoredPrinters.SGRColor("38_5", "000")
@test ANSIColoredPrinters.is216color(c000)
c080808 = ANSIColoredPrinters.SGRColor("48_5", "080808")
@test !ANSIColoredPrinters.is216color(c080808)
cabc = ANSIColoredPrinters.SGRColor("38_2", "abc")
@test !ANSIColoredPrinters.is216color(cabc)
end
@testset "set_16colors!" begin
ctx = ANSIColoredPrinters.SGRContext()
ANSIColoredPrinters.set_16colors!(ctx, "30")
@test ctx.fg.class == "30"
@test isempty(ctx.fg.hex)
@test ANSIColoredPrinters.codes(ctx.fg) === (30,)
ANSIColoredPrinters.set_16colors!(ctx, "47")
@test ctx.bg.class == "47"
@test isempty(ctx.bg.hex)
@test ANSIColoredPrinters.codes(ctx.bg) === (47,)
ANSIColoredPrinters.set_16colors!(ctx, "97")
@test ctx.fg.class == "97"
@test isempty(ctx.fg.hex)
@test ANSIColoredPrinters.codes(ctx.fg) === (97,)
ANSIColoredPrinters.set_16colors!(ctx, "100")
@test ctx.bg.class == "100"
@test isempty(ctx.bg.hex)
@test ANSIColoredPrinters.codes(ctx.bg) === (100,)
end
@testset "set_256colors!" begin
ctx = ANSIColoredPrinters.SGRContext()
# 16 colors
ANSIColoredPrinters.set_256colors!(ctx, "38", "1")
@test ctx.fg.class == "31"
@test isempty(ctx.fg.hex)
@test ANSIColoredPrinters.codes(ctx.fg) === (31,)
ANSIColoredPrinters.set_256colors!(ctx, "48", "6")
@test ctx.bg.class == "46"
@test isempty(ctx.bg.hex)
@test ANSIColoredPrinters.codes(ctx.bg) === (46,)
ANSIColoredPrinters.set_256colors!(ctx, "38", "15")
@test ctx.fg.class == "97"
@test isempty(ctx.fg.hex)
@test ANSIColoredPrinters.codes(ctx.fg) === (97,)
ANSIColoredPrinters.set_256colors!(ctx, "48", "8")
@test ctx.bg.class == "100"
@test isempty(ctx.bg.hex)
@test ANSIColoredPrinters.codes(ctx.bg) === (100,)
# 216 colors (6 * 6 * 6)
ANSIColoredPrinters.set_256colors!(ctx, "38", "16")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "000"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 16)
ANSIColoredPrinters.set_256colors!(ctx, "48", "17")
@test ctx.bg.class == "48_5"
@test ctx.bg.hex == "00005f"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 5, 17)
ANSIColoredPrinters.set_256colors!(ctx, "38", "110")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "87afd7"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 110)
ANSIColoredPrinters.set_256colors!(ctx, "38", "230")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "ffffd7"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 230)
ANSIColoredPrinters.set_256colors!(ctx, "48", "231")
@test ctx.bg.class == "48_5"
@test ctx.bg.hex == "fff"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 5, 231)
# grays
ANSIColoredPrinters.set_256colors!(ctx, "38", "232")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "080808"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 232)
ANSIColoredPrinters.set_256colors!(ctx, "48", "255")
@test ctx.bg.class == "48_5"
@test ctx.bg.hex == "eee"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 5, 255)
end
@testset "set_24bitcolors" begin
ctx = ANSIColoredPrinters.SGRContext()
ANSIColoredPrinters.set_24bitcolors!(ctx, "38", "0", "128", "255")
@test ctx.fg.class == "38_2"
@test ctx.fg.hex == "0080ff"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 2, 0, 128, 255)
ANSIColoredPrinters.set_24bitcolors!(ctx, "48", "170", "187", "204")
@test ctx.bg.class == "48_2"
@test ctx.bg.hex == "abc"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 2, 170, 187, 204)
# 216 colors
ANSIColoredPrinters.set_24bitcolors!(ctx, "38", "0", "0", "0")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "000"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 16)
ANSIColoredPrinters.set_24bitcolors!(ctx, "48", "0", "0", "95")
@test ctx.bg.class == "48_5"
@test ctx.bg.hex == "00005f"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 5, 17)
# grays
ANSIColoredPrinters.set_24bitcolors!(ctx, "38", "8", "8", "8")
@test ctx.fg.class == "38_5"
@test ctx.fg.hex == "080808"
@test ANSIColoredPrinters.codes(ctx.fg) === (38, 5, 232)
ANSIColoredPrinters.set_24bitcolors!(ctx, "48", "238", "238", "238")
@test ctx.bg.class == "48_5"
@test ctx.bg.hex == "eee"
@test ANSIColoredPrinters.codes(ctx.bg) === (48, 5, 255)
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 9516 | using Test, ANSIColoredPrinters
function repr_html(printer::HTMLPrinter)
result = repr("text/html", printer)
take!(printer.buf)
return result
end
@testset "plain" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
print(buf, "This is a plain text.")
@test repr_html(printer) == "<pre>This is a plain text.</pre>"
end
@testset "escape" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
print(buf, "\"HTMLWriter\" uses '<pre>' & '<span>' elements.")
result = repr_html(printer)
@test result == "<pre>&quot;HTMLWriter&quot; uses &#39;&lt;pre&gt;&#39; &amp; " *
"&#39;&lt;span&gt;&#39; elements.</pre>"
end
@testset "single modification" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
@testset "bold/faint" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[2m", " Faint ")
print(buf, "\e[22m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr1"> Bold </span>""" *
"""<span class="sgr2"> Faint </span> Normal </pre>"""
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[36m", " CyanFG ")
print(buf, "\e[39;44m", " BlueBG ")
print(buf, "\e[49m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr36"> CyanFG </span>""" *
"""<span class="sgr44"> BlueBG </span> Normal </pre>"""
end
@testset "blink" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[5m", " Blink ")
print(buf, "\e[6m", " RapidBlink ")
print(buf, "\e[25m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr5"> Blink </span>""" *
"""<span class="sgr6"> RapidBlink </span> Normal </pre>"""
end
end
@testset "nested modification" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
@testset "bold/italic" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[3m", " Bold-Italic ")
print(buf, "\e[23m", " Bold ")
print(buf, "\e[0m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr1"> Bold """ *
"""<span class="sgr3"> Bold-Italic </span>""" *
""" Bold </span> Normal </pre>"""
print(buf, "\e[3m", " Italic ")
print(buf, "\e[1m", " Bold-Italic ")
print(buf, "\e[22m", " Italic ")
print(buf, "\e[0m", " Normal ")
result = repr_html(printer)
@test result == """<pre><span class="sgr3"> Italic """ *
"""<span class="sgr1"> Bold-Italic </span>""" *
""" Italic </span> Normal </pre>"""
end
@testset "bold/fg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[36m", " Bold-CyanFG ")
print(buf, "\e[39m", " Bold ")
print(buf, "\e[0m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr1"> Bold """ *
"""<span class="sgr36"> Bold-CyanFG </span>""" *
""" Bold </span> Normal </pre>"""
end
@testset "strike/blink" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[9m", " Strike ")
print(buf, "\e[5m", " Strike-Blink ")
print(buf, "\e[25m", " Strike ")
print(buf, "\e[29m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr9"> Strike """ *
"""<span class="sgr5"> Strike-Blink </span>""" *
""" Strike </span> Normal </pre>"""
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[96m", " LightCyanFG ")
print(buf, "\e[45m", " LightCyanFG-MagentaBG ")
print(buf, "\e[49m", " LightCyanFG ")
print(buf, "\e[39m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr96"> LightCyanFG """ *
"""<span class="sgr45"> LightCyanFG-MagentaBG </span>""" *
""" LightCyanFG </span> Normal </pre>"""
end
end
@testset "overlapped modification" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
@testset "bold/fg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[36m", " Bold-CyanFG ")
print(buf, "\e[22;36m", " CyanFG ")
print(buf, "\e[39m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr1"> Bold """ *
"""<span class="sgr36"> Bold-CyanFG </span></span>""" *
"""<span class="sgr36"> CyanFG </span> Normal </pre>"""
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[96m", " LightCyanFG ")
print(buf, "\e[45m", " LightCyanFG-MagentaBG ")
print(buf, "\e[39m", " MagentaBG ")
print(buf, "\e[49m", " Normal ")
result = repr_html(printer)
@test result == """<pre> Normal <span class="sgr96"> LightCyanFG """ *
"""<span class="sgr45"> LightCyanFG-MagentaBG </span></span>""" *
"""<span class="sgr45"> MagentaBG </span> Normal </pre>"""
end
end
@testset "force reset" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
print(buf, "\e[7m", " Invert ")
print(buf, "\e[8m", " Conceal ")
result = repr_html(printer)
@test result == """<pre><span class="sgr7"> Invert <span class="sgr8"> Conceal """ *
"""</span></span></pre>"""
end
@testset "256 colors" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
print(buf, "\e[38;5;4m", " Blue(FG) ")
print(buf, "\e[48;5;16m", " #000(BG) ")
print(buf, "\e[38;5;110m", " #87afd7(FG) ")
print(buf, "\e[48;5;255m", " #eee(BG) ")
result = repr_html(printer)
@test result == """<pre><span class="sgr34"> Blue(FG) """ *
"""<span class="sgr48_5" style="background:#000"> #000(BG) """ *
"""</span></span><span class="sgr48_5" style="background:#000">""" *
"""<span class="sgr38_5" style="color:#87afd7"> #87afd7(FG) """ *
"""</span></span><span class="sgr38_5" style="color:#87afd7">""" *
"""<span class="sgr48_5" style="background:#eee"> #eee(BG) """ *
"""</span></span></pre>"""
end
@testset "24-bit colors" begin
buf = IOBuffer()
printer = HTMLPrinter(buf)
print(buf, "\e[38;2;0;128;255m", " #0080ff(FG) ")
print(buf, "\e[48;2;238;238;238m", " #eee(BG) ")
print(buf, "\e[38;2;170;187;204m", " #abc(FG) ")
result = repr_html(printer)
@test result == """<pre><span class="sgr38_2" style="color:#0080ff"> #0080ff(FG) """ *
"""<span class="sgr48_5" style="background:#eee"> #eee(BG) """ *
"""</span></span><span class="sgr48_5" style="background:#eee">""" *
"""<span class="sgr38_2" style="color:#abc"> #abc(FG) """ *
"""</span></span></pre>"""
end
@testset "callback" begin
@testset "use default" begin
counter = 0
function cb(io, printer, tag, attrs)
startswith(tag, "/") && return nothing
push!(attrs, :id => tag * string(counter))
counter += 1
return nothing
end
buf = IOBuffer()
printer = HTMLPrinter(buf, root_class = "root test", root_tag = "code", callback = cb)
print(buf, "\e[0m", " Normal ")
print(buf, "\e[38;5;255m", " #eee(FG) ")
print(buf, "\e[0m", " Normal ")
result = repr_html(printer)
@test result == """<code class="root test" id="code0"> Normal """ *
"""<span class="sgr38_5" id="span1" style="color:#eee">""" *
""" #eee(FG) </span> Normal </code>"""
end
@testset "prevent default" begin
dom = Tuple[(:rootnode, Tuple[])]
function cb(io, printer, tag, attrs)
text = String(take!(io))
parent = dom[end]
children = parent[end]
isempty(text) || push!(children, (:textnode, text))
if startswith(tag, "/")
pop!(dom)
else
parent = (Symbol(tag), attrs, Tuple[])
push!(children, parent)
push!(dom, parent)
end
return true
end
buf = IOBuffer()
printer = HTMLPrinter(buf, callback = cb)
print(buf, "\e[0m", " Normal ")
print(buf, "\e[38;5;255m", " #eee(FG) ")
print(buf, "\e[0m", " Normal ")
result = repr_html(printer)
@test result == ""
@test dom[1] ==
(:rootnode, Tuple[
(:pre, Dict{Symbol,String}(), Tuple[
(:textnode, " Normal "),
(:span, Dict(:class => "sgr38_5", :style => "color:#eee"), Tuple[
(:textnode, " #eee(FG) ")
]),
(:textnode, " Normal ")
])
])
end
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 5449 | using Test, ANSIColoredPrinters
function repr_color(printer::PlainTextPrinter)
result = repr("text/plain", printer, context = :color => true)
get(ENV, "JULIA_DEBUG", "") == "" || println(result)
take!(printer.buf)
return result
end
@testset "no color" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
print(buf, "\e[36m", " CyanFG ")
print(buf, "\e[39m", " Normal ")
result = repr("text/plain", printer)
@test result == " CyanFG Normal "
end
@testset "single modification" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
@testset "bold/faint" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[2m", " Faint ")
print(buf, "\e[22m", " Normal ")
@test repr_color(printer) == " Normal \e[1m Bold \e[2m Faint \e[m Normal "
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[36m", " CyanFG ")
print(buf, "\e[39;44m", " BlueBG ")
print(buf, "\e[49m", " Normal ")
@test repr_color(printer) == " Normal \e[36m CyanFG \e[39;44m BlueBG \e[m Normal "
end
@testset "blink" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[5m", " Blink ")
print(buf, "\e[6m", " RapidBlink ")
print(buf, "\e[25m", " Normal ")
@test repr_color(printer) == " Normal \e[5m Blink \e[6m RapidBlink \e[m Normal "
end
end
@testset "nested modification" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
@testset "bold/italic" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[3m", " Bold-Italic ")
print(buf, "\e[23m", " Bold ")
print(buf, "\e[0m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[1m Bold \e[3m Bold-Italic \e[23m Bold \e[m Normal "
print(buf, "\e[3m", " Italic ")
print(buf, "\e[1m", " Bold-Italic ")
print(buf, "\e[22m", " Italic ")
print(buf, "\e[0m", " Normal ")
result = repr_color(printer)
@test result == "\e[3m Italic \e[1m Bold-Italic \e[22m Italic \e[m Normal "
end
@testset "bold/fg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[36m", " Bold-CyanFG ")
print(buf, "\e[39m", " Bold ")
print(buf, "\e[0m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[1m Bold \e[36m Bold-CyanFG \e[39m Bold \e[m Normal "
end
@testset "strike/blink" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[9m", " Strike ")
print(buf, "\e[5m", " Strike-Blink ")
print(buf, "\e[25m", " Strike ")
print(buf, "\e[29m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[9m Strike \e[5m Strike-Blink \e[25m Strike \e[m Normal "
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[96m", " LightCyanFG ")
print(buf, "\e[45m", " LightCyanFG-MagentaBG ")
print(buf, "\e[49m", " LightCyanFG ")
print(buf, "\e[39m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[96m LightCyanFG \e[45m LightCyanFG-MagentaBG " *
"\e[49m LightCyanFG \e[m Normal "
end
end
@testset "overlapped modification" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
@testset "bold/fg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[1m", " Bold ")
print(buf, "\e[36m", " Bold-CyanFG ")
print(buf, "\e[22;36m", " CyanFG ")
print(buf, "\e[39m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[1m Bold \e[36m Bold-CyanFG " *
"\e[22;36m CyanFG \e[m Normal "
end
@testset "fg/bg" begin
print(buf, "\e[0m", " Normal ")
print(buf, "\e[96m", " LightCyanFG ")
print(buf, "\e[45m", " LightCyanFG-MagentaBG ")
print(buf, "\e[39m", " MagentaBG ")
print(buf, "\e[49m", " Normal ")
result = repr_color(printer)
@test result == " Normal \e[96m LightCyanFG \e[45m LightCyanFG-MagentaBG " *
"\e[39m MagentaBG \e[m Normal "
end
end
@testset "force reset" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
print(buf, "\e[7m", " Invert ")
print(buf, "\e[8m", " Conceal ")
@test repr_color(printer) == "\e[7m Invert \e[8m Conceal \e[m"
end
@testset "256 colors" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
print(buf, "\e[38;5;4m", " Blue(FG) ")
print(buf, "\e[48;5;16m", " #000(BG) ")
print(buf, "\e[38;5;110m", " #87afd7(FG) ")
print(buf, "\e[48;5;255m", " #eee(BG) ")
result = repr_color(printer)
@test result == "\e[34m Blue(FG) \e[48;5;16m #000(BG) " *
"\e[38;5;110m #87afd7(FG) \e[48;5;255m #eee(BG) \e[m"
end
@testset "24-bit colors" begin
buf = IOBuffer()
printer = PlainTextPrinter(buf)
print(buf, "\e[38;2;0;128;255m", " #0080ff(FG) ")
print(buf, "\e[48;2;238;238;238m", " #eee(BG) ")
print(buf, "\e[38;2;170;187;204m", " #abc(FG) ")
result = repr_color(printer)
@test result == "\e[38;2;0;128;255m #0080ff(FG) \e[48;5;255m #eee(BG) " *
"\e[38;2;170;187;204m #abc(FG) \e[m"
end | ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | code | 204 | using Test, ANSIColoredPrinters
@testset "colors" begin
include("colors.jl")
end
@testset "PlainTextPrinter" begin
include("plain.jl")
end
@testset "HTMLPrinter" begin
include("html.jl")
end
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | docs | 724 | # ANSIColoredPrinters
[](https://juliadocs.github.io/ANSIColoredPrinters.jl/stable)
[](https://juliadocs.github.io/ANSIColoredPrinters.jl/dev)
[](https://github.com/JuliaDocs/ANSIColoredPrinters.jl/actions?query=workflow%3ACI)
[](https://codecov.io/gh/JuliaDocs/ANSIColoredPrinters.jl)
`ANSIColoredPrinters` converts text qualified by
[ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) to another
format.
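A minimal usage sketch, converting colored terminal output to an HTML fragment:

```julia
using ANSIColoredPrinters

buf = IOBuffer()
printstyled(IOContext(buf, :color => true), "hello", color = :cyan)
repr("text/html", HTMLPrinter(buf))  # e.g. "<pre><span class=\"sgr36\">hello</span></pre>"
```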
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | docs | 1506 | # ANSIColoredPrinters
ANSIColoredPrinters converts text qualified by
[ANSI escape codes](https://en.wikipedia.org/wiki/ANSI_escape_code) to another
format. Currently, [plain text](@ref plain_text) and [HTML](@ref html) output
are supported.
## Installation
The package can be installed with the Julia package manager. Run:
```julia
import Pkg
Pkg.add("ANSIColoredPrinters")
```
or, from the Julia REPL, type `]` to enter the Pkg REPL mode and run:
```julia
pkg> add ANSIColoredPrinters
```
## Usage
All you need to do is pass an `IO` object containing text qualified with ANSI
escape codes as the first argument of the constructor of a printer
(e.g. [`HTMLPrinter`](@ref)).
On environments which support `MIME"text/html"` display (e.g. this Documenter's
HTML output), the text is displayed as HTML, with its ANSI escape codes
translated into HTML elements by [`HTMLPrinter`](@ref).
```@example ex
using ANSIColoredPrinters
using Crayons
buf = IOBuffer()
Crayons.print_logo(buf) # this outputs ANSI escape codes.
printer = HTMLPrinter(buf, root_class="documenter-example-output")
```
Perhaps your browser is displaying a colored logo, but the `HTMLPrinter`
actually outputs HTML code that looks like:
```@example ex
htmlsrc = IOBuffer() # hide
show(htmlsrc, MIME"text/html"(), printer) # hide
print(String(take!(htmlsrc))[1:119], "...") # hide
```
In addition, the colors and text styles are controlled by the CSS in the host
document (e.g. [`default.css`](./assets/default.css)).
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | docs | 1964 | # Output Formats
```@setup ex
using ANSIColoredPrinters
```
## [Plain Text](@id plain_text)
[`PlainTextPrinter`](@ref) prints plain text with ANSI escape codes as plain
text (`MIME"text/plain"`). This may seem useless, but it has two major benefits.
One is the stripping of ANSI escape codes: you can get rid of them by printing
the text to an `IO` object whose `:color` I/O property is `false`.
The other is the optimization of verbose ANSI escape codes.
### Examples
```@repl ex
src = IOBuffer();
printstyled(IOContext(src, :color => true), "light ", color=:light_cyan);
printstyled(IOContext(src, :color => true), "cyan", color=:light_cyan);
read(seekstart(src), String) # source text
printer = PlainTextPrinter(src);
repr("text/plain", printer, context = :color => false) # stripped
repr("text/plain", printer, context = :color => true) # optimized
```
!!! note
The initial and final states are implicitly interpreted as being "Normal",
i.e. the state with `"\e[0m"`.
## [HTML](@id html)
[`HTMLPrinter`](@ref) prints plain text with ANSI escape codes as an HTML
fragment (`MIME"text/html"`).
See [Supported Codes](@ref) for examples.
The [`HTMLPrinter`](@ref) constructor supports the `callback` keyword argument.
The `callback` method will be called just before writing HTML tags. You can
rewrite the attributes in your `callback` methods. You can also prevent the
default tag writing by setting the return value of the `callback` method to
something other than `nothing`.
```@example ex
src = IOBuffer();
print(src, " Normal ", "\e[48;5;246m", " GrayBG ", "\e[0m", " Normal ");
HTMLPrinter(src) # without callback method
```
```@repl ex
function cb(io::IO, printer::HTMLPrinter, tag::String, attrs::Dict{Symbol, String})
text = String(take!(io))
@show text
@show tag, attrs
return true # prevent default writing
end;
dummy = IOBuffer();
show(dummy, MIME"text/html"(), HTMLPrinter(src, callback = cb));
```
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | docs | 55 | # Reference
```@docs
PlainTextPrinter
HTMLPrinter
```
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.0.1 | 574baf8110975760d391c710b6341da1afa48d8c | docs | 4136 | # Supported Codes
The following are examples of the ANSI escape codes which are supported (i.e.
interpreted as a valid input) by [`HTMLPrinter`](@ref). Note that the
representation depends on the renderer, since some codes are not widely
supported and there are some code conflicts.
```@setup ex
using ANSIColoredPrinters
```
## Bold and Faint
```@example ex
buf = IOBuffer()
print(buf, "\e[0m", "Normal ")
print(buf, "\e[1m", "Bold ")
print(buf, "\e[2m", "Faint ") # this unsets the "bold"
print(buf, "\e[0m", "Normal ")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## Italic
```@example ex
buf = IOBuffer()
print(buf, "\e[0m", "Normal ")
print(buf, "\e[3m", "Italic ")
print(buf, "\e[1m", "Bold-Italic ") # this keeps the "italic"
print(buf, "\e[23m", "Bold ") # this keeps the "bold"
print(buf, "\e[0m", "Normal ")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## Underline and Strikethrough
```@example ex
buf = IOBuffer()
print(buf, "\e[0m", "Normal ")
print(buf, "\e[4m", " Underline ", "\e[24m", " ")
print(buf, "\e[9m", " Striethrough ", "\e[29m", " ")
print(buf, "\e[4;9m", " Both ", "\e[m")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## Invert
The invert code swaps the foreground and background colors. However, the support
is limited. You will need to force the foreground and background colors to be
switched manually, or convert the style afterwards using JavaScript etc.
```@example ex
buf = IOBuffer()
print(buf, "\e[0m", "Normal ")
print(buf, "\e[7m", "Invert ")
print(buf, "\e[27m", "Normal ")
print(buf, "\e[7;100m", "GrayText? ") # not supported by default.css
print(buf, "\e[34m", "BlueBG? ") # not supported by default.css
print(buf, "\e[0m", "Normal ")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## Conceal
```@example ex
buf = IOBuffer()
print(buf, "\e[0m", "Normal ")
print(buf, "\e[8m", "Conceal ")
print(buf, "\e[31;47m", "Red1 ") # this is still concealed
print(buf, "\e[0m", "Normal ")
print(buf, "\e[31;47m", "Red2 ")
print(buf, "\e[8m", "Conceal ")
print(buf, "\e[28m", "Red3 ")
print(buf, "\e[0m", "Normal ")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## 16 colors
The 16 colors correspond to the color symbols which can be specified in the
argument of
[`printstyled`](https://docs.julialang.org/en/v1/base/io-network/#Base.printstyled)
(e.g. `:black`, `:red`, `:green`, `:light_blue`). Their sRGB values are
environment-dependent. This document defines their actual colors in a CSS file.
### Basic colors
```@example ex
buf = IOBuffer()
for fg in [30:37; 39] # foreground color
for bg in [40:47; 49] # background color
print(buf, "\e[$fg;$(bg)m $fg; $bg ")
end
println(buf)
end
HTMLPrinter(buf, root_class="documenter-example-output")
```
### Light colors
```@example ex
buf = IOBuffer()
for fg in [90:97; 39] # foreground color
for bg in [100:107; 49] # background color
print(buf, "\e[$fg;$(bg)m $fg;$bg ")
end
println(buf)
end
HTMLPrinter(buf, root_class="documenter-example-output")
```
## 256 colors
The 256 colors correspond to the integer codes which can be specified in the
argument of printstyled.
```@example ex
buf = IOBuffer()
for color in 0:15 # same as the 16 colors above.
print(buf, "\e[38;5;$color;48;5;$(color)m ")
print(buf, "\e[49m", lpad(color, 3), " ")
color % 8 == 7 && println(buf)
end
for color in 16:231 # 6 × 6 × 6 = 216 colors
(color - 16) % 12 == 0 && println(buf)
print(buf, "\e[38;5;$color;48;5;$(color)m ")
print(buf, "\e[49m", lpad(color, 3), " ")
end
println(buf)
for color in 232:255 # grayscale in 24 steps
(color - 232) % 12 == 0 && println(buf)
print(buf, "\e[38;5;$color;48;5;$(color)m ")
print(buf, "\e[49m", lpad(color, 3), " ")
end
print(buf, "\e[m")
HTMLPrinter(buf, root_class="documenter-example-output")
```
## 24-bit colors
```@example ex
buf = IOBuffer()
print(buf, " \e[48;2;56;152;38m \n")
print(buf, "\e[48;2;203;60;51m ")
print(buf, "\e[48;2;149;88;178m ")
print(buf, "\e[49;38;2;64;99;216m 24-bit RGB\e[m")
HTMLPrinter(buf, root_class="documenter-example-output")
```
| ANSIColoredPrinters | https://github.com/JuliaDocs/ANSIColoredPrinters.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2161 | using SnoopCompile
@snoopi_bot "BinaryBuilder" begin
using BinaryBuilder
# Do an actual build
products = Product[
ExecutableProduct("hello_world_c", :hello_world_c),
ExecutableProduct("hello_world_cxx", :hello_world_cxx),
ExecutableProduct("hello_world_fortran", :hello_world_fortran),
ExecutableProduct("hello_world_go", :hello_world_go),
ExecutableProduct("hello_world_rust", :hello_world_rust),
]
# First, do the build, but only output the meta json, since we definitely want that to be fast
build_tarballs(
["--meta-json=/dev/null"],
"testsuite",
v"1.0.0",
# No sources
DirectorySource[],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Build testsuite
make -j${nproc} -sC /usr/share/testsuite install
# Install fake license just to silence the warning
install_license /usr/share/licenses/MIT
""",
[HostPlatform()],
products,
# No dependencies
Dependency[];
)
# Next, actually do a build, since we want that to be fast too.
build_tarballs(
["--verbose"],
"testsuite",
v"1.0.0",
# Add some sources, so that we actually download them
[
ArchiveSource("https://github.com/staticfloat/small_bin/raw/master/socrates.tar.gz",
"e65d2f13f2085f2c279830e863292312a72930fee5ba3c792b14c33ce5c5cc58"),
DirectorySource("src"),
],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Build testsuite
make -j${nproc} -sC /usr/share/testsuite install
# Install fake license just to silence the warning
install_license /usr/share/licenses/MIT
""",
[HostPlatform()],
products,
# Add a dependency on Zlib_jll, our favorite scapegoat
Dependency[
Dependency("Zlib_jll"),
];
compilers=[:c, :rust, :go],
)
rm("build"; recursive=true, force=true)
rm("products"; recursive=true, force=true)
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2719 | using LightXML, JSON
###
# Formatter taken from the README of JSON.jl
import JSON.Writer
import JSON.Writer.JSONContext
mutable struct PYONContext <: JSONContext
underlying::JSONContext
end
for delegate in [:indent,
:delimit,
:separate,
:begin_array,
:end_array,
:begin_object,
:end_object]
@eval JSON.Writer.$delegate(io::PYONContext) = JSON.Writer.$delegate(io.underlying)
end
Base.write(io::PYONContext, byte::UInt8) = write(io.underlying, byte)
JSON.Writer.show_null(io::PYONContext) = print(io, "None")
pyonprint(io::IO, obj) = let io = PYONContext(JSON.Writer.PrettyContext(io, 4))
JSON.print(io, obj)
return
end
###
function generate_dict()
file = joinpath(@__DIR__, "instructions.xml")
# Download the list of instructions from https://uops.info/xml.html
if !isfile(file)
download("https://uops.info/instructions.xml", file)
end
xml = parse_file(file)
# Get the list of all extensions
extensions = get_elements_by_tagname(root(xml), "extension")
# Accumulator for all instructions. The strategy is the following: we loop
# over the extensions from the lower ones to the higher ones, if an
# instruction is already found in a lower extension, then it is not added to
# the higher one.
all_instructions = String[]
dict = Dict{String,Vector{String}}("unknown" => String[], "cpuid" => ["cpuid"])
for name in (
"mmx", "sse", "sse2", "sse3", "ssse3", "sse4", "avx", "aes", "pclmulqdq" , # sandybridge (aka AVX)
"movbe", "avx2", "rdwrfsgs", "fma", "bmi1", "bmi2", "f16c", # haswell (aka AVX2)
"pku", "rdseed", "adcx", "clflush", "xsavec", "xsaves", "clwb", "avx512evex", "avx512vex", # skylake-avx512 (aka AVX512)
)
instructions = String[]
for idx in findall(x -> name == lowercase(attribute(x, "name")), extensions)
for instruction in get_elements_by_tagname(extensions[idx], "instruction")
instruction = lowercase(replace(attribute(instruction, "asm"), r"{[a-z]+} " => ""))
if instruction ∉ all_instructions
unique!(sort!(push!(all_instructions, instruction)))
unique!(sort!(push!(instructions, instruction)))
end
end
end
dict[name] = instructions
end
free(xml)
# We're basically converting an XML to a JSON, funny isn't it?
open(joinpath(@__DIR__, "..", "src", "auditor", "instructions.json"), "w") do io
pyonprint(io, dict)
# Be nice and add a newline at the end of the file
println(io)
end
return dict
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 727 | using Documenter, BinaryBuilder, BinaryBuilderBase
makedocs(
modules = [BinaryBuilder],
sitename = "BinaryBuilder.jl",
pages = [
"Home" => "index.md",
"Building Packages" => "building.md",
"Build Tips" => "build_tips.md",
"JLL packages" => "jll.md",
"FAQ" => "FAQ.md",
"Build Troubleshooting" => "troubleshooting.md",
"Internals" => [
"RootFS" => "rootfs.md",
"Environment Variables" => "environment_variables.md",
"Tricksy Gotchas" => "tricksy_gotchas.md",
"Reference" => "reference.md",
],
],
)
deploydocs(
repo = "github.com/JuliaPackaging/BinaryBuilder.jl.git",
push_preview = true,
)
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 21532 | module Auditor
using BinaryBuilderBase
using Base.BinaryPlatforms
using Pkg
using ObjectFile
using BinaryBuilderBase: march
export audit, collect_files, collapse_symlinks
include("auditor/instruction_set.jl")
include("auditor/dynamic_linkage.jl")
include("auditor/symlink_translator.jl")
include("auditor/compiler_abi.jl")
include("auditor/soname_matching.jl")
include("auditor/filesystems.jl")
include("auditor/extra_checks.jl")
include("auditor/codesigning.jl")
# AUDITOR TODO LIST:
#
# * Build dlopen() clone that inspects and tries to figure out why
# something can't be opened. Possibly use that within BinaryProvider too?
"""
audit(prefix::Prefix, src_name::AbstractString = "";
io=stderr,
platform::AbstractPlatform = HostPlatform(),
verbose::Bool = false,
silent::Bool = false,
autofix::Bool = false,
has_csl::Bool = true,
require_license::Bool = true,
)
Audits a prefix to attempt to find deployability issues with the binary objects
that have been installed within. This auditing will check for relocatability
issues such as dependencies on libraries outside of the current `prefix`,
usage of advanced instruction sets such as AVX2 that may not be usable on many
platforms, linkage against newer glibc symbols, etc...
This method is still a work in progress; only some of the above list is
actually implemented. Be sure to inspect `Auditor.jl` to see what is
and is not currently in the realm of fantasy.
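
An illustrative invocation on an already-built prefix (the path and package
name below are placeholders):

```julia
using BinaryBuilder
using BinaryBuilder.Auditor: audit
all_ok = audit(Prefix("/path/to/prefix"), "MyPackage";
               platform=Platform("x86_64", "linux"),
               verbose=true, autofix=true)
```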
"""
function audit(prefix::Prefix, src_name::AbstractString = "";
io=stderr,
platform::AbstractPlatform = HostPlatform(),
verbose::Bool = false,
silent::Bool = false,
autofix::Bool = false,
has_csl::Bool = true,
require_license::Bool = true,
)
# This would be really weird, but don't let someone set both `silent` and `verbose` to true
if silent
verbose = false
end
# Canonicalize immediately
prefix = Prefix(realpath(prefix.path))
if verbose
@info("Beginning audit of $(prefix.path)")
end
# If this is false then it's bedtime for bonzo boy
all_ok = true
# Translate absolute symlinks to relative symlinks, if possible
translate_symlinks(prefix.path; verbose=verbose)
# Inspect binary files, looking for improper linkage
predicate = f -> (filemode(f) & 0o111) != 0 || valid_library_path(f, platform)
bin_files = collect_files(prefix, predicate; exclude_externalities=false)
for f in collapse_symlinks(bin_files)
# If `f` is outside of our prefix, ignore it. This happens with files from our dependencies
if !startswith(f, prefix.path)
continue
end
# Peel this binary file open like a delicious tangerine
try
readmeta(f) do ohs
foreach(ohs) do oh
if !is_for_platform(oh, platform)
if verbose
@warn("Skipping binary analysis of $(relpath(f, prefix.path)) (incorrect platform)")
end
else
# Check that the ISA isn't too high
all_ok &= check_isa(oh, platform, prefix; verbose, silent)
# Check that the OS ABI is set correctly (often indicates the wrong linker was used)
all_ok &= check_os_abi(oh, platform; verbose)
# Make sure all binary files are executables, if libraries aren't
# executables Julia may not be able to dlopen them:
# https://github.com/JuliaLang/julia/issues/38993. In principle this
# should be done when autofix=true, but we have to run this fix on MKL
# for Windows, for which however we have to set autofix=false:
# https://github.com/JuliaPackaging/Yggdrasil/pull/922.
all_ok &= ensure_executability(oh; verbose, silent)
# If this is a dynamic object, do the dynamic checks
if isdynamic(oh)
# Check that the libgfortran version matches
all_ok &= check_libgfortran_version(oh, platform; verbose, has_csl)
# Check whether the library depends on any of the most common
# libraries provided by `CompilerSupportLibraries_jll`.
all_ok &= check_csl_libs(oh, platform; verbose, has_csl)
# Check that the libstdcxx string ABI matches
all_ok &= check_cxxstring_abi(oh, platform; verbose)
# Check that this binary file's dynamic linkage works properly. Note to always
# DO THIS ONE LAST as it can actually mutate the file, which causes the previous
# checks to freak out a little bit.
all_ok &= check_dynamic_linkage(oh, prefix, bin_files;
platform, silent, verbose, autofix, src_name)
end
end
end
end
# Ensure this file is codesigned (currently only does something on Apple platforms)
all_ok &= ensure_codesigned(f, prefix, platform; verbose, subdir=src_name)
catch e
if !isa(e, ObjectFile.MagicMismatch)
rethrow(e)
end
# If this isn't an actual binary file, skip it
if verbose
@info("Skipping binary analysis of $(relpath(f, prefix.path))")
end
end
end
# Find all dynamic libraries
shlib_files = filter(f -> startswith(f, prefix.path) && valid_library_path(f, platform), collapse_symlinks(bin_files))
for f in shlib_files
# Inspect all shared library files for our platform (but only if we're
# running native, don't try to load library files from other platforms)
if platforms_match(platform, HostPlatform())
if verbose
@info("Checking shared library $(relpath(f, prefix.path))")
end
# dlopen() this library in a separate Julia process so that if we
# try to do something silly like `dlopen()` a .so file that uses
# LLVM in interesting ways on startup, it doesn't kill our main
# Julia process.
dlopen_cmd = """
using Libdl
try
dlopen($(repr(f)))
exit(0)
catch e
if $(repr(verbose))
Base.display_error(e)
end
exit(1)
end
"""
try
p = open(`$(Base.julia_cmd()) -e $dlopen_cmd`)
wait(p)
if p.exitcode != 0
throw("Invalid exit code!")
end
catch
# TODO: Use the relevant ObjFileBase packages to inspect why
# this file is being nasty to us.
if !silent
@warn("$(relpath(f, prefix.path)) cannot be dlopen()'ed")
end
all_ok = false
end
end
# Ensure that all libraries have at least some kind of SONAME, if we're
# on that kind of platform
if !Sys.iswindows(platform)
all_ok &= ensure_soname(prefix, f, platform; verbose, autofix, subdir=src_name)
end
# Ensure that this library is available at its own SONAME
all_ok &= symlink_soname_lib(f; verbose=verbose, autofix=autofix)
end
# remove *.la files generated by GNU libtool
la_files = collect_files(prefix, endswith(".la"))
for f in la_files
# Make sure the file still exists on disk
if isfile(f)
# sanity check: first byte should be '#', first line should contain 'libtool'
line = readline(f)
if length(line) == 0 || line[1] != '#' || !occursin("libtool", line)
continue
end
elseif !islink(f)
# If the file doesn't exist on disk but it's a symlink, it's a broken link to a
# no-longer-existing libtool file, we still want to remove it. In any other
# case, continue.
continue
end
# remove it
if verbose
@info("Removing libtool file $f")
end
rm(f; force=true)
end
if Sys.iswindows(platform)
# We also cannot allow any symlinks in Windows because it requires
# Admin privileges to create them. Orz
symlinks = collect_files(prefix, islink, exclude_dirs = false)
for f in symlinks
try
src_path = realpath(f)
if isfile(src_path) || isdir(src_path)
rm(f; force=true)
cp(src_path, f, follow_symlinks = true)
end
catch
end
end
# If we're targeting a windows platform, check to make sure no .dll
# files are sitting in `$prefix/lib`, as that's a no-no. This is
# not a fatal offense, but we'll yell about it.
lib_dll_files = filter(f -> valid_library_path(f, platform), collect_files(joinpath(prefix, "lib"), predicate))
for f in lib_dll_files
if !silent
@warn("$(relpath(f, prefix.path)) should be in `bin`!")
end
end
# Even more than yell about it, we're going to automatically move
# them if there are no `.dll` files outside of `lib`. This is
# indicative of a simplistic build system that just don't know any
# better with regards to windows, rather than a complicated beast.
outside_dll_files = [f for f in shlib_files if !(f in lib_dll_files)]
if autofix && !isempty(lib_dll_files) && isempty(outside_dll_files)
if !silent
@warn("Simple buildsystem detected; Moving all `.dll` files to `bin`!")
end
mkpath(joinpath(prefix, "bin"))
for f in lib_dll_files
mv(f, joinpath(prefix, "bin", basename(f)))
end
end
# Normalise timestamp of Windows import libraries.
import_libraries = collect_files(prefix, endswith(".dll.a"))
for implib in import_libraries
if verbose
@info("Normalising timestamps in import library $(implib)")
end
normalise_implib_timestamp(implib)
end
end
# Check that we're providing a license file
if require_license
all_ok &= check_license(prefix, src_name; verbose=verbose, silent=silent)
end
# Perform filesystem-related audit passes
predicate = f -> !startswith(f, joinpath(prefix, "logs"))
all_files = collect_files(prefix, predicate)
# Search for absolute paths in this prefix
all_ok &= check_absolute_paths(prefix, all_files; silent=silent)
# Search for case-sensitive ambiguities
all_ok &= check_case_sensitivity(prefix)
return all_ok
end
"""
compatible_marchs(p::AbstractPlatform)
Return a (sorted) list of compatible microarchitectures, starting from the most
compatible to the most highly specialized. If no microarchitecture is specified within
`p`, returns the most generic microarchitecture possible for the given architecture.
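
For example (illustrative, based on the current `x86_64` microarchitecture
list in `Base.BinaryPlatforms`):

```julia
compatible_marchs(Platform("x86_64", "linux"))               # ["x86_64"]
compatible_marchs(Platform("x86_64", "linux"; march="avx2")) # ["x86_64", "avx", "avx2"]
```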
"""
function compatible_marchs(platform::AbstractPlatform)
if !haskey(BinaryPlatforms.arch_march_isa_mapping, arch(platform))
throw(ArgumentError("Architecture \"$(arch(platform))\" does not contain any known microarchitectures!"))
end
# This is the list of microarchitecture names for this architecture
march_list = String[march for (march, features) in BinaryPlatforms.arch_march_isa_mapping[arch(platform)]]
# Fast-path a `march()` of `nothing`, which defaults to only the most compatible microarchitecture
if march(platform) === nothing
return march_list[begin:begin]
end
# Search for this platform's march in the march list
idx = findfirst(m -> m == march(platform), march_list)
if idx === nothing
throw(ArgumentError("Microarchitecture \"$(march(platform))\" not valid for architecture \"$(arch(platform))\""))
end
# Return all up to that index
return march_list[begin:idx]
end
function check_isa(oh, platform, prefix;
verbose::Bool = false,
silent::Bool = false)
detected_march = analyze_instruction_set(oh, platform; verbose=verbose)
platform_marchs = compatible_marchs(platform)
if detected_march ∉ platform_marchs
# The object file is for a microarchitecture not compatible with
# the desired one
if !silent
msg = replace("""
Minimum instruction set detected for $(relpath(path(oh), prefix.path)) is
$(detected_march), not $(last(platform_marchs)) as desired.
""", '\n' => ' ')
@warn(strip(msg))
end
return false
elseif detected_march != last(platform_marchs)
# The object file is compatible with the desired
# microarchitecture, but using a lower instruction set: inform
# the user that they may be missing some optimisation, but there
# is no incompatibility, so return true.
if !silent
msg = replace("""
Minimum instruction set detected for $(relpath(path(oh), prefix.path)) is
$(detected_march), not $(last(platform_marchs)) as desired.
You may be missing some optimization flags during compilation.
""", '\n' => ' ')
@warn(strip(msg))
end
end
return true
end
function check_dynamic_linkage(oh, prefix, bin_files;
platform::AbstractPlatform = HostPlatform(),
verbose::Bool = false,
silent::Bool = false,
autofix::Bool = true,
src_name::AbstractString = "",
)
all_ok = true
# If it's a dynamic binary, check its linkage
if isdynamic(oh)
rp = RPath(oh)
if verbose
@info("Checking $(relpath(path(oh), prefix.path)) with RPath list $(rpaths(rp))")
end
# Look at every dynamic link, and see if we should do anything about that link...
libs = find_libraries(oh)
ignored_libraries = String[]
for libname in keys(libs)
if should_ignore_lib(libname, oh, platform)
push!(ignored_libraries, libname)
continue
end
# If this is a default dynamic link, then just rewrite to use rpath and call it good.
if is_default_lib(libname, oh)
if autofix
if verbose
@info("Rpathify'ing default library $(libname)")
end
relink_to_rpath(prefix, platform, path(oh), libs[libname]; verbose, subdir=src_name)
end
continue
end
if !isfile(libs[libname])
# If we couldn't resolve this library, let's try autofixing,
# if we're allowed to by the user
if autofix
# First, is this a library that we already know about?
known_bins = lowercase.(basename.(bin_files))
kidx = findfirst(known_bins .== lowercase(basename(libname)))
if kidx !== nothing
# If it is, point to that file instead!
new_link = update_linkage(prefix, platform, path(oh), libs[libname], bin_files[kidx]; verbose, subdir=src_name)
if verbose && new_link !== nothing
@info("Linked library $(libname) has been auto-mapped to $(new_link)")
end
else
if !silent
@warn("Linked library $(libname) could not be resolved and could not be auto-mapped")
if is_troublesome_library_link(libname, platform)
@warn("Depending on $(libname) is known to cause problems at runtime, make sure to link against the JLL library instead")
end
end
all_ok = false
end
else
if !silent
@warn("Linked library $(libname) could not be resolved within the given prefix")
end
all_ok = false
end
elseif !startswith(libs[libname], prefix.path)
if !silent
@warn("Linked library $(libname) (resolved path $(libs[libname])) is not within the given prefix")
end
all_ok = false
end
end
if verbose && !isempty(ignored_libraries)
@info("Ignored system libraries $(join(ignored_libraries, ", "))")
end
# If there is an identity mismatch (which only happens on macOS) fix it
if autofix
fix_identity_mismatch(prefix, platform, path(oh), oh; verbose, subdir=src_name)
end
end
return all_ok
end
if VERSION < v"1.5-DEV"
function walkdir_nosymlinks(path::String)
function adjuster(out_c::Channel)
for (root, dirs, files) in walkdir(path)
for d in dirs
if islink(joinpath(root, d))
push!(files, d)
end
end
filter!(d -> !islink(joinpath(root, d)), dirs)
put!(out_c, (root, dirs, files))
end
end
return Channel(adjuster)
end
else
# No adjustment necessary on 1.5+
walkdir_nosymlinks(path) = walkdir(path)
end
"""
collect_files(path::AbstractString, predicate::Function = f -> true)
Find all files that satisfy `predicate()` when the full path to that file is
passed in, returning the list of file paths.
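
For example, to collect all shared-library files under a (hypothetical)
prefix path:

```julia
collect_files("/path/to/prefix", endswith(".so"))
```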
"""
function collect_files(path::AbstractString, predicate::Function = f -> true;
exclude_externalities::Bool = true, exclude_dirs::Bool = true)
# Sometimes `path` doesn't actually live where we think it does, so canonicalize it immediately
path = Pkg.Types.safe_realpath(path)
if !isdir(path)
return String[]
end
# If we are set to exclude externalities, then filter out symlinks that point
# outside of our given `path`.
if exclude_externalities
old_predicate = predicate
predicate = f -> old_predicate(f) && !(islink(f) && !startswith(Pkg.Types.safe_realpath(f), path))
end
collected = String[]
for (root, dirs, files) in walkdir_nosymlinks(path)
if exclude_dirs
list = files
else
list = append!(files, dirs)
end
for f in list
f_path = joinpath(root, f)
if predicate(f_path)
push!(collected, f_path)
end
end
end
return collected
end
# Unwrap Prefix objects automatically
collect_files(prefix::Prefix, args...; kwargs...) = collect_files(prefix.path, args...; kwargs...)
"""
collapse_symlinks(files::Vector{String})
Given a list of files, prune those that are symlinks pointing to other files
within the list.
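
For example, assuming `libfoo.so` is a symlink pointing to `libfoo.so.1`:

```julia
collapse_symlinks(["/prefix/lib/libfoo.so", "/prefix/lib/libfoo.so.1"])
# -> ["/prefix/lib/libfoo.so.1"]
```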
"""
function collapse_symlinks(files::Vector{String})
abs_files = String[]
# Collapse symlinks down to real files, but don't die if we've got a broken symlink
for f in files
try
push!(abs_files, realpath(f))
catch
end
end
# Keep a file unless it is a symlink whose target is itself in `abs_files`
predicate = f -> begin
try
return !(islink(f) && realpath(f) in abs_files)
catch
return false
end
end
return filter(predicate, files)
end
"""
    check_license(prefix, src_name; verbose::Bool = false, silent::Bool = false)
Check that there are license files for the project called `src_name` in the `prefix`.
"""
function check_license(prefix::Prefix, src_name::AbstractString = "";
verbose::Bool = false, silent::Bool = false)
if verbose
@info("Checking license file")
end
license_dir = joinpath(prefix.path, "share", "licenses", src_name)
if isdir(license_dir) && length(readdir(license_dir)) >= 1
if verbose
@info("Found license file(s): " * join(readdir(license_dir), ", "))
end
return true
else
if !silent
@error("Unable to find valid license file in \"\${prefix}/share/licenses/$(src_name)\"")
end
# This is pretty serious; don't let us get through without a license
return false
end
end
end # module Auditor
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 71291 | export build_tarballs, autobuild, print_artifacts_toml, build, get_meta_json
import GitHub: gh_get_json, DEFAULT_API
import SHA: sha256, sha1
using TOML, Dates, UUIDs
using RegistryTools
import LibGit2
import PkgLicenses
const DEFAULT_JULIA_VERSION_SPEC = "1.0"
const DEFAULT_JLLWRAPPERS_VERSION_SPEC = "1.2.0"
const PKG_VERSIONS = Base.VERSION >= v"1.7-" ? Pkg.Versions : Pkg.Types
mutable struct BuildTimer
begin_setup::Float64
end_setup::Float64
begin_build::Float64
end_build::Float64
begin_audit::Float64
end_audit::Float64
begin_package::Float64
end_package::Float64
BuildTimer() = new(NaN, NaN, NaN, NaN, NaN, NaN, NaN, NaN)
end
function Base.show(io::IO, t::BuildTimer)
function rnd(a, b)
min, sec = divrem(b - a, 60)
out = ""
if min ≥ 1
out *= string(Int(min), "m ")
end
out *= string(round(sec; digits=2), "s")
return out
end
# Sanity check: make sure all fields are non-NaN: if that's not the case, just skip.
if all(.!(isnan.(getfield.((t,), fieldnames(BuildTimer)))))
print(io, "Timings: ",
"setup: ", rnd(t.begin_setup, t.end_setup), ", ",
"build: ", rnd(t.begin_build, t.end_build), ", ",
"audit: ", rnd(t.begin_audit, t.end_audit), ", ",
"packaging: ", rnd(t.begin_package, t.end_package),
)
end
end
exclude_logs(_, f) = f != "logs"
only_logs(_, f) = f == "logs"
# Helper function to get the minimum version supported by the given compat
# specification, given as a string.
minimum_compat(compat::String) =
minimum(VersionNumber(rng.lower.t) for rng in PKG_VERSIONS.semver_spec(compat).ranges)
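# For example (illustrative): `minimum_compat("1.6")` is `v"1.6.0"`, and
# `minimum_compat("1.0, 1.6")` is `v"1.0.0"`, i.e. the lowest version
# admitted by any of the ranges in the compat specification.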
const BUILD_HELP = (
"""
Usage: build_tarballs.jl [target1,target2,...] [--help]
[--verbose] [--debug]
[--deploy] [--deploy-bin] [--deploy-jll]
[--register] [--meta-json]
Options:
targets By default `build_tarballs.jl` will build a tarball
for every target within the `platforms` variable.
To override this, pass in a list of comma-separated
target triplets for each target to be built. Note
that this can be used to build for platforms that
are not listed in the 'default list' of platforms
in the build_tarballs.jl script.
--verbose This streams compiler output to stdout during the
build which can be very helpful for finding bugs.
Note that it is colorized if you pass the
--color=yes option to julia, see examples below.
--debug=<mode> This causes a failed build to drop into an
interactive shell for debugging purposes. `<mode>`
can be one of `error`, `begin` or `end`. `error`
drops you into the interactive shell only when there
is an error during the build, this is the default
when no mode is specified. `begin` forces an error
at the beginning of the build, before any command in
the script is run. `end` forces an error at the end
of the build script, useful to debug a successful
build for which the auditor would fail.
--deploy=<repo> Deploy binaries and JLL wrapper code to a github
release of an autogenerated repository. Uses
`github.com/JuliaBinaryWrappers/<name>_jll.jl` by
default, unless `<repo>` is set, in which case it
should be set as `<owner>/<name>_jll.jl`. Setting
this option is equivalent to setting `--deploy-bin`
and `--deploy-jll`. If `<repo>` is set to "local"
then nothing will be uploaded, but JLL packages
will still be written out to `~/.julia/dev/`.
--deploy-bin=<repo> Deploy just the built binaries
--deploy-jll=<repo> Deploy just the JLL code wrappers
--register=<depot> Register into the given depot. If no path is
given, defaults to `~/.julia`. Registration
requires deployment of the JLL wrapper code, so
using `--register` without `--deploy` or the
more specific `--deploy-jll` is an error.
--skip-build Skips building the platform binaries. This option
is useful if, e.g., you have already built all
platform binaries and now only wish to deploy the
JLL package to GitHub. Note that this will error if
not all tarballs for the listed platforms are
present.
--meta-json Output a JSON representation of the given build
instead of actually building. Note that this can
(and often does) output multiple JSON objects for
multiple platforms, multi-stage builds, etc...
--skip-audit Skips auditing of the output products.
--help Print out this message.
Examples:
julia --color=yes build_tarballs.jl --verbose
This builds all tarballs, with colorized output.
julia build_tarballs.jl x86_64-linux-gnu,i686-linux-gnu
This builds two tarballs for the two platforms given, with a
minimum of output messages.
Supported Platforms:
$(join(sort(triplet.(supported_platforms())), "\n "))
"""
)
"""
build_tarballs(ARGS, src_name, src_version, sources, script, platforms,
products, dependencies; kwargs...)
This should be the top-level function called from a `build_tarballs.jl` file.
It takes in the information baked into a `build_tarballs.jl` file such as the
`sources` to download, the `products` to build, etc... and will automatically
download, build and package the tarballs, generating a `build.jl` file when
appropriate.
Generally, `ARGS` should be the top-level Julia `ARGS` command-line arguments
object. `build_tarballs` does some rudimentary parsing of the arguments. To
see what it can do, you can call it with `--help` in the `ARGS` or see the
[Command Line](@ref) section in the manual.
The `kwargs` are passed on to [`autobuild`](@ref), see there for a list of
supported ones. A few additional keyword arguments are accepted:
* `julia_compat` can be set to a version string which is used to set the
supported Julia version in the `[compat]` section of the `Project.toml` of
the generated JLL package. The default value is `"1.0"`.
* `lazy_artifacts` sets whether the artifacts should be lazy.
* `init_block` may be set to a string containing Julia code; if present, this
code will be inserted into the initialization path of the generated JLL
package. This can for example be used to invoke an initialization API of a
shared library.
* `augment_platform_block` may be set to a string containing Julia code; if
present, this code will be inserted into the top-level of the
generated JLL package. It must define a function `augment_platform!` that
takes as a single argument, the target platform and returns the target
platform, with amended tags. This augmented platform will then be used by the
JLL wrapper to select the artifact. Note that this option requires the Julia
compatibility `julia_compat` to be 1.6 or higher.
* `validate_name` ensures that `src_name` constitutes a valid Julia identifier.
Since the generated JLL package is named according to `src_name`, this should
only be set to `false` if you _really_ know what you're doing.
!!! note
The `init_block` and `augment_platform_block` keyword arguments are experimental
and may be removed in a future version of this package. Please use them sparingly.
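
A minimal, illustrative recipe (the URL, checksum, and names below are
placeholders, not a real package):

```julia
using BinaryBuilder

build_tarballs(ARGS, "libfoo", v"1.0.0",
    [ArchiveSource("https://example.com/libfoo-1.0.0.tar.gz", "<sha256>")],
    raw"""
    cd \$WORKSPACE/srcdir/libfoo-*
    ./configure --prefix=\$prefix --host=\$target
    make -j\$nproc install
    install_license COPYING
    """,
    supported_platforms(),
    [LibraryProduct("libfoo", :libfoo)],
    Dependency[])
```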
"""
function build_tarballs(ARGS, src_name, src_version, sources, script,
platforms, products, dependencies;
julia_compat::String = DEFAULT_JULIA_VERSION_SPEC,
validate_name::Bool=true, kwargs...)
@nospecialize
# See if someone has passed in `--help`, and if so, give them the
# assistance they so clearly long for
if "--help" in ARGS
println(BUILD_HELP)
return nothing
end
if validate_name && !Base.isidentifier(src_name)
error("Package name \"$(src_name)\" is not a valid identifier")
end
# Throw an error if we're going to build for platforms not supported by Julia v1.5-.
if any(p -> arch(p) == "armv6l" || (Sys.isbsd(p) && arch(p) == "aarch64"), platforms) && minimum_compat(julia_compat) < v"1.6"
error("Experimental platforms cannot be used with Julia v1.5-.\nChange `julia_compat` to require at least Julia v1.6")
end
# XXX: These are needed as long as we support old-style sources and
# dependencies. Raise a warning for now, deprecate in BB 0.3+
sources = coerce_source.(sources)
dependencies = coerce_dependency.(dependencies)
# Reject user supplied dependencies using a VersionSpec: these should
# either use compat, or build_version, or both (depending on what they are
# trying to achieve). We cannot check for this in the Dependency
# constructor, as there are several valid situations in which we *do* want
# to store versions here (e.g. after running the dependency through the
# package resolver).
for dep in dependencies
if dep isa Dependency && dep.pkg.version != Pkg.Types.VersionSpec("*")
error("Dependency $(dep.pkg.name) specifies a version, use build_version and/or compat instead")
end
end
# Do not clobber caller's ARGS
ARGS = deepcopy(ARGS)
# This sets whether we should build verbosely or not
verbose = check_flag!(ARGS, "--verbose")
# This sets whether auditing should be skipped
skip_audit = check_flag!(ARGS, "--skip-audit")
# This sets whether we drop into a debug shell on failure or not
debug, debug_mode = extract_flag!(ARGS, "--debug", "error")
# Are we skipping building and just outputting JSON?
meta_json, meta_json_file = extract_flag!(ARGS, "--meta-json")
# This sets whether we are going to deploy our binaries/wrapper code to GitHub releases
deploy, deploy_repo = extract_flag!(ARGS, "--deploy", "JuliaBinaryWrappers/$(src_name)_jll.jl")
deploy_bin, deploy_bin_repo = extract_flag!(ARGS, "--deploy-bin", "JuliaBinaryWrappers/$(src_name)_jll.jl")
deploy_jll, deploy_jll_repo = extract_flag!(ARGS, "--deploy-jll", "JuliaBinaryWrappers/$(src_name)_jll.jl")
# Resolve deploy settings
if deploy
deploy_bin = true
deploy_jll = true
deploy_bin_repo = deploy_repo
deploy_jll_repo = deploy_repo
elseif deploy_bin && deploy_jll
# make sure bin repo and jll repo match
if deploy_bin_repo != deploy_jll_repo
error("Binaries and JLLs must be deployed to the same repositories")
end
elseif deploy_bin
deploy_jll_repo = deploy_bin_repo
elseif deploy_jll
deploy_bin_repo = deploy_jll_repo
end
# This sets whether we are going to register, and if so, which
register, register_path = extract_flag!(ARGS, "--register", Pkg.depots1())
if register && !deploy_jll
error("Cannot register without deploying!")
end
if register && deploy_jll_repo == "local"
error("Cannot register with a local deployment!")
end
# This sets whether building should be skipped
skip_build = check_flag!(ARGS, "--skip-build")
if deploy_bin || deploy_jll
code_dir = joinpath(Pkg.devdir(), "$(src_name)_jll")
# Shove them into `kwargs` so that we are conditionally passing them along
kwargs = (; kwargs..., code_dir = code_dir)
end
# If --meta-json was passed, error out if any confusing options were passed
meta_json_stream = nothing
if meta_json
if deploy || deploy_bin || deploy_jll
error("Cannot specify --deploy* with --meta-json!")
end
if register
error("Cannot specify --register with --meta-json!")
end
if debug
error("Cannot specify --debug with --meta-json!")
end
# Otherwise, check to see if we're spitting it out to stdout or a file:
if meta_json_file === nothing
meta_json_stream = stdout
else
meta_json_stream = open(meta_json_file, "a")
end
end
# If the user passed in a platform (or a few, comma-separated) on the
# command-line, use that instead of our default platforms
if length(ARGS) > 0
platforms = BinaryBuilderBase.parse_platform.(split(ARGS[1], ","))
end
# Check to make sure we have the necessary environment stuff
if deploy_bin || deploy_jll
# Check to see if we've already got a wrapper package within the Registry,
# choose a version number that is greater than anything else existent.
build_version = get_next_wrapper_version(src_name, src_version)
if deploy_jll_repo != "local"
@info("Building and deploying version $(build_version) to $(deploy_jll_repo)")
# We need to make sure that the JLL repo at least exists, so that we can deploy binaries to it
# even if we're not planning to register things to it today.
init_jll_package(code_dir, deploy_jll_repo)
else
@info("Building and deploying version $(build_version) to $(code_dir)")
# XXX: should we initialize the git repository here? The problem is that if we
# don't clone for the remote we end up with a diverging history.
end
tag = "$(src_name)-v$(build_version)"
end
# Modify script for debugging
if debug
if debug_mode == "begin"
script = "false\n" * script
elseif debug_mode == "end"
script = script * "\nfalse"
end
end
args = (
# Source information
src_name,
src_version,
sources,
# Build script
script,
# Platforms to build for
platforms,
# Products we're expecting
products,
# Dependencies that must be downloaded
dependencies,
)
extra_kwargs = extract_kwargs(kwargs, (:lazy_artifacts, :init_block, :augment_platform_block))
if meta_json_stream !== nothing
# If they've asked for the JSON metadata, by all means, give it to them!
dict = get_meta_json(args...; extra_kwargs..., julia_compat=julia_compat)
println(meta_json_stream, JSON.json(dict))
if meta_json_stream !== stdout
close(meta_json_stream)
end
build_output_meta = Dict()
elseif skip_build
# If they do not want to build, there is nothing we can do here
build_output_meta = Dict()
if verbose
@info("Skipping the build process for the tarballs as requested...")
end
else
# Build the given platforms using the given sources
build_output_meta = autobuild(
# Controls output product placement, mount directory placement, etc...
pwd(),
args...;
# Flags
verbose,
debug,
skip_audit,
kwargs...,
)
end
if deploy_jll
if verbose
@info("Committing and pushing $(src_name)_jll.jl wrapper code version $(build_version)...")
end
# For deploy keep only runtime dependencies.
dependencies = [dep for dep in dependencies if is_runtime_dependency(dep)]
# The location the binaries will be available from
bin_path = "https://github.com/$(deploy_jll_repo)/releases/download/$(tag)"
if !skip_build
# Build JLL package based on output of autobuild
build_jll_package(src_name, build_version, sources, code_dir, build_output_meta,
dependencies, bin_path; verbose, julia_compat, extra_kwargs...)
else
# Rebuild output meta data from the information we have here
rebuild_jll_package(src_name, build_version, sources, platforms, products, dependencies,
joinpath(pwd(), "products"), bin_path;
code_dir, verbose, from_scratch=false,
julia_compat, extra_kwargs...)
end
if deploy_jll_repo != "local"
push_jll_package(src_name, build_version; code_dir=code_dir, deploy_repo=deploy_jll_repo)
end
if register
if verbose
@info("Registering new wrapper code version $(build_version)...")
end
register_jll(src_name, build_version, dependencies, julia_compat;
deploy_repo=deploy_jll_repo, code_dir=code_dir, extra_kwargs...)
end
end
if deploy_bin && deploy_bin_repo != "local"
# Upload the binaries
if verbose
@info("Deploying binaries to release $(tag) on $(deploy_bin_repo) via `ghr`...")
end
upload_to_github_releases(deploy_bin_repo, tag, joinpath(pwd(), "products"); verbose=verbose)
end
return build_output_meta
end
function check_flag!(ARGS, flag)
flag_present = flag in ARGS
filter!(x -> x != flag, ARGS)
return flag_present
end
function extract_flag!(ARGS, flag, val = nothing)
for f in ARGS
if f == flag || startswith(f, string(flag, "="))
# Check if it's just `--flag` or if it's `--flag=foo`
if f != flag
val = split(f, '=')[2]
end
# Drop this value from our ARGS
filter!(x -> x != f, ARGS)
return (true, val)
end
end
return (false, val)
end
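# Illustrative behavior: with ARGS = ["--deploy=local", "--verbose"], the call
# `extract_flag!(ARGS, "--deploy", "Org/Foo_jll.jl")` returns (true, "local")
# and removes "--deploy=local" from ARGS; if the flag is absent, it returns
# (false, "Org/Foo_jll.jl") and leaves ARGS untouched.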
"""
get_compilers_versions(; compilers = [:c])
Return the script string that is used to print the versions of the given `compilers`.
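
For example:

```julia
script = get_compilers_versions(; compilers = [:c, :rust])
# `script` is a shell snippet containing `cc --version`, `gcc --version`,
# `rustc --version`, and so on, ready to be run in the build environment.
```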
"""
function get_compilers_versions(; compilers = [:c])
output =
"""
set -x
"""
if :c in compilers
output *=
"""
cc --version
c++ --version
gcc --version
g++ --version
clang --version
clang++ --version
objc --version
f77 --version
gfortran --version
ld -v
"""
end
if :go in compilers
output *=
"""
go version
"""
end
if :rust in compilers
output *=
"""
rustc --version
rustup --version
cargo --version
"""
end
return output
end
function upload_to_github_releases(repo, tag, path; gh_auth=Wizard.github_auth(;allow_anonymous=false),
attempts::Int = 3, verbose::Bool = false)
for attempt in 1:attempts
try
# Note: in some cases we may want to reduce/avoid concurrency to avoid exceeding
# secondary rate limits:
# https://docs.github.com/en/rest/using-the-rest-api/best-practices-for-using-the-rest-api?apiVersion=2022-11-28#avoid-concurrent-requests
# https://docs.github.com/en/rest/using-the-rest-api/rate-limits-for-the-rest-api?apiVersion=2022-11-28#about-secondary-rate-limits
concurrency = get(ENV, "BINARYBUILDER_GHR_CONCURRENCY", string(Sys.CPU_THREADS))
run(`$(ghr()) -u $(dirname(repo)) -r $(basename(repo)) -t $(gh_auth.token) -p $(concurrency) $(tag) $(path)`)
return
catch
if verbose && attempt < attempts
@info("`ghr` upload step failed, beginning attempt #$(attempt + 1)...")
end
end
end
error("Unable to upload $(path) to GitHub repo $(repo) on tag $(tag)")
end
function get_next_wrapper_version(src_name::AbstractString, src_version::VersionNumber)
# If src_version already has a build_number, just return it immediately
if src_version.build != ()
return src_version
end
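# Otherwise, compute the next free build number ourselves. Illustrative
# example: if versions 1.2.3+0 and 1.2.3+1 are already registered for this
# JLL, `src_version = v"1.2.3"` maps to build_version 1.2.3+2.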
ctx = Pkg.Types.Context()
# Force-update the registry here, since we may have pushed a new version recently
update_registry(devnull)
jll_name = "$(src_name)_jll"
uuid = jll_uuid(jll_name)
# If it does, we need to bump the build number up to the next value
build_number = UInt64(0)
if uuid in Pkg.Types.registered_uuids(ctx.registries, jll_name)
# Collect all version numbers of the package across all registries.
versions = VersionNumber[]
for reg in ctx.registries
if !haskey(reg, uuid)
continue
end
pkg_info = Pkg.Registry.registry_info(reg[uuid])
append!(versions, sort!(collect(keys(pkg_info.version_info))))
end
unique!(sort!(versions))
# Find largest version number that matches ours
filter!(v -> (v.major == src_version.major) &&
(v.minor == src_version.minor) &&
(v.patch == src_version.patch) &&
(v.build isa Tuple{<:UInt}), versions)
# Our build number must be larger than the maximum already present in the registry
if !isempty(versions)
build_number = first(maximum(versions).build) + 1
end
end
# Construct build_version (src_version + build_number)
build_version = VersionNumber(src_version.major, src_version.minor,
src_version.patch, src_version.prerelease, (build_number,))
end
function _registered_packages(registry_url::AbstractString)
tmp_dir = mktempdir()
atexit(() -> rm(tmp_dir; force = true, recursive = true))
registry_dir = joinpath(tmp_dir, "REGISTRY")
LibGit2.clone(registry_url, registry_dir)
registry = TOML.parsefile(joinpath(registry_dir, "Registry.toml"))
packages = Vector{String}(undef, 0)
for p in registry["packages"]
push!(packages, p[2]["name"])
end
rm(tmp_dir; force = true, recursive = true)
return packages
end
function _package_is_registered(registry_url::AbstractString,
package::AbstractString)
registered_packages = _registered_packages(registry_url)
return package in registered_packages
end
is_yggdrasil() = get(ENV, "YGGDRASIL", "false") == "true"
# Use a Buildkite environment variable to get the current commit hash
yggdrasil_head() = get(ENV, "BUILDKITE_COMMIT", "")
function register_jll(name, build_version, dependencies, julia_compat;
deploy_repo="JuliaBinaryWrappers/$(name)_jll.jl",
code_dir=joinpath(Pkg.devdir(), "$(name)_jll"),
gh_auth=Wizard.github_auth(;allow_anonymous=false),
gh_username=gh_get_json(DEFAULT_API, "/user"; auth=gh_auth)["login"],
augment_platform_block::String="",
lazy_artifacts::Bool=!isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
kwargs...)
if !isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.6"
error("Augmentation blocks cannot be used with Julia v1.5-.\nChange `julia_compat` to require at least Julia v1.6")
end
# Calculate tree hash of wrapper code
wrapper_tree_hash = bytes2hex(Pkg.GitTools.tree_hash(code_dir))
wrapper_commit_hash = LibGit2.head(code_dir)
# Use RegistryTools to push up a new `General` branch with this JLL package registered within it
# TODO: Update our fork periodically from upstream `General`.
cache = RegistryTools.RegistryCache(joinpath(Pkg.depots1(), "registries_binarybuilder"))
registry_url = "https://$(gh_username):$(gh_auth.token)@github.com/JuliaRegistries/General"
cache.registries[registry_url] = Base.UUID("23338594-aafe-5451-b93e-139f81909106")
jllwrappers_compat = isempty(augment_platform_block) ? DEFAULT_JLLWRAPPERS_VERSION_SPEC : "1.4.0"
project = Pkg.Types.Project(build_project_dict(name, build_version, dependencies, julia_compat; jllwrappers_compat, lazy_artifacts, augment_platform_block))
project_file = joinpath(mktempdir(), "Project.toml")
Pkg.Types.write_project(project, project_file)
errors = setdiff(RegistryTools.registrator_errors, [:version_less_than_all_existing])
reg_branch = RegistryTools.register(
"https://github.com/$(deploy_repo).git",
project_file,
wrapper_tree_hash;
registry=registry_url,
cache=cache,
push=true,
checks_triggering_error = errors,
)
if haskey(reg_branch.metadata, "error")
@error(reg_branch.metadata["error"])
else
upstream_registry_url = "https://github.com/JuliaRegistries/General"
name_jll = "$(name)_jll"
if _package_is_registered(upstream_registry_url, name_jll)
pr_title = "New version: $(name_jll) v$(build_version)"
else
pr_title = "New package: $(name_jll) v$(build_version)"
end
# Open pull request against JuliaRegistries/General
body = """
Autogenerated JLL package registration
* Registering JLL package $(basename(deploy_repo))
* Repository: https://github.com/$(deploy_repo)
* Version: v$(build_version)
* Commit: $(wrapper_commit_hash)
"""
if is_yggdrasil()
commit_hash = yggdrasil_head()
body *= """
* Revision on Yggdrasil: https://github.com/JuliaPackaging/Yggdrasil/commit/$commit_hash
"""
commit_author_login = get_github_author_login("JuliaPackaging/Yggdrasil", commit_hash; gh_auth=gh_auth)
if commit_author_login !== nothing
body *= """
* Created by: @$commit_author_login
"""
end
end
params = Dict(
"base" => "master",
"head" => "$(reg_branch.branch)",
"maintainer_can_modify" => true,
"title" => pr_title,
"body" => body,
)
Wizard.create_or_update_pull_request("JuliaRegistries/General", params; auth=gh_auth)
end
end
function get_meta_json(
src_name::AbstractString,
src_version::VersionNumber,
sources::Vector{<:AbstractSource},
script::AbstractString,
platforms::Vector,
products::Vector{<:Product},
dependencies::Vector{<:AbstractDependency};
julia_compat::String = DEFAULT_JULIA_VERSION_SPEC,
init_block::String = "",
augment_platform_block::String = "",
lazy_artifacts::Bool=!isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
)
dict = Dict(
"name" => src_name,
"version" => "v$(src_version)",
"sources" => sources,
"script" => script,
"products" => products,
"dependencies" => dependencies,
"julia_compat" => julia_compat,
"lazy_artifacts" => lazy_artifacts,
"init_block" => init_block,
"augment_platform_block" => augment_platform_block,
)
# Do not write the list of platforms when building only for `AnyPlatform`
if platforms != [AnyPlatform()]
dict["platforms"] = triplet.(platforms)
end
return dict
end
function compose_debug_prompt(workspace)
log_files = String[]
for (root, dirs, files) in walkdir(joinpath(workspace, "srcdir"))
for file in files
if endswith(file, ".log")
push!(log_files, replace(joinpath(root, file), workspace => "\${WORKSPACE}"))
end
end
end
if length(log_files) > 0
log_files_str = join(log_files, "\n - ")
debug_shell_prompt = """
Build failed, the following log files were generated:
- $log_files_str
Launching debug shell:
"""
else
debug_shell_prompt = "Build failed, launching debug shell:"
end
return debug_shell_prompt
end
"""
autobuild(dir::AbstractString, src_name::AbstractString,
src_version::VersionNumber, sources::Vector,
script::AbstractString, platforms::Vector,
products::Vector, dependencies::Vector;
verbose = false, debug = false,
skip_audit = false, ignore_audit_errors = true,
autofix = true, code_dir = nothing,
meta_json_file = nothing, require_license = true,
dont_dlopen = false, kwargs...)
Runs the boiler plate code to download, build, and package a source package
for a list of platforms. This method takes a veritable truckload of arguments,
here are the relevant actors, broken down in brief:
* `dir`: the root of the build; products will be placed within `dir`/products,
and mountpoints will be placed within `dir`/build/.
* `src_name`: the name of the source package being built and will set the name
of the built tarballs.
* `src_version`: the version of the source package.
* `platforms`: a list of platforms to build for.
* `sources`: a vector of all sources to download and unpack before building
begins, as [`AbstractSource`](@ref)s.
* `script`: a string representing a shell script to run as the build.
* `products`: the list of `Product`s which shall be built.
* `dependencies`: a vector of JLL dependency packages as
[`AbstractDependency`](@ref) that should be installed before building begins.
* `verbose`: Enable verbose mode. What did you expect?
* `debug`: cause a failed build to drop into an interactive shell so that
the build can be inspected easily.
* `skip_audit`: disable the typical audit that occurs at the end of a build.
* `ignore_audit_errors`: do not kill a build even if a problem is found.
* `autofix`: give `BinaryBuilder` permission to automatically fix issues it
finds during audit passes. Highly recommended.
* `code_dir`: sets where autogenerated JLL packages will be put.
* `require_license` enables a special audit pass that requires licenses to be
installed by all packages.
* `dont_dlopen`: don't try to `dlopen` library products. This is separate from
specifying `dont_dlopen` on a `LibraryProduct` in that it still results in
the generated JLL loading the library at run time, and only prevents
BinaryBuilder from doing so during JLL generation.
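
A minimal invocation sketch (all names and values below are illustrative):

```julia
build_output_meta = autobuild(pwd(), "libfoo", v"1.0.0",
    [DirectorySource("./bundled")],       # sources
    "make -C libfoo install",             # build script
    [Platform("x86_64", "linux")],        # platforms
    [LibraryProduct("libfoo", :libfoo)],  # products
    Dependency[];                         # dependencies
    verbose=true)
```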
"""
function autobuild(dir::AbstractString,
src_name::AbstractString,
src_version::VersionNumber,
sources::Vector{<:AbstractSource},
script::AbstractString,
platforms::Vector,
products::Vector{<:Product},
dependencies::Vector{<:AbstractDependency};
verbose::Bool = false,
debug::Bool = false,
skip_audit::Bool = false,
ignore_audit_errors::Bool = true,
autofix::Bool = true,
code_dir::Union{String,Nothing} = nothing,
require_license::Bool = true,
dont_dlopen::Bool = false,
kwargs...)
@nospecialize
# This is what we'll eventually return
@info("Building for $(join(sort(triplet.(platforms)), ", "))")
build_output_meta = Dict()
# Resolve dependencies into PackageSpecs now, ensuring we have UUIDs for all deps
all_resolved, dependencies = resolve_jlls(dependencies, outs=(verbose ? stdout : devnull))
if !all_resolved
error("Invalid dependency specifications!")
end
# If the user passed in a src_version with a prerelease or build number, bail out
if any(!isempty, (src_version.prerelease, src_version.build))
error("Will not build with a `src_version` that does not have the format `major.minor.patch`! Do not set prerelease or build numbers.")
end
# We must prepare our sources. Download them, hash them, etc...
source_files = download_source.(sources; verbose=verbose)
# Our build products will go into ./products
out_path = joinpath(dir, "products")
try mkpath(out_path) catch; end
for platform in sort(collect(platforms), by = triplet)
timer = BuildTimer()
timer.begin_setup = time()
# We build in a platform-specific directory
build_path = joinpath(dir, "build", triplet(platform))
mkpath(build_path)
shards = choose_shards(platform; extract_kwargs(kwargs, (:preferred_gcc_version,:preferred_llvm_version,:bootstrap_list,:compilers))...)
concrete_platform = get_concrete_platform(platform, shards)
prefix = setup_workspace(
build_path,
source_files,
concrete_platform,
default_host_platform;
verbose=verbose,
)
setup_deps(f, prefix, dependencies, platform, verbose) =
setup_dependencies(prefix, Pkg.Types.PackageSpec[getpkg(d) for d in filter_platforms(dependencies, platform) if f(d) && is_build_dependency(d)], platform; verbose)
host_artifact_paths = setup_deps(is_host_dependency, prefix, dependencies, default_host_platform, verbose)
target_artifact_paths = setup_deps(is_target_dependency, prefix, dependencies, concrete_platform, verbose)
# Create a runner to work inside this workspace with the nonce built-in
ur = preferred_runner()(
prefix.path;
cwd = "/workspace/srcdir",
platform = concrete_platform,
verbose = verbose,
workspaces = [
joinpath(prefix, "metadir") => "/meta",
],
compiler_wrapper_dir = joinpath(prefix, "compiler_wrappers"),
src_name = src_name,
shards = shards,
extract_kwargs(kwargs, (:preferred_gcc_version,:preferred_llvm_version,:compilers,:allow_unsafe_flags,:lock_microarchitecture,:clang_use_lld))...,
)
# Set up some bash traps
trapper_wrapper = """
# Stop if we hit any errors.
set -e
# If we're running as `bash`, then use the `DEBUG` and `ERR` traps
if [ \$(basename \$0) = "bash" ]; then
trap "RET=\\\$?; \\
trap - DEBUG INT TERM ERR EXIT; \\
set +e +x; \\
auto_install_license; \\
save_env; \\
exit \\\$RET" \\
EXIT
trap "RET=\\\$?; \\
trap - DEBUG INT TERM ERR EXIT; \\
set +e +x; \\
echo Previous command \\\$! exited with \\\$RET >&2; \\
save_env; \\
exit \\\$RET" \\
INT TERM ERR
# Start saving everything into our history
trap save_history DEBUG
else
# If we're running in `sh` or something like that, we need a
# slightly slimmer set of traps. :(
trap "RET=\\\$?; \\
echo Previous command exited with \\\$RET >&2; \\
set +e +x; \\
save_env; \\
exit \\\$RET" \\
EXIT INT TERM
fi
$(script)
"""
dest_prefix = Prefix(BinaryBuilderBase.destdir(prefix.path, concrete_platform))
did_succeed = with_logfile(dest_prefix, "$(src_name).log"; subdir=src_name) do io
# Let's start the presentations with BinaryBuilder.jl
write(io, "BinaryBuilder.jl version: $(get_bb_version())\n\n")
# Get the list of compilers...
compilers = extract_kwargs(kwargs, (:compilers,))
# ...because we want to log all their versions. However, we don't
# want this to be shown in the console, so we first run this without
# teeing to stdout
run(ur, `/bin/bash -l -c $(get_compilers_versions(; compilers...))`, io;
verbose = verbose, tee_stream = devnull)
timer.end_setup = time()
# Run the build script
timer.begin_build = time()
res = run(ur, `/bin/bash -l -c $(trapper_wrapper)`, io; verbose=verbose)
timer.end_build = time()
res
end
if !did_succeed
if debug
# Print debug prompt and paths to any generated log files
debug_shell_prompt = compose_debug_prompt(prefix.path)
@warn(debug_shell_prompt)
run_interactive(ur, `/bin/bash -l -i`)
end
msg = "Build for $(src_name) on $(triplet(platform)) did not complete successfully\n"
error(msg)
end
# Run an audit of the prefix to ensure it is properly relocatable
timer.begin_audit = time()
if !skip_audit
audit_result = audit(dest_prefix, src_name;
platform=platform, verbose=verbose,
has_csl = any(getname.(dependencies) .== "CompilerSupportLibraries_jll"),
autofix=autofix, require_license=require_license)
if !audit_result && !ignore_audit_errors
msg = replace("""
Audit failed for $(dest_prefix.path).
Address the errors above to ensure relocatability.
To override this check, set `ignore_audit_errors = true`.
""", '\n' => ' ')
error(strip(msg))
end
end
timer.end_audit = time()
# Finally, error out if something isn't satisfied
unsatisfied_so_die = false
for p in products
if platform isa AnyPlatform && !(p isa FileProduct)
# `AnyPlatform` is by design platform-independent, so we allow
# only `FileProduct`s.
error("Cannot have $(typeof(p)) for AnyPlatform")
end
if !satisfied(p, dest_prefix; verbose=verbose, platform=platform,
skip_dlopen=dont_dlopen)
if !verbose
# If we never got a chance to see the verbose output, give it here:
locate(p, dest_prefix; verbose=true, platform=platform,
skip_dlopen=dont_dlopen)
end
@error("Built $(src_name) but $(variable_name(p)) still unsatisfied:")
unsatisfied_so_die = true
end
end
if unsatisfied_so_die
error("Cannot continue with unsatisfied build products!")
end
# We also need to capture some info about each product
products_info = Dict{Product,Any}()
for p in products
product_path = locate(p, dest_prefix; platform=platform, skip_dlopen=dont_dlopen)
products_info[p] = Dict("path" => relpath(product_path, dest_prefix.path))
if p isa LibraryProduct || p isa FrameworkProduct
products_info[p]["soname"] = something(
Auditor.get_soname(product_path),
basename(product_path),
)
end
end
# Unsymlink all the deps from the dest_prefix
cleanup_dependencies(prefix, host_artifact_paths, default_host_platform)
cleanup_dependencies(prefix, target_artifact_paths, concrete_platform)
# Search for dead links in dest_prefix; raise warnings about them.
Auditor.warn_deadlinks(dest_prefix.path)
# Cull empty directories, for neatness' sake, unless auditing is disabled
if !skip_audit
for (root, dirs, files) = walkdir(dest_prefix.path; topdown=false)
# We do readdir() here because `walkdir()` does not do a true in-order traversal
if isempty(readdir(root))
rm(root)
end
end
end
# Compress log files
compress_dir(logdir(dest_prefix; subdir=src_name); verbose)
# Once we're built up, go ahead and package this dest_prefix out
timer.begin_package = time()
tarball_path, tarball_hash, git_hash = package(
dest_prefix,
joinpath(out_path, src_name),
src_version;
platform=platform,
verbose=verbose,
force=true,
# Do not include logs into the main tarball
filter=exclude_logs,
)
# Create another tarball only for the logs
package(
dest_prefix,
joinpath(out_path, src_name * "-logs"),
src_version;
platform=platform,
verbose=verbose,
force=true,
filter=only_logs,
)
timer.end_package = time()
build_output_meta[platform] = (
tarball_path,
tarball_hash,
git_hash,
products_info,
)
# Destroy the workspace, taking care to make sure that we don't run into any
# permissions errors while we do so.
Base.Filesystem.prepare_for_deletion(prefix.path)
rm(prefix.path; recursive=true)
# If the whole build_path is empty, then remove it too. If it's not, it's probably
# because some other build is doing something simultaneously with this target, and we
# don't want to mess with their stuff.
if isempty(readdir(build_path))
rm(build_path; recursive=true)
end
verbose && @info timer
end
# Return our product hashes
return build_output_meta
end
function download_github_release(download_dir, repo, tag; gh_auth=Wizard.github_auth(), verbose::Bool=false)
release = gh_get_json(DEFAULT_API, "/repos/$(repo)/releases/tags/$(tag)", auth=gh_auth)
assets = [a for a in release["assets"] if endswith(a["name"], ".tar.gz")]
for asset in assets
if verbose
@info("Downloading $(asset["name"])")
end
download(asset["browser_download_url"], joinpath(download_dir, asset["name"]))
end
return assets
end
function get_github_author_login(repository, commit_hash; gh_auth=Wizard.github_auth())
try
commit = GitHub.commit(repository, commit_hash; auth=gh_auth)
commit.author.login
catch
nothing
end
end
# Init remote repository, and its local counterpart
function init_jll_package(code_dir, deploy_repo;
gh_auth = Wizard.github_auth(;allow_anonymous=false))
url = "https://github.com/$(deploy_repo)"
try
# This throws if it does not exist
GitHub.repo(deploy_repo; auth=gh_auth)
catch e
# If it doesn't exist, create it.
# check whether gh_org might be a user, not an organization.
gh_org = dirname(deploy_repo)
isorg = GitHub.owner(gh_org; auth=gh_auth).typ == "Organization"
owner = GitHub.Owner(gh_org, isorg)
@info("Creating new wrapper code repo at $(url)")
try
GitHub.create_repo(owner, basename(deploy_repo), Dict("license_template" => "mit", "has_issues" => "false"); auth=gh_auth)
catch create_e
# If creation failed, it could be because the repo was created in the meantime.
# Check for that; if it still doesn't exist, then freak out. Otherwise, continue on.
try
GitHub.repo(deploy_repo; auth=gh_auth)
catch
rethrow(create_e)
end
end
end
if !isdir(code_dir)
# If it does exist, clone it down:
@info("Cloning wrapper code repo from $(url) into $(code_dir)")
Wizard.with_gitcreds("x-access-token", gh_auth.token) do creds
LibGit2.clone(url, code_dir; credentials=creds)
end
else
# Otherwise, hard-reset to latest main:
repo = LibGit2.GitRepo(code_dir)
Wizard.with_gitcreds("x-access-token", gh_auth.token) do creds
LibGit2.fetch(repo; credentials=creds)
end
main_branch = LibGit2.lookup_branch(repo, "origin/main", true)
# Starting from 2020-10-01 GitHub uses `main` as the default name of the
# main branch of a repository, but it used to use `master`
if isnothing(main_branch)
main_branch = LibGit2.lookup_branch(repo, "origin/master", true)
remote_branch = "master"
else
remote_branch = "main"
end
origin_main_oid = LibGit2.GitHash(main_branch)
LibGit2.reset!(repo, origin_main_oid, LibGit2.Consts.RESET_HARD)
if string(LibGit2.head_oid(repo)) != string(origin_main_oid) || remote_branch == "master"
LibGit2.branch!(repo, "main", string(origin_main_oid); force=true)
end
end
end
# rebuild_jll_package is not called from anywhere in BinaryBuilder,
# but rather from JuliaPackaging/Yggdrasil/.ci/register_package.jl
function rebuild_jll_package(obj::Dict;
download_dir = nothing,
upload_prefix = nothing,
build_version = nothing,
gh_org::String = "JuliaBinaryWrappers",
verbose::Bool = false,
from_scratch::Bool = true)
if build_version === nothing
build_version = BinaryBuilder.get_next_wrapper_version(obj["name"], obj["version"])
end
if download_dir === nothing
download_dir = mktempdir()
repo = "$(gh_org)/$(obj["name"])_jll.jl"
tag = "$(obj["name"])-v$(build_version)"
download_github_release(download_dir, repo, tag; verbose=verbose)
upload_prefix = "https://github.com/$(repo)/releases/download/$(tag)"
elseif upload_prefix === nothing
error("If download_dir is specified, you must specify upload_prefix as well!")
end
julia_compat = get(obj, "julia_compat", DEFAULT_JULIA_VERSION_SPEC)
augment_platform_block = get(obj, "augment_platform_block", "")
lazy_artifacts = get(obj, "lazy_artifacts", !isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7")
return rebuild_jll_package(
obj["name"],
build_version,
obj["sources"],
obj["platforms"],
obj["products"],
obj["dependencies"],
download_dir,
upload_prefix;
verbose,
lazy_artifacts,
julia_compat,
init_block = get(obj, "init_block", ""),
augment_platform_block,
from_scratch,
)
end
function rebuild_jll_package(name::String, build_version::VersionNumber, sources::Vector,
platforms::Vector, products::Vector, dependencies::Vector,
download_dir::String, upload_prefix::String;
code_dir::String = joinpath(Pkg.devdir(), "$(name)_jll"),
verbose::Bool = false, from_scratch::Bool = true,
kwargs...)
# We're going to recreate "build_output_meta"
build_output_meta = Dict()
# For each platform, we have two tarballs: the main with the full product,
# and the logs-only one. This function filters out the logs one.
filter_main_tarball(f, platform) = occursin(".$(triplet(platform)).tar", f) && !occursin("-logs.", f)
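# For example (hypothetical file names), for x86_64-linux-gnu this keeps
# "Foo.v1.2.3.x86_64-linux-gnu.tar.gz" but rejects
# "Foo-logs.v1.2.3.x86_64-linux-gnu.tar.gz".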
# Then generate a JLL package for each platform
downloaded_files = readdir(download_dir)
for platform in sort(collect(platforms), by = triplet)
# Find the corresponding tarball:
tarball_idx = findfirst(f -> filter_main_tarball(f, platform), downloaded_files)
# No tarball matching the given platform...
if tarball_idx === nothing
# ...but wait, maybe the tarball still uses the os version number for
# FreeBSD or macOS?
for (isos, try_os_version) in ((Sys.isfreebsd, "11.1"), (Sys.isapple, "14"))
if isos(platform) && os_version(platform) === nothing
tmp_platform = deepcopy(platform)
tmp_platform["os_version"] = try_os_version
tarball_idx = findfirst(f -> filter_main_tarball(f, tmp_platform), downloaded_files)
end
end
end
# Ok, really no tarball matching the given platform
if tarball_idx === nothing
error("Incomplete JLL release! Could not find tarball for $(triplet(platform))")
end
tarball_path = joinpath(download_dir, downloaded_files[tarball_idx])
# Begin reconstructing all the information we need
tarball_hash = open(tarball_path, "r") do io
bytes2hex(sha256(io))
end
# Unpack the tarball into a new location, calculate the git hash and locate() each product;
mktempdir() do dest_prefix
unpack(tarball_path, dest_prefix)
git_hash = Base.SHA1(Pkg.GitTools.tree_hash(dest_prefix))
if verbose
@info("Calculated git tree hash $(bytes2hex(git_hash.bytes)) for $(basename(tarball_path))")
end
# Determine locations of each product
products_info = Dict{Product,Any}()
for p in products
product_path = locate(p, Prefix(dest_prefix); platform=platform, verbose=verbose, skip_dlopen=true)
if product_path === nothing
error("Unable to locate $(p) within $(dest_prefix) for $(triplet(platform))")
end
products_info[p] = Dict("path" => relpath(product_path, dest_prefix))
if p isa LibraryProduct || p isa FrameworkProduct
products_info[p]["soname"] = something(
Auditor.get_soname(product_path),
basename(product_path),
)
end
end
# Store all this information within build_output_meta:
build_output_meta[platform] = (
joinpath(upload_prefix, downloaded_files[tarball_idx]),
tarball_hash,
git_hash,
products_info,
)
# Override read-only permissions before cleaning up the directory
Base.Filesystem.prepare_for_deletion(dest_prefix)
end
end
# If `from_scratch` is set (the default) we clear out any old crusty code
# before generating our new, pristine, JLL package within it. :)
if from_scratch
rm(joinpath(code_dir, "src"); recursive=true, force=true)
rm(joinpath(code_dir, "Artifacts.toml"); force=true)
end
# Finally, generate the full JLL package
build_jll_package(name, build_version, sources, code_dir, build_output_meta,
dependencies, upload_prefix; verbose=verbose,
kwargs...)
end
function build_jll_package(src_name::String,
build_version::VersionNumber,
sources::Vector,
code_dir::String,
build_output_meta::Dict,
dependencies::Vector,
bin_path::String;
verbose::Bool = false,
julia_compat::String = DEFAULT_JULIA_VERSION_SPEC,
init_block::String = "",
augment_platform_block::String = "",
# If we support versions older than Julia v1.7 the artifact
# should be lazy when we augment the platform.
lazy_artifacts::Bool = !isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
)
# Make way, for prince artifacti
mkpath(joinpath(code_dir, "src", "wrappers"))
# Drop build dependencies
dependencies = [d for d in dependencies if is_runtime_dependency(d)]
platforms = keys(build_output_meta)
products_info = Dict{Product,Any}()
for platform in sort(collect(platforms), by = triplet)
if verbose
@info("Generating jll package for $(triplet(platform)) in $(code_dir)")
end
# Extract this platform's information. Each of these things can be platform-specific
# (including the set of products!) so be general here.
tarball_name, tarball_hash, git_hash, products_info = build_output_meta[platform]
# Add an Artifacts.toml
artifacts_toml = joinpath(code_dir, "Artifacts.toml")
download_info = Tuple[
(joinpath(bin_path, basename(tarball_name)), tarball_hash),
]
if platform isa AnyPlatform
# AnyPlatform begs for a platform-independent artifact
bind_artifact!(artifacts_toml, src_name, git_hash; download_info=download_info, force=true, lazy=lazy_artifacts)
else
bind_artifact!(artifacts_toml, src_name, git_hash; platform=platform, download_info=download_info, force=true, lazy=lazy_artifacts)
end
# Generate the platform-specific wrapper code
open(joinpath(code_dir, "src", "wrappers", "$(triplet(platform)).jl"), "w") do io
println(io, "# Autogenerated wrapper script for $(src_name)_jll for $(triplet(platform))")
if !isempty(products_info)
println(io, """
export $(join(sort(variable_name.(first.(collect(products_info)))), ", "))
""")
end
for dep in filter_platforms(dependencies, platform)
if !is_top_level_dependency(dep)
println(io, "using $(getname(dep))")
end
end
# Generate header definitions like `find_artifact_dir()`
println(io, "JLLWrappers.@generate_wrapper_header($(repr(src_name)))")
# Next, begin placing products
function global_declaration(p::LibraryProduct, p_info::Dict)
return "JLLWrappers.@declare_library_product($(variable_name(p)), $(repr(p_info["soname"])))"
end
global_declaration(p::FrameworkProduct, p_info::Dict) = global_declaration(p.libraryproduct, p_info)
function global_declaration(p::ExecutableProduct, p_info::Dict)
vp = variable_name(p)
# An executable product's public interface is a do-block wrapper function
return "JLLWrappers.@declare_executable_product($(variable_name(p)))"
end
function global_declaration(p::FileProduct, p_info::Dict)
return "JLLWrappers.@declare_file_product($(variable_name(p)))"
end
# Create relative path mappings that are compile-time constant, and mutable
# mappings that are initialized by __init__() at load time.
for (p, p_info) in sort(products_info)
println(io, global_declaration(p, p_info))
end
print(io, """
function __init__()
JLLWrappers.@generate_init_header($(join(getname.(filter_platforms(dependencies, platform)), ", ")))
""")
for (p, p_info) in sort(products_info)
vp = variable_name(p)
if Sys.iswindows(platform)
# `dlopen` on Windows isn't always happy to see
# forward slashes in the path:
# https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/941.
# Workaround the issue by normalising the path separator to
# the backslash.
p_info["path"] = replace(p_info["path"], "/" => "\\")
end
if p isa LibraryProduct || p isa FrameworkProduct
println(io, """
JLLWrappers.@init_library_product(
$(vp),
$(repr(p_info["path"])),
$(BinaryBuilderBase.dlopen_flags_str(p)),
)
""")
elseif p isa ExecutableProduct
println(io, """
JLLWrappers.@init_executable_product(
$(vp),
$(repr(p_info["path"])),
)
""")
elseif p isa FileProduct
println(io, """
JLLWrappers.@init_file_product(
$(vp),
$(repr(p_info["path"])),
)
""")
end
end
println(io, " JLLWrappers.@generate_init_footer()")
if !isempty(init_block)
print(io, """
$(init_block)
""")
end
print(io, """
end # __init__()
""")
end
end
if !isempty(augment_platform_block)
pkg_dir = joinpath(code_dir, ".pkg")
!ispath(pkg_dir) && mkdir(pkg_dir)
write(joinpath(pkg_dir, "platform_augmentation.jl"), augment_platform_block)
write(joinpath(pkg_dir, "select_artifacts.jl"),
"""
push!(Base.LOAD_PATH, dirname(@__DIR__))
using TOML, Artifacts, Base.BinaryPlatforms
include("./platform_augmentation.jl")
artifacts_toml = joinpath(dirname(@__DIR__), "Artifacts.toml")
# Get "target triplet" from ARGS, if given (defaulting to the host triplet otherwise)
target_triplet = get(ARGS, 1, Base.BinaryPlatforms.host_triplet())
# Augment this platform object with any special tags we require
platform = augment_platform!(HostPlatform(parse(Platform, target_triplet)))
# Select all downloadable artifacts that match that platform
artifacts = select_downloadable_artifacts(artifacts_toml; platform, include_lazy=true)
# Output the result to `stdout` as a TOML dictionary
TOML.print(stdout, artifacts)
""")
end
# Generate target-demuxing main source file.
open(joinpath(code_dir, "src", "$(src_name)_jll.jl"), "w") do io
print(io, """
# Use baremodule to shave off a few KB from the serialized `.ji` file
baremodule $(src_name)_jll
using Base
using Base: UUID
""")
if lazy_artifacts
println(io, "using LazyArtifacts")
end
for dep in dependencies
if is_top_level_dependency(dep)
println(io, "using $(getname(dep))")
end
end
if !isempty(augment_platform_block)
print(io, """
Base.include(@__MODULE__, joinpath("..", ".pkg", "platform_augmentation.jl"))
""")
end
print(io, """
import JLLWrappers
JLLWrappers.@generate_main_file_header($(repr(src_name)))
JLLWrappers.@generate_main_file($(repr(src_name)), $(repr(jll_uuid("$(src_name)_jll"))))
end # module $(src_name)_jll
""")
end
print_source(io, s::ArchiveSource) = println(io, "* compressed archive: ", s.url, " (SHA256 checksum: `", s.hash,"`)")
print_source(io, s::GitSource) = println(io, "* git repository: ", s.url, " (revision: `", s.hash,"`)")
print_source(io, s::FileSource) = println(io, "* file: ", s.url, " (SHA256 checksum: `", s.hash,"`)")
function print_source(io, s::DirectorySource)
print(io, "* files in directory, relative to originating `build_tarballs.jl`: ")
if is_yggdrasil()
println(io, "[`", s.path, "`](https://github.com/JuliaPackaging/Yggdrasil/tree/", yggdrasil_head(), "/", ENV["PROJECT"], "/", basename(s.path), ")")
else
println(io, "`", s.path, "`")
end
end
function print_jll(io, dep)
depname = getname(dep)
if is_yggdrasil()
# In this case we can easily add a direct link to the repo
println(io, "* [`", depname, "`](https://github.com/JuliaBinaryWrappers/", depname, ".jl)")
else
println(io, "* `", depname, "`")
end
end
print_product(io, p::Product) = println(io, "* `", typeof(p), "`: `", variable_name(p), "`")
# Add a README.md
open(joinpath(code_dir, "README.md"), "w") do io
println(io,
"""
# `$(src_name)_jll.jl` (v$(build_version))
""")
if is_yggdrasil()
println(io, "[_jll/deps.svg)](https://juliahub.com/ui/Packages/General/$(src_name)_jll/)\n")
end
println(io, """
This is an autogenerated package constructed using [`BinaryBuilder.jl`](https://github.com/JuliaPackaging/BinaryBuilder.jl).
""")
if is_yggdrasil()
println(io, """
The originating [`build_tarballs.jl`](https://github.com/JuliaPackaging/Yggdrasil/blob/$(yggdrasil_head())/$(ENV["PROJECT"])/build_tarballs.jl) script can be found on [`Yggdrasil`](https://github.com/JuliaPackaging/Yggdrasil/), the community build tree.
## Bug Reports
If you have any issue, please report it to the Yggdrasil [bug tracker](https://github.com/JuliaPackaging/Yggdrasil/issues).
""")
end
println(io, """
## Documentation
For more details about JLL packages and how to use them, see `BinaryBuilder.jl` [documentation](https://docs.binarybuilder.org/stable/jll/).
""")
if length(sources) > 0
# `sources` can be empty, and it is for some HelloWorld examples
println(io, """
## Sources
The tarballs for `$(src_name)_jll.jl` have been built from these sources:""")
println(io)
print_source.(Ref(io), sources)
println(io)
end
println(io, """
## Platforms
`$(src_name)_jll.jl` is available for the following platforms:
""")
for p in sort(collect(platforms), by = triplet)
println(io, "* `", p, "` (`", triplet(p), "`)")
end
# Note: here we list _all_ runtime dependencies, including those that may be
# required only for some platforms.
if length(dependencies) > 0
println(io)
println(io, """
## Dependencies
The following JLL packages are required by `$(src_name)_jll.jl`:""")
println(io)
print_jll.(Ref(io), sort(dependencies, by = getname))
end
if length(keys(products_info)) > 0
println(io)
println(io, """
## Products
The code bindings within this package are autogenerated from the following `Products`:
""")
for (p, _) in sort(products_info)
print_product(io, p)
end
end
end
# Add before the license a note about to what files this applies
license = if isfile(joinpath(code_dir, "LICENSE"))
# In most cases we have a file called `LICENSE`...
strip(read(joinpath(code_dir, "LICENSE"), String))
else
# ...but sometimes this is missing.
strip("MIT License\n\nCopyright (c) $(year(now()))\n" * PkgLicenses.readlicense("MIT"))
end
note_lines = split("""
The Julia source code within this repository (all files under `src/`) are
released under the terms of the MIT \"Expat\" License, the text of which is
included below. This license does not apply to the binary package wrapped by
this Julia package and automatically downloaded by the Julia package manager
upon installing this wrapper package. The binary package's license is shipped
alongside the binary itself and can be found within the
`share/licenses/$(src_name)` directory within its prefix.""", "\n")
# Since this function can be called multiple times, we must make sure that
# the note is written only once. Do nothing if it is already there.
if !startswith(license, first(note_lines))
open(joinpath(code_dir, "LICENSE"), "w") do io
println.(Ref(io), note_lines)
println(io)
println(io, license)
end
end
# We used to have a duplicate license file, remove it.
rm(joinpath(code_dir, "LICENSE.md"); force=true)
# Add a Project.toml. Note: here we list _all_ runtime dependencies, including those
# that may be required only for some platforms.
jllwrappers_compat = isempty(augment_platform_block) ? "1.2.0" : "1.4.0"
project = build_project_dict(src_name, build_version, dependencies, julia_compat; lazy_artifacts, jllwrappers_compat, augment_platform_block)
open(joinpath(code_dir, "Project.toml"), "w") do io
TOML.print(io, project)
end
# Add a `.gitignore`
open(joinpath(code_dir, ".gitignore"), "w") do io
println(io, "override/")
end
end
function push_jll_package(name, build_version;
code_dir = joinpath(Pkg.devdir(), "$(name)_jll"),
deploy_repo = "JuliaBinaryWrappers/$(name)_jll.jl",
gh_auth = Wizard.github_auth(;allow_anonymous=false))
# Next, push up the wrapper code repository
wrapper_repo = LibGit2.GitRepo(code_dir)
LibGit2.add!(wrapper_repo, ".")
commit = LibGit2.commit(wrapper_repo, "$(name)_jll build $(build_version)")
Wizard.with_gitcreds("x-access-token", gh_auth.token) do creds
refspecs = ["refs/heads/main"]
# Fetch the remote repository, to have the relevant refspecs up to date.
LibGit2.fetch(
wrapper_repo;
refspecs=refspecs,
credentials=creds,
)
LibGit2.branch!(wrapper_repo, "main", string(LibGit2.GitHash(commit)); track="main")
LibGit2.push(
wrapper_repo;
refspecs=refspecs,
remoteurl="https://github.com/$(deploy_repo).git",
credentials=creds,
)
end
end
# For historical reasons, our UUIDs are generated with some rather strange constants
function bb_specific_uuid5(namespace::UUID, key::String)
data = [reinterpret(UInt8, [namespace.value]); codeunits(key)]
u = reinterpret(UInt128, sha1(data)[1:16])[1]
u &= 0xffffffffffff0fff3fffffffffffffff
u |= 0x00000000000050008000000000000000
return UUID(u)
end
const uuid_package = UUID("cfb74b52-ec16-5bb7-a574-95d9e393895e")
# For even more interesting historical reasons, we append an extra
# "_jll" to the name of the new package before computing its UUID.
jll_uuid(name) = bb_specific_uuid5(uuid_package, "$(name)_jll")
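# Illustrative sketch (not part of the original source): the deterministic
# UUID a hypothetical "Foo_jll" wrapper package would receive. Note that
# `jll_uuid` appends another "_jll", so the hashed key is "Foo_jll_jll".
function _demo_jll_uuid()
    return jll_uuid("Foo_jll")
end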
function find_uuid(ctx, pkg)
if Pkg.Types.has_uuid(pkg)
return pkg.uuid
end
depname = getname(pkg)
@static if VERSION >= v"1.7"
uuids = Pkg.Types.registered_uuids(ctx.registries, depname)
else
uuids = Pkg.Types.registered_uuids(ctx, depname)
end
if isempty(uuids)
return nothing
end
if length(uuids) == 1
return first(uuids)
end
error("""
Multiple UUIDs found for package `$(depname)`.
Use `PackageSpec(name = \"$(depname)\", uuid = ...)` to specify the UUID explicitly.
""")
end
function build_project_dict(name, version, dependencies::Array{<:AbstractDependency},
julia_compat::String=DEFAULT_JULIA_VERSION_SPEC;
jllwrappers_compat::String=DEFAULT_JLLWRAPPERS_VERSION_SPEC,
augment_platform_block::String="",
lazy_artifacts::Bool=!isempty(augment_platform_block) && minimum_compat(julia_compat) < v"1.7",
kwargs...)
# Make sure we only have runtime dependencies at this point.
@assert all(is_runtime_dependency, dependencies)
Pkg.Types.semver_spec(julia_compat) # verify julia_compat is valid
project = Dict(
"name" => "$(name)_jll",
"uuid" => string(jll_uuid("$(name)_jll")),
"version" => string(version),
"deps" => Dict{String,Any}(),
# We require at least Julia 1.3+, for Pkg.Artifacts support, but we only claim
# Julia 1.0+ by default so that empty JLLs can be installed on older versions.
"compat" => Dict{String,Any}(
"JLLWrappers" => "$(jllwrappers_compat)",
"julia" => "$(julia_compat)",
# Stdlibs always used, we need to have compat bounds also for them.
# The "< 0.0.1" trick is needed to prevent `Pkg.test` from breaking
# on older Julia versions.
"Libdl" => "< 0.0.1, 1",
"Artifacts" => "< 0.0.1, 1",
)
)
ctx = Pkg.Types.Context()
for dep in dependencies
pkgspec = getpkg(dep)
depname = getname(dep)
uuid = find_uuid(ctx, pkgspec)
if uuid === nothing
uuid = jll_uuid(depname)
end
project["deps"][depname] = string(uuid)
if dep isa Dependency && length(dep.compat) > 0
Pkg.Types.semver_spec(dep.compat) # verify dep.compat is valid
project["compat"][depname] = dep.compat
end
end
# Always add Libdl, Artifacts, and JLLWrappers as dependencies.
project["deps"]["Libdl"] = "8f399da3-3557-5675-b5ff-fb832c97cbdb"
project["deps"]["Artifacts"] = "56f22d72-fd6d-98f1-02f0-08ddc0907c33"
project["deps"]["JLLWrappers"] = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210"
if minimum_compat(julia_compat) < v"1.6"
# `Pkg` is used in JLLWrappers only when we require Julia v1.5-.
project["deps"]["Pkg"] = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f"
project["compat"]["Pkg"] = "< 0.0.1, 1"
end
if lazy_artifacts
project["deps"]["LazyArtifacts"] = "4af54fe1-eca0-43a8-85a7-787d91b784e3"
project["compat"]["LazyArtifacts"] = "< 0.0.1, 1"
end
if !isempty(augment_platform_block)
project["deps"]["TOML"] = "fa267f1f-6049-4f14-aa54-33bafae1ed76"
project["compat"]["TOML"] = "< 0.0.1, 1"
end
return project
end
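# Hypothetical sketch (not part of the original source): assemble the
# Project.toml dictionary for a fictional "Foo_jll" with one runtime dependency.
function _demo_build_project_dict()
    deps = [Dependency("Zlib_jll")]
    project = build_project_dict("Foo", v"1.2.3", deps, "1.6")
    # `project["deps"]` now holds Zlib_jll plus the always-added
    # Libdl/Artifacts/JLLWrappers entries.
    return project
end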
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2563 | module BinaryBuilder
using Libdl, LibGit2, Random, JSON
using BinaryBuilderBase
using ObjectFile
using GitHub
using Pkg, Base.BinaryPlatforms, Pkg.Artifacts
using ghr_jll
# Re-export useful stuff from Base.BinaryPlatforms:
export HostPlatform, platform_dlext, valid_dl_path, arch, libc, nbits,
libgfortran_version, libstdcxx_version, cxxstring_abi, detect_libgfortran_version,
detect_libstdcxx_version, detect_cxxstring_abi, call_abi, wordsize, triplet,
select_platform, platforms_match, AbstractPlatform, Platform, os
# BinaryBuilderBase/src/Prefix.jl
export Prefix, bindir, libdirs, includedir, logdir, activate, deactivate,
isinstalled, install, uninstall, list_tarball_files, verify, temp_prefix
# BinaryBuilderBase/src/Rootfs.jl
export supported_platforms, expand_gfortran_versions, expand_cxxstring_abis, expand_microarchitectures
# BinaryBuilderBase/src/Platforms.jl
export AnyPlatform
# BinaryBuilderBase/src/Products.jl
export Product, LibraryProduct, FileProduct, ExecutableProduct, FrameworkProduct, satisfied,
locate, write_deps_file, variable_name
# BinaryBuilderBase/src/Dependency.jl
export Dependency, RuntimeDependency, BuildDependency, HostBuildDependency
# BinaryBuilderBase/src/Sources.jl
export ArchiveSource, FileSource, GitSource, DirectorySource
# Auditor.jl
export audit, collect_files, collapse_symlinks
# Autocomplete BinaryBuilder.runshell
const runshell = BinaryBuilderBase.runshell
include("Auditor.jl")
include("Wizard.jl")
using OutputCollectors, BinaryBuilderBase, .Auditor, .Wizard
# Autocomplete BinaryBuilder.run_wizard
const run_wizard = Wizard.run_wizard
include("AutoBuild.jl")
include("Declarative.jl")
include("Logging.jl")
function __init__()
if Base.thisminor(VERSION) >= v"1.8" && get(ENV, "JULIA_REGISTRYCI_AUTOMERGE", "false") != "true"
error("""
BinaryBuilder supports only Julia v1.7.
Contribute to JuliaPackaging/JLLPrefixes.jl#6 (<https://github.com/JuliaPackaging/JLLPrefixes.jl/issues/6>)
if you care about supporting newer versions of Julia.
""")
end
# If we're running on Azure, enable azure logging:
if !isempty(get(ENV, "AZP_TOKEN", ""))
enable_azure_logging()
end
end
get_bb_version() =
BinaryBuilderBase.get_bbb_version(@__DIR__, "12aac903-9f7c-5d81-afc2-d9565ea332ae")
versioninfo() = BinaryBuilderBase.versioninfo(; name=@__MODULE__, version=get_bb_version())
# Precompilation ahoy!
# include("../deps/SnoopCompile/precompile/precompile_BinaryBuilder.jl")
# _precompile_()
end # module
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2120 | # merge multiple JSON objects garnered via `--meta-json`
function merge_json_objects(objs::Vector)
merged = Dict()
for obj in objs
for k in keys(obj)
if !haskey(merged, k)
merged[k] = obj[k]
else
if merged[k] != obj[k]
if !isa(merged[k], Array)
merged[k] = [merged[k]]
end
if isa(obj[k], Array)
append!(merged[k], obj[k])
else
push!(merged[k], obj[k])
end
merged[k] = unique(merged[k])
end
end
end
end
return merged
end
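# Quick illustrative sketch (not part of the original source): objects from two
# separate `--meta-json` invocations get merged; values that agree stay scalar,
# differing values are collected into a deduplicated array.
function _demo_merge_json_objects()
    a = Dict("name" => "Foo", "platforms" => ["x86_64-linux-gnu"])
    b = Dict("name" => "Foo", "platforms" => ["aarch64-linux-gnu"])
    return merge_json_objects([a, b])   # "platforms" becomes a 2-element array
end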
# Fix various things that go wrong with the JSON translation
function cleanup_merged_object!(meta::Dict)
# Turn `source` values back into AbstractSource's
meta["sources"] = sourcify.(meta["sources"])
# Turn `dependencies` back into AbstractDependency's
meta["dependencies"] = dependencify.(meta["dependencies"])
# Turn `version` back into a VersionNumber (not a string)
meta["version"] = VersionNumber(meta["version"])
# Reconstruct our `Product`s
function reconstruct_product(p::Dict)
if p["type"] == "exe"
return ExecutableProduct(p)
elseif p["type"] == "lib"
return LibraryProduct(p)
elseif p["type"] == "framework"
return FrameworkProduct(p)
elseif p["type"] == "file"
return FileProduct(p)
else
error("Unknown product type $(p["type"])")
end
end
meta["products"] = Product[reconstruct_product(p) for p in meta["products"]]
if haskey(meta, "platforms")
# Convert platforms back to actual Platform objects
meta["platforms"] = [parse(Platform, p) for p in meta["platforms"]]
else
# If the key isn't there it's because this is a platform-independent
# build, so use `AnyPlatform()`.
meta["platforms"] = [AnyPlatform()]
end
# Return the cleaned-up meta for fun
return meta
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 1415 | using LoggingExtras, Logging
import Logging: AbstractLogger, handle_message
struct AzureSinkLogger{F} <: Logging.AbstractLogger
action_chooser::F
io::IO
end
AzureSinkLogger(f=_->"task.logissue") = AzureSinkLogger(f, stderr)
function Logging.handle_message(logger::AzureSinkLogger, args...; kwargs...)
# Make it a named tuple for easier handling
log = LoggingExtras.handle_message_args(args...)
task::String = logger.action_chooser(log)
properties=String[]
# AZP calls it `type` not `level`, and `warning` not `warn`:
levelmap = Dict(Logging.Error => "error", Logging.Warn => "warning")
push!(properties, "type=$(levelmap[log[:level]])")
for key in (:_module, :group, :id, :file, :line)
push!(properties, "$key=$(log[key])")
end
for key in keys(log.kwargs)
push!(properties, "$key=$(log.kwargs[key])")
end
props=join(properties, ";")
# format: ##vso[area.action property1=value;property2=value;...]message
println(logger.io, "##vso[$task $props]$(log.message)")
end
Logging.shouldlog(::AzureSinkLogger, arg...) = true
Logging.min_enabled_level(::AzureSinkLogger) = Logging.Warn
Logging.catch_exceptions(::AzureSinkLogger) = true
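# Minimal illustrative sketch (not part of the original source): route a
# warning through an AzureSinkLogger backed by an IOBuffer and inspect the
# resulting `##vso[...]` logging command.
function _demo_azure_sink_logger()
    buf = IOBuffer()
    with_logger(AzureSinkLogger(_ -> "task.logissue", buf)) do
        @warn "Something looks off"
    end
    return String(take!(buf))
end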
function enable_azure_logging()
# Tee-in AzureSinkLogger so that `@warn` and `@error` are printed out nicely
global_logger(TeeLogger(
global_logger(),
AzureSinkLogger(),
))
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 4645 | module Wizard
using BinaryBuilderBase, OutputCollectors, ..Auditor
using Random
using GitHub, LibGit2, Pkg, Sockets, ObjectFile
import GitHub: gh_get_json, DEFAULT_API
using JSON
import SHA: sha256
using REPL
using REPL.Terminals
using REPL.TerminalMenus
using ObjectFile.ELF
using HTTP
import PkgLicenses
using JLD2
using Base.BinaryPlatforms
using Dates
using Scratch
# It's Magic (TM)!
export run_wizard, deploy
include("wizard/state.jl")
include("wizard/github.jl")
include("wizard/yggdrasil.jl")
include("wizard/utils.jl")
include("wizard/obtain_source.jl")
include("wizard/interactive_build.jl")
include("wizard/deploy.jl")
function save_wizard_state(state::WizardState,
wizard_state_dir::String = @get_scratch!("wizard_state"))
jldopen(joinpath(wizard_state_dir, "wizard.state"), "w") do f
serialize(f, state)
end
return state
end
function load_wizard_state(wizard_state_dir::String = @get_scratch!("wizard_state");
as_is::Bool = false)
# If no state dir exists, early-exit
if wizard_state_dir === nothing
return WizardState()
end
try
state = jldopen(joinpath(wizard_state_dir, "wizard.state"), "r") do f
return unserialize(f)
end
if as_is
# Return the state as it is, without further questions.
return state
end
# Looks like we had an incomplete build; ask the user if they want to continue
if !(state.step in (:done, :step1))
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
choice = request(terminal,
"Would you like to resume the previous incomplete wizard run?",
RadioMenu([
"Resume previous run",
"Start from scratch",
]),
)
if choice == 1
return state
else
return WizardState()
end
end
catch e
if isa(e, InterruptException)
rethrow(e)
end
@error(e)
end
# Either something went wrong, or there was nothing interesting stored.
# Either way, just return a blank slate.
return WizardState()
end
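# Illustrative sketch (not part of the original source): round-trip a fresh
# WizardState through the scratch-space serialization helpers above.
function _demo_wizard_state_roundtrip(dir::String = mktempdir())
    save_wizard_state(WizardState(), dir)
    # `as_is=true` skips the "resume previous run?" prompt
    return load_wizard_state(dir; as_is=true)
end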
function run_wizard(state::Union{Nothing,WizardState} = nothing)
global last_wizard_state
if state === nothing
# If we weren't given a state, check to see if we'd like to resume a
# previous run or start from scratch again.
state = load_wizard_state()
end
try
while state.step != :done
if state.step == :step1
step1(state)
state.step = :step2
elseif state.step == :step2
step2(state)
state.step = :step3
elseif state.step == :step3
step34(state)
elseif state.step == :step3_retry
step3_retry(state)
elseif state.step == :step5a
step5a(state)
elseif state.step == :step5b
step5b(state)
elseif state.step == :step5c
step5c(state)
state.step = :step6
elseif state.step == :step6
step6(state)
elseif state.step == :step7
step7(state)
state.step = :done
end
# Save it every step along the way
save_wizard_state(state)
end
catch err
# If anything goes wrong, immediately save the current wizard state
save_wizard_state(state)
if isa(err, InterruptException)
msg = "\n\nWizard stopped, use BinaryBuilder.run_wizard() to resume"
if state.step in (:step3, :step3_retry, :step5a, :step5b, :step5c, :step6)
# We allow deploying a partial script only if the wizard got at
# least to the interactive build and it isn't done. `:step7`
# would be about to deploy, no need to suggest using `deploy()`
# at this stage.
msg *= ",\nor BinaryBuilder.deploy() to deploy the incomplete script"
end
msg *= "\n\n"
printstyled(state.outs, msg, bold=true, color=:red)
else
bt = catch_backtrace()
Base.showerror(stderr, err, bt)
println(state.outs, "\n")
end
return state
end
# We did it!
save_wizard_state(state)
println(state.outs, "\nWizard Complete. Press Enter to exit...")
read(state.ins, Char)
state
end
end # module Wizard
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 917 | function check_codesigned(path::AbstractString, platform::AbstractPlatform)
# We only perform ad-hoc codesigning on Apple platforms
if !Sys.isapple(platform)
return true
end
ur = preferred_runner()(dirname(path); cwd="/workspace/", platform=platform)
return run(ur, `/usr/local/bin/ldid -d $(basename(path))`)
end
function ensure_codesigned(path::AbstractString, prefix::Prefix, platform::AbstractPlatform;
verbose::Bool = false, subdir::AbstractString="")
# We only perform ad-hoc codesigning on Apple platforms
if !Sys.isapple(platform)
return true
end
rel_path = relpath(path, prefix.path)
ur = preferred_runner()(prefix.path; cwd="/workspace/", platform=platform)
with_logfile(prefix, "ldid_$(basename(rel_path)).log"; subdir) do io
run(ur, `/usr/local/bin/ldid -S -d $(rel_path)`, io; verbose=verbose)
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 10139 | import Base.BinaryPlatforms: detect_libstdcxx_version, detect_cxxstring_abi
using ObjectFile
csl_warning(lib) = @warn(
"""
To ensure that the correct version of $(lib) is found at runtime, add the following entry to the list of dependencies of this builder
Dependency(PackageSpec(name="CompilerSupportLibraries_jll", uuid="e66e0078-7015-5450-92f7-15fbd957f2ae"))
""")
"""
detect_libgfortran_version(oh::ObjectHandle, platform::AbstractPlatform)
Given an ObjectFile, examine its dynamic linkage to discover which (if any)
`libgfortran` it's linked against. The major SOVERSION will determine which
GCC version we're restricted to.
"""
function detect_libgfortran_version(oh::ObjectHandle, platform::AbstractPlatform)
# We look for linkage to libgfortran
libs = basename.(path.(DynamicLinks(oh)))
fortran_libs = filter(l -> occursin("libgfortran", l), libs)
if isempty(fortran_libs)
return nothing
end
# If we find one, pass it off to `parse_dl_name_version`
name, version = parse_dl_name_version(first(fortran_libs), os(platform))
return version
end
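# Illustrative sketch (not part of the original source): probe a binary on disk
# for its libgfortran linkage; `lib_path` is a hypothetical file path.
function _demo_detect_libgfortran_version(lib_path::String, platform::AbstractPlatform)
    return readmeta(lib_path) do ohs
        [detect_libgfortran_version(oh, platform) for oh in ohs]
    end
end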
function check_libgfortran_version(oh::ObjectHandle, platform::AbstractPlatform; verbose::Bool = false,
has_csl::Bool = true)
version = nothing
try
version = detect_libgfortran_version(oh, platform)
catch e
if isa(e, InterruptException)
rethrow(e)
end
@warn "$(path(oh)) could not be scanned for libgfortran dependency!" exception=(e, catch_backtrace())
return true
end
if verbose && version !== nothing
@info("$(path(oh)) locks us to libgfortran v$(version)")
end
if !has_csl && version !== nothing
csl_warning("libgfortran")
end
if libgfortran_version(platform) === nothing && version !== nothing
msg = strip(replace("""
$(path(oh)) links to libgfortran! This causes incompatibilities across
major versions of GCC. To remedy this, you must build a tarball for
each major version of GCC. To do this, immediately after your `platforms`
definition in your `build_tarballs.jl` file, add the line:
""", '\n' => ' '))
msg *= "\n\n platforms = expand_gfortran_versions(platforms)"
@warn(msg)
return false
end
if libgfortran_version(platform) !== nothing !== version && libgfortran_version(platform) != version
msg = strip(replace("""
$(path(oh)) links to libgfortran$(version.major), but we are supposedly building
for libgfortran$(libgfortran_version(platform).major). This usually indicates that
the build system is somehow ignoring our choice of compiler!
""", '\n' => ' '))
@warn(msg)
return false
end
return true
end
function check_csl_libs(oh::ObjectHandle, platform::AbstractPlatform; verbose::Bool=false,
has_csl::Bool=true, csl_libs::Vector{String}=["libgomp", "libatomic"])
if has_csl
# No need to do any check, CompilerSupportLibraries_jll is already a dependency
return true
end
# Collect list of dependencies
libs = try
basename.(path.(DynamicLinks(oh)))
catch e
if isa(e, InterruptException)
rethrow(e)
end
@warn "$(path(oh)) could not be scanned for $(lib) dependency!" exception=(e, catch_backtrace())
return true
end
# If any of the libs is a library provided by
# `CompilerSupportLibraries_jll`, suggest to add the package as dependency
for lib in csl_libs
if length(filter(l -> occursin(lib, l), libs)) >= 1
csl_warning(lib)
return false
end
end
return true
end
"""
detect_libstdcxx_version(oh::ObjectHandle, platform::AbstractPlatform)
Given an ObjectFile, examine its dynamic linkage to discover which (if any)
`libstdc++` it's linked against. The maximum `GLIBCXX_*` symbol version will
determine which GCC version we're restricted to.
"""
function detect_libstdcxx_version(oh::ObjectHandle, platform::AbstractPlatform)
# We look for linkage to libstdc++
libs = basename.(path.(DynamicLinks(oh)))
libstdcxx_libs = filter(l -> occursin("libstdc++", l), libs)
if isempty(libstdcxx_libs)
return nothing
end
# Extract all pieces of `.gnu.version_d` from libstdc++.so, find the `GLIBCXX_*`
# symbols, and use the maximum version of that to find the GLIBCXX ABI version number
version_symbols = readmeta(first(libstdcxx_libs)) do ohs
unique(vcat((x -> x.names).(vcat(ELFVersionData.(ohs)...))...))
end
version_symbols = filter(x -> startswith(x, "GLIBCXX_"), version_symbols)
if isempty(version_symbols)
# This would be weird, but let's be prepared
return nothing
end
return maximum([VersionNumber(split(v, "_")[2]) for v in version_symbols])
end
function check_libstdcxx_version(oh::ObjectHandle, platform::AbstractPlatform; verbose::Bool = false)
libstdcxx_version = nothing
try
libstdcxx_version = detect_libstdcxx_version(oh, platform)
catch e
if isa(e, InterruptException)
rethrow(e)
end
@warn "$(path(oh)) could not be scanned for libstdcxx dependency!" exception=(e, catch_backtrace())
return true
end
if verbose && libstdcxx_version != nothing
@info("$(path(oh)) locks us to libstdc++ v$(libstdcxx_version)+")
end
# This actually isn't critical, so we don't complain. Yet.
# if libstdcxx_version(platform) === nothing && libstdcxx_version != nothing
# msg = strip(replace("""
# $(path(oh)) links to libstdc++! This causes incompatibilities across
# major versions of GCC. To remedy this, you must build a tarball for
# each major version of GCC. To do this, immediately after your `platforms`
# definition in your `build_tarballs.jl` file, add the line:
# """, '\n' => ' '))
# msg *= "\n\n platforms = expand_cxxstring_abis(platforms)"
# warn(io, msg)
# return false
# end
return true
end
function cppfilt(symbol_names::Vector, platform::AbstractPlatform; strip_underscore::Bool=false)
input = IOBuffer()
for name in symbol_names
println(input, name)
end
seekstart(input)
output = IOBuffer()
mktempdir() do dir
ur = preferred_runner()(dir; cwd="/workspace/", platform=platform)
cmd = Cmd(`/opt/bin/$(triplet(ur.platform))/c++filt`; ignorestatus=true)
if strip_underscore
cmd = `$(cmd) --strip-underscore`
end
run_interactive(ur, cmd; stdin=input, stdout=output)
end
return filter!(s -> !isempty(s), split(String(take!(output)), "\n"))
end
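# Illustrative sketch (not part of the original source): demangle a C++11
# std::string symbol; the mangled name below is a made-up but valid example.
function _demo_cppfilt()
    syms = ["_ZNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEE5c_strEv"]
    return cppfilt(syms, Platform("x86_64", "linux"))
end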
"""
detect_cxxstring_abi(oh::ObjectHandle, platform::AbstractPlatform)
Given an ObjectFile, examine its symbols to discover which (if any) C++11
std::string ABI it's using. We do this by scanning the list of exported
symbols, triggering off of instances of `St7__cxx11` or `_ZNSs` to give
evidence toward a constraint on `cxx11`, `cxx03` or neither.
"""
function detect_cxxstring_abi(oh::ObjectHandle, platform::AbstractPlatform)
try
# First, if this object doesn't link against `libstdc++`, it's a `:cxxany`
if !any(occursin("libstdc++", l) for l in ObjectFile.path.(DynamicLinks(oh)))
return nothing
end
# GCC on macOS prepends an underscore to symbols, strip it.
symbol_names = cppfilt(symbol_name.(Symbols(oh)), platform; strip_underscore=Sys.isapple(platform))
# Shove the symbol names through c++filt (since we don't want to have to
# reimplement the parsing logic in Julia). If anything has `cxx11` tags,
# then mark it as such.
if any(occursin("[abi:cxx11]", c) || occursin("std::__cxx11", c) for c in symbol_names)
return "cxx11"
end
# Otherwise, if we still have `std::string`'s or `std::list`'s in there, it's implicitly a
# `cxx03` binary, even though we don't have a __cxx03 namespace or something. Mark it.
if any(occursin("std::string", c) || occursin("std::basic_string", c) ||
occursin("std::list", c) for c in symbol_names)
return "cxx03"
end
catch e
if isa(e, InterruptException)
rethrow(e)
end
@warn "$(path(oh)) could not be scanned for cxx11 ABI!" exception=(e, catch_backtrace())
end
return nothing
end
function check_cxxstring_abi(oh::ObjectHandle, platform::AbstractPlatform; io::IO = stdout, verbose::Bool = false)
# First, check the stdlibc++ string ABI to see if it is a superset of `platform`. If it's
# not, then we have a problem!
cxx_abi = detect_cxxstring_abi(oh, platform)
# If no std::string symbols found, just exit out immediately
if cxx_abi == nothing
return true
end
if verbose && cxx_abi != nothing
@info("$(path(oh)) locks us to $(cxx_abi)")
end
if cxxstring_abi(platform) == nothing && cxx_abi != nothing
msg = strip(replace("""
$(path(oh)) contains std::string values! This causes incompatibilities across
the GCC 4/5 version boundary. To remedy this, you must build a tarball for
both GCC 4 and GCC 5. To do this, immediately after your `platforms`
definition in your `build_tarballs.jl` file, add the line:
""", '\n' => ' '))
msg *= "\n\n platforms = expand_cxxstring_abis(platforms)"
@warn(msg)
return false
end
if cxxstring_abi(platform) != cxx_abi
msg = strip(replace("""
$(path(oh)) contains $(cxx_abi) ABI std::string values within its public interface,
but we are supposedly building for $(cxxstring_abi(platform)) ABI. This usually
indicates that the build system is somehow ignoring our choice of compiler, as we manually
insert the correct compiler flags for this ABI choice!
""", '\n' => ' '))
@warn(msg)
return false
end
return true
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 17576 | using ObjectFile.ELF
"""
platform_for_object(oh::ObjectHandle)
Returns the platform the given `ObjectHandle` should run on. E.g.
if the given `ObjectHandle` is an `x86_64` Linux ELF object, this function
will return `Platform("x86_64", "linux")`. This function does not yet distinguish
between different libc's such as `:glibc` and `:musl`.
"""
function platform_for_object(oh::ObjectHandle)
if oh isa ELFHandle
mach_to_arch = Dict(
ELF.EM_386 => "i686",
ELF.EM_X86_64 => "x86_64",
ELF.EM_AARCH64 => "aarch64",
ELF.EM_PPC64 => "powerpc64le",
ELF.EM_ARM => "arm",
)
mach = oh.header.e_machine
if !haskey(mach_to_arch, mach)
error("Unknown ELF architecture $(mach)")
end
arch = mach_to_arch[mach]
if arch == "arm"
# See if we can find an `.ARM.attributes` section
attr = try
findfirst(Sections(oh), ".ARM.attributes")
catch
nothing
end
if attr !== nothing
attr_data = read(attr)
error("Elliot, you need to parse out the ARM version here!")
# Parse out the .ARM.attributes section to find ARM version
end
end
if oh.ei.osabi == ELF.ELFOSABI_LINUX || oh.ei.osabi == ELF.ELFOSABI_NONE
return Platform(arch, "linux")
elseif oh.ei.osabi == ELF.ELFOSABI_FREEBSD
return Platform(arch, "freebsd")
else
error("Unknown ELF OSABI $(oh.ei.osabi)")
end
elseif oh isa MachOHandle
mach_to_arch = Dict(
MachO.CPU_TYPE_X86_64 => "x86_64",
MachO.CPU_TYPE_ARM64 => "aarch64",
)
mach = oh.header.cputype
if !haskey(mach_to_arch, mach)
error("Unknown MachO architecture $(mach)")
end
return Platform(mach_to_arch[mach], "macos")
elseif oh isa COFFHandle
if is64bit(oh)
return Platform("x86_64", "windows")
else
return Platform("i686", "windows")
end
else
error("Unknown ObjectHandle type $(typeof(oh))")
end
end
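# Illustrative sketch (not part of the original source): map every object
# inside a (possibly fat) binary file to the platform it targets.
function _demo_platform_for_object(bin_path::String)
    return readmeta(bin_path) do ohs
        platform_for_object.(ohs)
    end
end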
function _rpaths(file::AbstractString)
readmeta(file) do ohs
vcat(rpaths.(RPath.(ohs))...)
end
end
function _canonical_rpaths(file::AbstractString)
readmeta(file) do ohs
vcat(canonical_rpaths.(RPath.(ohs))...)
end
end
"""
is_for_platform(h::ObjectHandle, platform::AbstractPlatform)
Returns `true` if the given `ObjectHandle` refers to an object of the given
`platform`; E.g. if the given `platform` is for AArch64 Linux, then `h` must
be an `ELFHandle` with `h.header.e_machine` set to `ELF.EM_AARCH64`.
In particular, this method and [`platform_for_object()`](@ref) both exist
because the latter is not smart enough to deal with `:glibc` and `:musl` yet.
"""
function is_for_platform(h::ObjectHandle, platform::AbstractPlatform)
if Sys.islinux(platform) || Sys.isfreebsd(platform)
# First off, if h isn't an ELF object, quit out
if !(h isa ELFHandle)
return false
end
# If the ELF object has an OSABI, check it matches platform
if h.ei.osabi != ELF.ELFOSABI_NONE
if Sys.islinux(platform)
if h.ei.osabi != ELF.ELFOSABI_LINUX
return false
end
elseif Sys.isfreebsd(platform)
if h.ei.osabi != ELF.ELFOSABI_FREEBSD
return false
end
else
error("Unknown OS ABI type $(typeof(platform))")
end
end
# Check that the ELF arch matches our own
m = h.header.e_machine
if arch(platform) == "i686"
return m == ELF.EM_386
elseif arch(platform) == "x86_64"
# Allow i686 on x86_64, because that's technically ok
return m == ELF.EM_386 || m == ELF.EM_X86_64
elseif arch(platform) == "aarch64"
return m == ELF.EM_AARCH64
elseif arch(platform) == "powerpc64le"
return m == ELF.EM_PPC64
elseif arch(platform) ∈ ("armv7l", "armv6l")
return m == ELF.EM_ARM
else
error("Unknown $(os(platform)) architecture $(arch(platform))")
end
elseif Sys.iswindows(platform)
if !(h isa COFFHandle)
return false
end
if arch(platform) == "x86_64"
return true
elseif arch(platform) == "i686"
return !is64bit(h)
else
error("Unknown $(os(platform)) architecture $(arch(platform))")
end
elseif Sys.isapple(platform)
# We'll take any old Mach-O handle
if !(h isa MachOHandle)
return false
end
return true
else
error("Unkown platform $(os(platform))")
end
end
# These are libraries we should straight-up ignore, like libsystem on OSX
function should_ignore_lib(lib, ::ELFHandle, platform::AbstractPlatform)
ignore_libs = [
# dynamic loaders
"ld-linux-x86-64.so.2",
"ld-linux.so.2",
"ld-linux-armhf.so.3",
"ld-linux-aarch64.so.1",
"ld-musl-x86_64.so.1",
"ld-musl-i386.so.1",
"ld-musl-aarch64.so.1",
"ld-musl-armhf.so.1",
"ld64.so.2",
# C runtime
"libc.so",
"libc.so.6",
"libc.so.7",
"libc.musl-x86_64.so.1",
"libc.musl-i386.so.1",
"libc.musl-aarch64.so.1",
"libc.musl-armhf.so.1",
# C++ runtime
"libstdc++.so.6",
"libc++.so.1",
"libcxxrt.so.1",
# libc libraries
"libdl.so.2",
"librt.so.1",
"libm.so.5",
"libm.so.6",
"libthr.so.3",
"libpthread.so.0",
]
if Sys.isfreebsd(platform)
push!(ignore_libs,
# From FreeBSD SDK
"libdevstat.sos.7",
"libdl.so.1",
"libexecinfo.so.1",
"libkvm.so.7",
"libutil.so.9",
)
elseif libc(platform) == "glibc"
push!(ignore_libs,
# Part of Glibc
"libresolv.so.2",
"libutil.so.1",
)
end
return lowercase(basename(lib)) in ignore_libs
end
function should_ignore_lib(lib, ::MachOHandle, platform::AbstractPlatform)
ignore_libs = [
"libbsm.0.dylib",
"libcups.2.dylib",
"libobjc.a.dylib",
"libpmenergy.dylib",
"libpmsample.dylib",
"libsandbox.1.dylib",
"libsystem.b.dylib",
# This is not built by clang or GCC, so we leave it as a system library
"libc++.1.dylib",
"libresolv.9.dylib",
# Frameworks in the SDK
"accelerate",
"appkit",
"applicationservices",
"audiotoolbox",
"audiounit",
"avfoundation",
"carbon",
"cfnetwork",
"cocoa",
"coreaudio",
"corebluetooth",
"corefoundation",
"coregraphics",
"corelocation",
"coremedia",
"coremidi",
"coreservices",
"coretext",
"corevideo",
"corewlan",
"diskarbitration",
"forcefeedback",
"foundation",
"gamecontroller",
"imageio",
"iobluetooth",
"iokit",
"iosurface",
"localauthentication",
"mediaaccessibility",
"metal",
"metalkit",
"opencl",
"opengl",
"opendirectory",
"quartz",
"quartzcore",
"security",
"securityinterface",
"systemconfiguration",
"videotoolbox",
]
return lowercase(basename(lib)) in ignore_libs
end
function should_ignore_lib(lib, ::COFFHandle, platform::AbstractPlatform)
ignore_libs = [
# Core runtime libs
"ntdll.dll",
"msvcrt.dll",
"kernel32.dll",
"user32.dll",
"shell32.dll",
"shlwapi.dll",
"advapi32.dll",
"crypt32.dll",
"ws2_32.dll",
"rpcrt4.dll",
"usp10.dll",
"dwrite.dll",
"gdi32.dll",
"gdiplus.dll",
"comdlg32.dll",
"secur32.dll",
"ole32.dll",
"dbeng.dll",
"wldap32.dll",
"opengl32.dll",
"winmm.dll",
"iphlpapi.dll",
"imm32.dll",
"comctl32.dll",
"oleaut32.dll",
"userenv.dll",
"netapi32.dll",
"winhttp.dll",
"msimg32.dll",
"dnsapi.dll",
"wsock32.dll",
"psapi.dll",
"bcrypt.dll",
# Compiler support libraries
"libgcc_s_seh-1.dll",
"libgcc_s_sjlj-1.dll",
"libgfortran-3.dll",
"libgfortran-4.dll",
"libgfortran-5.dll",
"libstdc++-6.dll",
"libwinpthread-1.dll",
# This one needs some special attention, eventually
"libgomp-1.dll",
]
return lowercase(basename(lib)) in ignore_libs
end
# Determine whether a library is a "default" library or not, if it is we need
# to map it to `@rpath/$libname` on OSX or `\$ORIGIN/$libname` on Linux/FreeBSD
is_default_lib(lib, oh) = false
function valid_library_path(f::AbstractString, p::AbstractPlatform)
if Sys.iswindows(p)
return endswith(f, ".dll")
elseif Sys.isapple(p)
return endswith(f, ".dylib")
else
return occursin(r".+\.so(\.[\d]+)*$", f)
end
end
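# Quick illustrative checks (not part of the original source) of the
# per-platform library-name conventions encoded above.
function _demo_valid_library_path()
    @assert valid_library_path("libfoo.so.1.2.3", Platform("x86_64", "linux"))
    @assert valid_library_path("libfoo.dylib", Platform("aarch64", "macos"))
    @assert valid_library_path("foo.dll", Platform("x86_64", "windows"))
    @assert !valid_library_path("libfoo.so", Platform("x86_64", "windows"))
end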
function patchelf_flags(p::AbstractPlatform)
flags = []
# ppc64le and aarch64 have 64KB page sizes, don't muck up the ELF section load alignment
if arch(p) in ("powerpc64le", "aarch64")
append!(flags, ["--page-size", "65536"])
end
# We return arrays so that things interpolate properly
return flags
end
function relink_to_rpath(prefix::Prefix, platform::AbstractPlatform, path::AbstractString,
old_libpath::AbstractString; verbose::Bool=false, subdir::AbstractString="")
ur = preferred_runner()(prefix.path; cwd="/workspace/", platform=platform)
rel_path = relpath(path, prefix.path)
libname = basename(old_libpath)
relink_cmd = ``
if Sys.isapple(platform)
install_name_tool = "/opt/bin/$(triplet(ur.platform))/install_name_tool"
relink_cmd = `$install_name_tool -change $(old_libpath) @rpath/$(libname) $(rel_path)`
elseif Sys.islinux(platform) || Sys.isbsd(platform)
patchelf = "/usr/bin/patchelf"
relink_cmd = `$patchelf $(patchelf_flags(platform)) --replace-needed $(old_libpath) $(libname) $(rel_path)`
end
# Create a new linkage that looks like @rpath/$lib on OSX
with_logfile(prefix, "relink_to_rpath_$(basename(rel_path)).log"; subdir) do io
run(ur, relink_cmd, io; verbose=verbose)
end
end
function fix_identity_mismatch(prefix::Prefix, platform::AbstractPlatform, path::AbstractString,
oh::ObjectHandle; verbose::Bool=false, subdir::AbstractString="")
# Only macOS needs to fix identity mismatches
if !Sys.isapple(platform)
return nothing
end
id_lc = [lc for lc in MachOLoadCmds(oh) if typeof(lc) <: MachOIdDylibCmd]
if isempty(id_lc)
return nothing
end
id_lc = first(id_lc)
rel_path = relpath(path, prefix.path)
old_id = dylib_name(id_lc)
new_id = "@rpath/$(basename(old_id))"
if old_id == new_id
return nothing
end
if verbose
@info("Modifying dylib id from \"$(old_id)\" to \"$(new_id)\"")
end
ur = preferred_runner()(prefix.path; cwd="/workspace/", platform=platform)
install_name_tool = "/opt/bin/$(triplet(ur.platform))/install_name_tool"
id_cmd = `$install_name_tool -id $(new_id) $(rel_path)`
# Create a new linkage that looks like @rpath/$lib on OSX,
with_logfile(prefix, "fix_identity_mismatch_$(basename(rel_path)).log"; subdir) do io
run(ur, id_cmd, io; verbose=verbose)
end
end
"""
update_linkage(prefix::Prefix, platform::AbstractPlatform, path::AbstractString,
old_libpath, new_libpath; verbose::Bool = false)
Given a binary object located at `path` within `prefix`, update its dynamic
linkage to point to `new_libpath` instead of `old_libpath`. This is done using
a tool within the cross-compilation environment such as `install_name_tool` on
MacOS or `patchelf` on Linux. Windows platforms are completely skipped, as
they do not encode paths or RPaths within their executables.
"""
function update_linkage(prefix::Prefix, platform::AbstractPlatform, path::AbstractString,
old_libpath, new_libpath; verbose::Bool=false, subdir::AbstractString="")
# Windows doesn't do updating of linkage
if Sys.iswindows(platform)
return
end
ur = preferred_runner()(prefix.path; cwd="/workspace/", platform=platform)
rel_path = relpath(path, prefix.path)
normalize_rpath = rp -> rp
add_rpath = x -> ``
relink = (x, y) -> ``
patchelf = "/usr/bin/patchelf"
install_name_tool = "/opt/bin/$(triplet(ur.platform))/install_name_tool"
if Sys.isapple(platform)
normalize_rpath = rp -> begin
if !startswith(rp, "@loader_path")
return "@loader_path/$(rp)"
end
return rp
end
add_rpath = rp -> `$install_name_tool -add_rpath $(rp) $(rel_path)`
relink = (op, np) -> `$install_name_tool -change $(op) $(np) $(rel_path)`
elseif Sys.islinux(platform) || Sys.isbsd(platform)
normalize_rpath = rp -> begin
if rp == "."
return "\$ORIGIN"
end
if startswith(rp, ".") || !startswith(rp, "/")
# Relative paths starting with `.`, or anything which isn't an absolute
# path. It may also be a relative path without the leading `./`
return "\$ORIGIN/$(rp)"
end
return rp
end
current_rpaths = [r for r in _rpaths(path) if !isempty(r)]
add_rpath = rp -> begin
# Join together RPaths to set new one
rpaths = unique(vcat(current_rpaths, rp))
# I don't like strings ending in '/.', like '$ORIGIN/.'. I don't think
# it semantically makes a difference, but why not be correct AND beautiful?
chomp_slashdot = path -> begin
if length(path) > 2 && path[end-1:end] == "/."
return path[1:end-2]
end
return path
end
rpaths = chomp_slashdot.(rpaths)
# Remove paths starting with `/workspace`: they will not work outside of the
# build environment and only create noise when debugging.
filter!(rp -> !startswith(rp, "/workspace"), rpaths)
rpath_str = join(rpaths, ':')
return `$patchelf $(patchelf_flags(platform)) --set-rpath $(rpath_str) $(rel_path)`
end
relink = (op, np) -> `$patchelf $(patchelf_flags(platform)) --replace-needed $(op) $(np) $(rel_path)`
end
# If the relative directory doesn't already exist within the RPATH of this
# binary, then add it in.
new_libdir = abspath(dirname(new_libpath) * "/")
if !(new_libdir in _canonical_rpaths(path))
libname = basename(old_libpath)
cmd = add_rpath(normalize_rpath(relpath(new_libdir, dirname(path))))
with_logfile(prefix, "update_rpath_$(basename(path))_$(libname).log"; subdir) do io
run(ur, cmd, io; verbose=verbose)
end
end
# Create a new linkage that uses the RPATH and/or environment variables to find things.
# This allows us to split things up into multiple packages, and as long as the
# libraries that this guy is interested in have been `dlopen()`'ed previously,
# (and have the appropriate SONAME) things should "just work".
if Sys.isapple(platform)
# On MacOS, we need to explicitly add `@rpath/` before our library linkage path.
# Note that this is still overridable through DYLD_FALLBACK_LIBRARY_PATH
new_libpath = joinpath("@rpath", basename(new_libpath))
else
# We just use the basename on all other systems (e.g. Linux). Note that using
# $ORIGIN, while cute, doesn't allow for overrides via LD_LIBRARY_PATH. :[
new_libpath = basename(new_libpath)
end
cmd = relink(old_libpath, new_libpath)
with_logfile(prefix, "update_linkage_$(basename(path))_$(basename(old_libpath)).log"; subdir) do io
run(ur, cmd, io; verbose=verbose)
end
return new_libpath
end
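# Hypothetical sketch (not part of the original source): retarget a binary's
# linkage from an absolute build-tree path to an RPATH-relative sibling
# library. All paths here are invented for illustration.
function _demo_update_linkage(prefix::Prefix, platform::AbstractPlatform)
    bin = joinpath(prefix.path, "lib", "libfoo.so")
    old = "/workspace/destdir/lib/libbar.so"
    new = joinpath(prefix.path, "lib", "libbar.so")
    return update_linkage(prefix, platform, bin, old, new; verbose=true)
end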
"""
is_troublesome_library_link(libname::AbstractString, platform::AbstractPlatform)
Return `true` if depending on `libname` is known to cause problems at runtime, `false` otherwise.
"""
function is_troublesome_library_link(libname::AbstractString, platform::AbstractPlatform)
if Sys.isapple(platform)
# In https://github.com/JuliaGtk/GtkSourceWidget.jl/pull/9 we found that
# depending on these libraries is an indication that system copies of libxml and
# libiconv has been picked up during compilation. At runtime, the system copies
# will be loaded, which are very likely to be incompatible with those provided
# by JLL packages. The solution is to make sure that JLL copies of these
# libraries are used.
if libname in ("/usr/lib/libxml2.2.dylib", "/usr/lib/libiconv.2.dylib")
return true
end
end
return false
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2496 | # This file contains some extra checks that don't fall into the other
# categories, for example because they're very platform-specific.
using Dates: DateTime, datetime2unix
function check_os_abi(oh::ObjectHandle, p::AbstractPlatform, rest...; verbose::Bool = false, kwargs...)
if Sys.isfreebsd(p)
if oh.ei.osabi != ELF.ELFOSABI_FREEBSD
# The dynamic loader should not have problems in this case, but the
# linker may not appreciate. Let the user know about this.
if verbose
msg = replace("""
$(basename(path(oh))) has an ELF header OS/ABI value that is not set to FreeBSD
($(ELF.ELFOSABI_FREEBSD)), this may be an issue at link time
""", '\n' => ' ')
@warn(strip(msg))
end
return false
end
elseif call_abi(p) == "eabihf"
# Make sure the object file has the hard-float ABI. See Table 4-2 of
# "ELF for the ARM Architecture" document
# (https://developer.arm.com/documentation/ihi0044/e/). Note: `0x000`
# means "no specific float ABI", `0x400` == EF_ARM_ABI_FLOAT_HARD.
if header(oh).e_flags & 0xF00 ∉ (0x000, 0x400)
if verbose
@error("$(basename(path(oh))) does not match the hard-float ABI")
end
return false
end
end
return true
end
# Problem: import libraries on Windows embed information about temporary object files,
# including their modification time on disk, which makes import libraries non-reproducible:
# <https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/1245>.
function normalise_implib_timestamp(path::AbstractString)
# Format of the object file info is something like
# / 1674301251 0 0 644 286 `
# where `1674301251` is the Unix timestamp of the modification time of the library, we
# normalise it to another timestamp. NOTE: it appears that changing the timestamp width
# would break the library, so we use another fixed 10-digit wide timestamp. 10-digit
# Unix timestamps span between 2001 and 2286, so we should be good for a while.
timestamp = trunc(Int, datetime2unix(DateTime(2013, 2, 13, 0, 49, 0))) # Easter egg
newlib = replace(read(path, String), r"(?<HEAD>/ +)(\d{10})(?<TAIL> +\d+ +\d+ +\d+ +\d+ +`)" => SubstitutionString("\\g<HEAD>$(timestamp)\\g<TAIL>"))
# Write back the file to disk.
write(path, newlib)
end
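# Illustrative sketch (not part of the original source): the archive header
# line below mimics an import library's timestamped object entry.
function _demo_normalise_implib_timestamp(dir::String = mktempdir())
    implib = joinpath(dir, "libfoo.dll.a")
    write(implib, "/               1674301251  0     0     644     286       `\n")
    normalise_implib_timestamp(implib)
    return read(implib, String)   # the 10-digit timestamp is now the fixed value
end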
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2123 | function check_case_sensitivity(prefix::Prefix)
all_ok = true
function check_set(root, list)
lowered = Set()
for f in list
lf = lowercase(f)
if lf in lowered
@warn("$(relpath(joinpath(root, f), prefix.path)) causes a case-sensitivity ambiguity!")
all_ok = false
end
push!(lowered, lf)
end
end
for (root, dirs, files) in walkdir(prefix.path)
check_set(root, dirs)
check_set(root, files)
end
return all_ok
end
function check_absolute_paths(prefix::Prefix, all_files::Vector; silent::Bool = false)
# Finally, check for absolute paths in any files. This is not a "fatal"
# offense, as many files have absolute paths. We want to know about it
# though, so we'll still warn the user.
for f in all_files
try
file_contents = String(read(f))
if occursin(prefix.path, file_contents)
if !silent
@warn("$(relpath(f, prefix.path)) contains an absolute path")
end
end
catch
if !silent
@warn("Skipping abspath scanning of $(f), as we can't open it")
end
end
end
return true
end
function ensure_executability(oh::ObjectHandle; verbose::Bool=false, silent::Bool=false)
old_mode = filemode(path(oh))
# Execution permissions only for users who can read the file
read_mask = (old_mode & 0o444) >> 2
# Check whether execute permission is missing for any user who can read the file
if old_mode & read_mask != read_mask
if verbose
@info "Making $(path(oh)) executable"
end
try
# Add executable permission for all users that can read the file
chmod(path(oh), old_mode | read_mask)
catch e
if isa(e, InterruptException)
rethrow(e)
end
if !silent
@warn "$(path(oh)) could not be made executable!" exception=(e, catch_backtrace())
end
end
end
return true
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 6582 | using JSON
## We start with definitions of instruction mnemonics, broken down by category:
const instruction_categories = JSON.parsefile(joinpath(@__DIR__, "instructions.json");
dicttype=Dict{String,Vector{String}})
# Turn instructions "inside out", so e.g. we have "vzeroall" => "avx"
const mnemonics_by_category = Dict(
inst => cat for (cat, insts) in instruction_categories for inst in insts
)
"""
instruction_mnemonics(path::AbstractString, platform::AbstractPlatform)
Dump a binary object with `objdump`, returning a list of instruction mnemonics
for further analysis with `analyze_instruction_set()`.
Note that this function only really makes sense for x86/x64 binaries. Don't
run this on armv7l, aarch64, ppc64le etc... binaries and expect it to work.
This function returns the list of mnemonics as well as the counts of each,
binned by the mapping defined within `instruction_categories`.
"""
function instruction_mnemonics(path::AbstractString, platform::AbstractPlatform)
# The outputs we are calculating
counts = Dict{SubString{String}, Int}(k => 0 for k in keys(instruction_categories))
mnemonics = Set{SubString{String}}()
ur = preferred_runner()(
abspath(dirname(path));
cwd="/workspace/",
platform=platform,
verbose=false,
)
output = IOBuffer()
# Run objdump to disassemble the input binary
if Sys.isbsd(platform)
objdump_cmd = "llvm-objdump -d $(basename(path))"
else
objdump_cmd = "\${target}-objdump -d $(basename(path))"
end
run_interactive(ur, Cmd(`/bin/bash -c "$(objdump_cmd)"`; ignorestatus=true); stdout=output, stderr=devnull)
seekstart(output)
for line in eachline(output)
isempty(line) && continue
# First, ensure that this line of output is 3 fields long at least
@static if VERSION >= v"1.7.0-DEV.35"
count('\t', line) != 2 && continue
else
count(==('\t'), line) != 2 && continue
end
# Grab the mnemonic for this line as the first word of the 3rd field
idx = findlast('\t', line)
s = SubString(line, idx+1)
space = findfirst(' ', s)
space === nothing && (space = lastindex(s) + 1)  # no operands: whole field is the mnemonic
m = SubString(s, 1, space-1)
push!(mnemonics, m)
# For each mnemonic, find it in mnemonics_by_category, if we can, and
# increment the appropriate `counts` member:
if haskey(mnemonics_by_category, m)
counts[mnemonics_by_category[m]] += 1
else
counts["unknown"] += 1
end
end
# Return both the list of mnemonics as well as the binned counts
return mnemonics, counts
end
function generic_march(p::AbstractPlatform)
return first(first(Base.BinaryPlatforms.arch_march_isa_mapping[arch(p)]))
end
"""
minimum_march(counts::Dict, p::AbstractPlatform)
This function returns the minimum instruction set required, depending on
whether the object file being pointed to is a 32-bit or 64-bit one:
* For 32-bit object files, this returns one of ["i686", "prescott"]
* For 64-bit object files, this returns one of ["x86_64", "avx", "avx2", "avx512"]
"""
function minimum_march(counts::Dict, p::AbstractPlatform)
if arch(p) == "x86_64"
avx512_instruction_categories = (
"pku", "rdseed", "adcx", "clflush", "xsavec",
"xsaves", "clwb", "avx512evex", "avex512vex",
)
avx2_instruction_categories = (
"movbe", "avx2", "rdwrfsgs", "fma", "bmi1", "bmi2", "f16c",
)
# note that the extensions mmx, sse, and sse2 are part of the generic x86-64 architecture
avx_instruction_categories = (
"sse3", "ssse3", "sse4", "avx", "aes", "pclmulqdq",
)
if any(get.(Ref(counts), avx512_instruction_categories, 0) .> 0)
return "avx512"
elseif any(get.(Ref(counts), avx2_instruction_categories, 0) .> 0)
return "avx2"
elseif any(get.(Ref(counts), avx_instruction_categories, 0) .> 0)
return "avx"
end
elseif arch(p) == "i686"
if counts["sse3"] > 0
return "prescott"
end
elseif arch(p) == "aarch64"
# TODO: Detect instructions for aarch64 extensions
elseif arch(p) == "armv6l"
# We're just always going to assume we're running the single armv6l that Julia runs on.
elseif arch(p) == "armv7l"
# TODO: Detect NEON and vfpv4 instructions
elseif arch(p) == "powerpc64le"
# TODO Detect POWER9/10 instructions
end
return generic_march(p)
end
"""
analyze_instruction_set(oh::ObjectHandle, platform::AbstractPlatform; verbose::Bool = false)
Analyze the instructions within the binary located at the given path for which
minimum instruction set it requires, taking note of groups of instruction sets
used such as `avx`, `sse4.2`, `i486`, etc....
Some binary files (such as libopenblas) contain multiple versions of functions,
internally determining which version to call by using the `cpuid` instruction
to determine processor support. In an effort to detect this, we make note of
any usage of the `cpuid` instruction, disabling our minimum instruction set
calculations if such an instruction is found, and notifying the user of this
if `verbose` is set to `true`.
Note that this function only really makes sense for x86/x64 binaries. Don't
run this on armv7l, aarch64, ppc64le etc... binaries and expect it to work.
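A hypothetical usage sketch (`libfoo.so` is an illustrative path):
```julia
using Base.BinaryPlatforms: Platform
march = readmeta("libfoo.so") do ohs
    analyze_instruction_set(first(ohs), Platform("x86_64", "linux"); verbose=true)
end
```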
"""
function analyze_instruction_set(oh::ObjectHandle, platform::AbstractPlatform; verbose::Bool = false)
# Get list of mnemonics
mnemonics, counts = instruction_mnemonics(path(oh), platform)
# Analyze for minimum instruction set
min_march = minimum_march(counts, platform)
# If the binary uses `cpuid`, we can't know what it's doing, so just
# return the most conservative ISA and warn the user if `verbose` is set.
if counts["cpuid"] > 0
if verbose && generic_march(platform) != min_march
msg = replace("""
$(basename(path(oh))) contains a `cpuid` instruction; refusing to
analyze for minimum instruction set, as it may dynamically select
the proper instruction set internally. Would have chosen
$(min_march), instead choosing $(generic_march(platform)).
""", '\n' => ' ')
@warn(strip(msg))
end
return generic_march(platform)
end
# Otherwise, return `min_march` and let 'em know!
return min_march
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 4146 | # Not everything has an SONAME
get_soname(oh::ObjectHandle) = nothing
# Auto-open a path into an ObjectHandle
function get_soname(path::AbstractString)
try
only(readmeta(ns -> get_soname.(ns), path))
catch e
@warn "Could not probe $(path) for an SONAME!" exception=(e, catch_backtrace())
return nothing
end
end
function get_soname(oh::ELFHandle)
# Get the dynamic entries, see if it contains a DT_SONAME
es = ELFDynEntries(oh)
soname_idx = findfirst(e -> e.entry.d_tag == ELF.DT_SONAME, es)
if soname_idx === nothing
# If all else fails, just return the filename.
return nothing
end
# Look up the SONAME from the string table
return strtab_lookup(es[soname_idx])
end
function get_soname(oh::MachOHandle)
# Get the dynamic entries, see if it contains an ID_DYLIB_CMD
lcs = MachOLoadCmds(oh)
id_idx = findfirst(lc -> typeof(lc) <: MachOIdDylibCmd, lcs)
if id_idx === nothing
# If all else fails, just return the filename.
return nothing
end
# Return the Dylib ID
return dylib_name(lcs[id_idx])
end
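# A hypothetical sketch of how these methods compose (paths and SONAMEs are
# illustrative): `get_soname(path)` opens the object file and dispatches on the
# handle type, e.g.
#
#     get_soname("destdir/lib/libfoo.so")     # "libfoo.so.1" (ELF DT_SONAME)
#     get_soname("destdir/lib/libfoo.dylib")  # "@rpath/libfoo.1.dylib" (Mach-O dylib ID)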
function ensure_soname(prefix::Prefix, path::AbstractString, platform::AbstractPlatform;
verbose::Bool = false, autofix::Bool = false, subdir::AbstractString="")
# Skip any kind of Windows platforms
if Sys.iswindows(platform)
return true
end
# Skip if this file already contains an SONAME
rel_path = relpath(realpath(path), realpath(prefix.path))
soname = get_soname(path)
if soname !== nothing
if verbose
@info("$(rel_path) already has SONAME \"$(soname)\"")
end
return true
else
soname = basename(path)
end
# If we're not allowed to fix it, fail out
if !autofix
return false
end
# Otherwise, set the SONAME
ur = preferred_runner()(prefix.path; cwd="/workspace/", platform=platform)
set_soname_cmd = ``
if Sys.isapple(platform)
install_name_tool = "/opt/bin/$(triplet(ur.platform))/install_name_tool"
set_soname_cmd = `$install_name_tool -id $(soname) $(rel_path)`
elseif Sys.islinux(platform) || Sys.isbsd(platform)
patchelf = "/usr/bin/patchelf"
set_soname_cmd = `$patchelf $(patchelf_flags(platform)) --set-soname $(soname) $(rel_path)`
end
# Run the SONAME-setting command, capturing its output into a logfile
retval = with_logfile(prefix, "set_soname_$(basename(rel_path))_$(soname).log"; subdir) do io
run(ur, set_soname_cmd, io; verbose=verbose)
end
if !retval
@warn("Unable to set SONAME on $(rel_path)")
return false
end
# Read the SONAME back in and ensure it's set properly
new_soname = get_soname(path)
if new_soname != soname
@warn("Set SONAME on $(rel_path) to $(soname), but read back $(string(new_soname))!")
return false
end
if verbose
@info("Set SONAME of $(rel_path) to \"$(soname)\"")
end
return true
end
"""
symlink_soname_lib(path::AbstractString)
We require that all shared libraries are accessible on disk through their
SONAME (if it exists). While this is almost always true in practice, it
doesn't hurt to make doubly sure.
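For example (illustrative): if `libfoo.so.1.2.3` carries the SONAME
`libfoo.so.1` but no file by that name sits next to it, running
```julia
symlink_soname_lib("destdir/lib/libfoo.so.1.2.3"; verbose=true, autofix=true)
```
would create the symlink `destdir/lib/libfoo.so.1 -> libfoo.so.1.2.3`.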
"""
function symlink_soname_lib(path::AbstractString; verbose::Bool = false,
autofix::Bool = false)
# If this library doesn't have an SONAME, then just quit out immediately
soname = get_soname(path)
if soname === nothing
return true
end
# Absolute path to where the SONAME-named file should be
soname_path = joinpath(dirname(path), basename(soname))
if !isfile(soname_path)
if autofix
target = basename(path)
if verbose
@info("Library $(soname) does not exist, creating link to $(target)...")
end
symlink(target, soname_path)
else
if verbose
@info("Library $(soname) does not exist, failing out...")
end
return false
end
end
return true
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 1840 | """
translate_symlinks(root::AbstractString; verbose::Bool=false)
Walks through the root directory given within `root`, finding all symlinks that
point to an absolute path within `root`, and rewriting them as relative
symlinks instead, increasing relocatability.
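For example (illustrative): a symlink `bin/foo -> /workspace/destdir/lib/libfoo.so`
under `root` is rewritten to the relative target computed roughly as
```julia
relpath("/workspace/destdir/lib/libfoo.so", "/workspace/destdir/bin")  # "../lib/libfoo.so"
```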
"""
function translate_symlinks(root::AbstractString; verbose::Bool=false)
for f in collect_files(root, islink)
link_target = readlink(f)
if isabspath(link_target) && startswith(link_target, "/workspace")
new_link_target = relpath(link_target, replace(dirname(f), root => "/workspace/destdir"))
if verbose
@info("Translating $f to point to $(new_link_target)")
end
rm(f; force=true)
symlink(new_link_target, f)
end
end
end
"""
warn_deadlinks(root::AbstractString)
Walks through the given `root` directory, finding broken symlinks and warning
the user about them. This is used to catch instances such as a build recipe
copying a symlink that points to a dependency; by doing so, it implicitly
breaks relocatability.
"""
function warn_deadlinks(root::AbstractString)
for f in collect_files(root, islink; exclude_externalities=false)
link_target = readlink(f)
if !startswith(link_target, "/")
# If the link is relative, prepend the dirname of `f` to
# `link_target`, otherwise `isfile(link_target)` will always be
# false, as the test isn't performed in dirname(f). Why not use
# `isabspath`? Because that check is platform-dependent, but we
# always build in a Unix-like environment.
link_target = joinpath(dirname(f), link_target)
end
if !ispath(link_target)
@warn("Broken symlink: $(relpath(f, root))")
end
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 11329 | function print_build_tarballs(io::IO, state::WizardState)
urlfiles = zip(state.source_urls, state.source_files)
sources_strings = map(urlfiles) do x
# Try to be smart and automatically replace version number with `$(version)`.
url_string = replace(repr(x[1]), string(state.version) => "\$(version)")
if endswith(x[1], ".git")
"GitSource($(url_string), $(repr(x[2].hash)))"
elseif any(endswith(x[1], ext) for ext in BinaryBuilderBase.archive_extensions)
"ArchiveSource($(url_string), $(repr(x[2].hash)))"
else
"FileSource($(url_string), $(repr(x[2].hash)))"
end
end
if !isempty(state.patches)
push!(sources_strings, "DirectorySource(\"./bundled\")")
end
sources_string = strip(join(sources_strings, ",\n "))
if Set(state.platforms) == Set(supported_platforms())
platforms_string = "supported_platforms()"
else
platforms_string = """
[
$(strip(join(repr.(state.platforms),",\n ")))
]
"""
end
if any(isnothing, (state.files, state.file_kinds, state.file_varnames))
products_string = "Product[\n]"
else
stuff = collect(zip(state.files, state.file_kinds, state.file_varnames))
sort!(stuff, by = x -> x[2], lt = (x, y) -> (x == :library || y == :other))
products_string = "[\n " * join(map(stuff) do x
file, kind, varname = x
if kind == :executable
# It's very common that executable products are in `bin`,
# so we special-case these to just print the basename:
dir_path = dirname(file)
if isempty(dir_path) || dirname(file) == "bin"
return "ExecutableProduct($(repr(basename(file))), $(repr(varname)))"
else
return "ExecutableProduct($(repr(basename(file))), $(repr(varname)), $(repr(dir_path)))"
end
elseif kind == :library
dir_path = dirname(file)
if isempty(dir_path) || dirname(file) in ("bin", "lib", "lib64")
return "LibraryProduct($(repr(normalize_name(file))), $(repr(varname)))"
else
return "LibraryProduct($(repr(normalize_name(file))), $(repr(varname)), $(repr(dir_path)))"
end
else
return "FileProduct($(repr(file)), $(repr(varname)))"
end
end,",\n ") * "\n]"
end
if length(state.dependencies) >= 1
function psrepr(ps)
s = "\n Dependency(PackageSpec(name=\"$(getname(ps))\""
if !isnothing(getpkg(ps).uuid)
s *= ", uuid=\"$(getpkg(ps).uuid)\""
end
s *= "))"
return s
end
dependencies_string = "[" * join(map(psrepr, state.dependencies)) * "\n]"
else
dependencies_string = "Dependency[\n]"
end
# Keyword arguments to `build_tarballs()`.
# All new recipes created by the wizard will require Julia v1.6, to make it
# easier to add support for new platforms, when possible.
kwargs_vec = ["julia_compat=\"1.6\""]
if state.compilers != [:c] && length(state.compilers) >= 1
push!(kwargs_vec, "compilers = [$(join(map(x -> ":$(x)", state.compilers), ", "))]")
end
# Default GCC version is the oldest one
if state.preferred_gcc_version != getversion(available_gcc_builds[1])
push!(kwargs_vec, "preferred_gcc_version = v\"$(state.preferred_gcc_version)\"")
end
# Default LLVM version is the latest one
if state.preferred_llvm_version != getversion(available_llvm_builds[end])
push!(kwargs_vec, "preferred_llvm_version = v\"$(state.preferred_llvm_version)\"")
end
kwargs = ""
if length(kwargs_vec) >= 1
kwargs = "; " * join(kwargs_vec, ", ")
end
print(io, """
# Note that this script can accept some limited command-line arguments, run
# `julia build_tarballs.jl --help` to see a usage message.
using BinaryBuilder, Pkg
name = $(repr(state.name))
version = $(repr(state.version))
# Collection of sources required to complete build
sources = [
$(sources_string)
]
# Bash recipe for building across all platforms
script = raw\"\"\"
$(strip(state.history))
\"\"\"
# These are the platforms we will build for by default, unless further
# platforms are passed in on the command line
platforms = $(platforms_string)
# The products that we will ensure are always built
products = $(products_string)
# Dependencies that must be installed before this package can be built
dependencies = $(dependencies_string)
# Build the tarballs, and possibly a `build.jl` as well.
build_tarballs(ARGS, name, version, sources, script, platforms, products, dependencies$(kwargs))
""")
end
"""
yggdrasil_deploy(state::WizardState)
Write out a WizardState to a `build_tarballs.jl` in a `Yggdrasil` clone, then
open a pull request against `Yggdrasil`.
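A hypothetical usage sketch: pass `false` to prepare the branch without
opening the pull request:
```julia
yggdrasil_deploy(state, false)
```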
"""
function yggdrasil_deploy(state::WizardState, open_pr::Bool = true)
btb = IOBuffer()
print_build_tarballs(btb, state)
seek(btb, 0)
build_tarballs_content = String(read(btb))
return yggdrasil_deploy(
state.name,
state.version,
state.patches,
build_tarballs_content;
open_pr=open_pr
)
end
function yggdrasil_deploy(name, version, patches, build_tarballs_content;
open_pr::Bool=false,
branch_name=nothing,
gh_auth = github_auth(;allow_anonymous=false))
# First, fork Yggdrasil (this just does nothing if it already exists)
fork = GitHub.create_fork("JuliaPackaging/Yggdrasil"; auth=gh_auth)
mktempdir() do tmp
# Clone our bare Yggdrasil out to a temporary directory
repo = LibGit2.clone(get_yggdrasil(), tmp)
# Check out a unique branch name
@info("Checking temporary Yggdrasil out to $(tmp)")
if branch_name === nothing
recipe_hash = bytes2hex(sha256(build_tarballs_content)[end-3:end])
branch_name = "wizard/$(name)-v$(version)_$(recipe_hash)"
end
LibGit2.branch!(repo, branch_name)
# Spit out the buildscript to the appropriate file:
rel_bt_path = yggdrasil_build_tarballs_path(name)
@info("Generating $(rel_bt_path)")
output_path = joinpath(tmp, rel_bt_path)
mkpath(dirname(output_path))
open(output_path, "w") do io
write(io, build_tarballs_content)
end
# If needed, add patches
if !isempty(patches)
rel_patches_path = joinpath(dirname(rel_bt_path), "bundled", "patches")
mkpath(joinpath(tmp, rel_patches_path))
for patch in patches
patch_path = joinpath(rel_patches_path, patch.name)
open(f->write(f, patch.patch), joinpath(tmp, patch_path), "w")
LibGit2.add!(repo, patch_path)
end
end
# Commit it and push it up to our fork
@info("Committing and pushing to $(fork.full_name)#$(branch_name)...")
LibGit2.add!(repo, rel_bt_path)
LibGit2.commit(repo, "New Recipe: $(name) v$(version)")
with_gitcreds("x-access-token", gh_auth.token) do creds
LibGit2.push(
repo,
refspecs=["+HEAD:refs/heads/$(branch_name)"],
remoteurl="https://github.com/$(fork.full_name).git",
credentials=creds,
# This doesn't work :rage:, so instead we use `+HEAD:` at the beginning
# of our refspec: https://github.com/JuliaLang/julia/issues/23057
#force=true,
)
end
if open_pr
# Open a pull request against Yggdrasil
@info("Opening a pull request against JuliaPackaging/Yggdrasil...")
params = Dict(
"base" => "master",
"head" => "$(dirname(fork.full_name)):$(branch_name)",
"maintainer_can_modify" => true,
"title" => "Wizard recipe: $(name)-v$(version)",
"body" => """
This pull request contains a new build recipe I built using the BinaryBuilder.jl wizard:
* Package name: $(name)
* Version: v$(version)
@staticfloat please review and merge.
"""
)
pr = create_or_update_pull_request("JuliaPackaging/Yggdrasil", params, auth=gh_auth)
@info("Pull request created: $(pr.html_url)")
else
println("Open the pull request by going to: ")
println("https://github.com/$(fork.full_name)/pull/new/$(HTTP.escapeuri(branch_name))?expand=1")
end
end
end
function _deploy(state::WizardState)
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
deploy_select = request(terminal,
"How should we deploy this build recipe?",
RadioMenu([
"Prepare a pull request against the community buildtree, Yggdrasil",
"Write to a local file",
"Print to stdout",
]; charset=:ascii)
)
println(state.outs)
if deploy_select == 1
yggdrasil_select = request(terminal,
"Would you like to actually open the pull request or just prepare it?",
RadioMenu([
"Go ahead, open it",
"No, just prepare it and let me look at it first",
]; charset=:ascii)
)
yggdrasil_deploy(state, yggdrasil_select == 1)
elseif deploy_select == 2
directory = nonempty_line_prompt(
"filename",
"Enter directory to write build_tarballs.jl to:";
ins=state.ins,
outs=state.outs,
)
mkpath(directory)
open(joinpath(directory, "build_tarballs.jl"), "w") do io
print_build_tarballs(io, state)
end
if !isempty(state.patches)
mkpath(joinpath(directory, "bundled", "patches"))
for patch in state.patches
patch_path = joinpath(directory, "bundled", "patches", patch.name)
open(f->write(f, patch.patch), patch_path, "w")
end
end
else
println(state.outs, "Your generated build_tarballs.jl:")
println(state.outs, "\n```")
print_build_tarballs(state.outs, state)
println(state.outs, "```")
end
end
function step7(state::WizardState)
print(state.outs, "Your build script was:\n\n\t")
println(state.outs, replace(state.history, "\n" => "\n\t"))
printstyled(state.outs, "\t\t\t# Step 7: Deployment\n\n", bold=true)
_deploy(state)
end
function deploy(state::WizardState = load_wizard_state(; as_is = true))
if state.step == :done
println(state.outs, "This state has been already deployed")
return
end
if state.step in (:step1, :step2)
println(state.outs, "This state cannot be deployed")
return
end
q = "This is an incomplete state, are you sure you want to deploy it?"
if yn_prompt(state, q, :n) == :y
_deploy(state)
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 5152 | using Registrator
# This is a global github authentication token that is set the first time
# we authenticate and then reused
const _github_auth = Ref{GitHub.Authorization}()
function github_auth(;allow_anonymous::Bool=true)
if (!isassigned(_github_auth) || !allow_anonymous && isa(_github_auth[], GitHub.AnonymousAuth)) && length(get(ENV, "GITHUB_TOKEN", "")) >= 40
try
_github_auth[] = GitHub.authenticate(ENV["GITHUB_TOKEN"])
catch e
if occursin("401", e.msg)
@warn "GitHub was unable to authenticate using the token from `ENV[\"GITHUB_TOKEN\"]`, it may be stale. Falling back on alternate authentication methods."
else
rethrow()
end
end
end
if !isassigned(_github_auth) && allow_anonymous
_github_auth[] = GitHub.AnonymousAuth()
end
if !isassigned(_github_auth)
_github_auth[] = GitHub.authenticate(obtain_token())
end
if !isa(_github_auth[], GitHub.AnonymousAuth)
# Also shove this into Registrator, so it uses the same token.
Registrator.CommentBot.CONFIG["github"] = Dict("token" => _github_auth[].token)
end
return _github_auth[]
end
function obtain_token(; outs=stdout, github_api=GitHub.DEFAULT_API)
println(outs)
printstyled(outs, "Authenticating with GitHub\n", bold=true)
@label retry
headers = Dict{String, String}("User-Agent"=>"BinaryBuilder-jl",
"Accept"=>"application/json")
# Request device authentication flow for BinaryBuilder OAauth APP
resp = HTTP.post("https://github.com/login/device/code", headers,
"client_id=2a955f9ca1a7c5b720f3&scope=public_repo")
if resp.status != 200
GitHub.handle_response_error(resp)
end
reply = JSON.parse(HTTP.payload(resp, String))
println(outs, """
To continue, we need to authenticate you with GitHub. Please navigate to
the following page in your browser and enter the code below:
$(HTTP.URIs.unescapeuri(reply["verification_uri"]))
#############
# $(reply["user_code"]) #
#############
""")
interval = reply["interval"]
device_code = reply["device_code"]
while true
# Poll for completion
sleep(interval)
resp = HTTP.post("https://github.com/login/oauth/access_token", headers,
"client_id=2a955f9ca1a7c5b720f3&grant_type=urn:ietf:params:oauth:grant-type:device_code&device_code=$device_code")
if resp.status != 200
GitHub.handle_response_error(resp)
end
token_reply = JSON.parse(HTTP.payload(resp, String))
if haskey(token_reply, "error")
error_kind = token_reply["error"]
if error_kind == "authorization_pending"
continue
elseif error_kind == "slow_down"
@warn "GitHub Auth rate limit exceeded. Waiting 10s. (This shouldn't happen)"
sleep(10)
elseif error_kind == "expired_token"
@error "Token request expired. Starting over!"
@goto retry
elseif error_kind == "access_denied"
@error "Authentication request canceled by user. Starting over!"
@goto retry
elseif error_kind in ("unsupported_grant_type",
"incorrect_client_credentials", "incorrect_device_code")
error("Received error kind $(error_kind). Please file an issue.")
else
error("Unexpected GitHub login error $(error_kind)")
end
end
token = token_reply["access_token"]
print(outs, """
Successfully obtained GitHub authorization token!
This token will be used for the rest of this BB session.
You will have to re-authenticate for any future session.
However, if you wish to bypass this step, you may create a
personal access token at """)
printstyled("https://github.com/settings/tokens"; bold=true)
println("\n and add the token to the")
printstyled(outs, "~/.julia/config/startup.jl"; bold=true)
println(outs, " file as:")
println(outs)
printstyled(outs, " ENV[\"GITHUB_TOKEN\"] = <token>"; bold=true)
println(outs)
println(outs, "This token is sensitive, so only do this in a computing environment you trust.")
println(outs)
return token
end
end
function create_or_update_pull_request(repo, params; auth=github_auth())
try
return create_pull_request(repo; params=params, auth=auth)
catch ex
# If it was already created, search for it so we can update it:
if Registrator.CommentBot.is_pr_exists_exception(ex)
prs, _ = pull_requests(repo; auth=auth, params=Dict(
"state" => "open",
"base" => params["base"],
"head" => string(split(repo, "/")[1], ":", params["head"]),
))
return update_pull_request(repo, first(prs).number; auth=auth, params=params)
else
rethrow(ex)
end
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 4817 | """
print_autoconf_hint(state::WizardState)
Print a hint for projects that use autoconf to have a good `./configure` line.
"""
function print_autoconf_hint(state::WizardState)
println(state.outs, " The recommended options for GNU Autoconf are:")
println(state.outs)
printstyled(state.outs, " ./configure --prefix=\${prefix} --build=\${MACHTYPE} --host=\${target}", bold=true)
println(state.outs)
println(state.outs)
println(state.outs, " followed by `make -j\${nproc}` and `make install`. Since the prefix environment")
println(state.outs, " variable is set already, this will automatically perform the installation")
println(state.outs, " into the correct directory.")
end
"""
provide_hints(state::WizardState, path::AbstractString)
Given an unpacked source directory, provide hints on how a user might go about
building the binary bounty they so richly desire.
"""
function provide_hints(state::WizardState, path::AbstractString)
files = readdir(path)
println(state.outs, "You have the following contents in your working directory:")
println(state.outs, join(map(x->string(" - ", x),files),'\n'))
printed = false
function start_hints()
printed || printstyled(state.outs, "Hints:\n", color=:yellow)
printed = true
end
# Avoid providing duplicate hints (even for files in separate directories)
# As long as the hint is the same, people will get the idea
hints_provided = Set{Symbol}()
function already_hinted(sym)
start_hints()
(sym in hints_provided) && return true
push!(hints_provided, sym)
return false
end
for (root, dirs, files) in walkdir(path)
for file in files
file_path = joinpath(root, file)
# Helper function to try to read the given path's contents, but
# returning an empty string on error (for e.g. broken symlinks)
read_contents(path) = try
String(read(path))
catch
""
end
if file == "configure" && occursin("Generated by GNU Autoconf", read_contents(file_path))
already_hinted(:autoconf) && continue
println(state.outs, " - ", replace(file_path, "$path/" => ""), "\n")
println(state.outs, " This file is a configure file generated by GNU Autoconf. ")
print_autoconf_hint(state)
elseif file == "configure.in" || file == "configure.ac"
already_hinted(:autoconf) && continue
println(state.outs, " - ", replace(file_path, "$path/" => ""), "\n")
println(state.outs, " This file is likely input to GNU Autoconf. ")
print_autoconf_hint(state)
elseif file == "CMakeLists.txt"
already_hinted(:CMake) && continue
println(state.outs, " - ", replace(file_path, "$path/" => ""), "\n")
print(state.outs, " This file is likely input to CMake. ")
println(state.outs, "The recommended options for CMake are")
println(state.outs)
printstyled(state.outs,
"""
cmake -B build -DCMAKE_INSTALL_PREFIX=\$prefix -DCMAKE_TOOLCHAIN_FILE=\${CMAKE_TARGET_TOOLCHAIN} -DCMAKE_BUILD_TYPE=Release
cmake --build build --parallel \${nproc}
cmake --install build
""", bold=true)
println(state.outs)
println(state.outs)
println(state.outs, " Since the prefix environment")
println(state.outs, " variable is set already, this will automatically perform the installation")
println(state.outs, " into the correct directory.\n")
elseif file == "meson.build"
already_hinted(:Meson) && continue
println(state.outs, " - ", replace(file_path, "$path/" => ""), "\n")
print(state.outs, " This file is likely input to Meson. ")
println(state.outs, "The recommended option for Meson is")
println(state.outs)
printstyled(state.outs, " meson --cross-file=\${MESON_TARGET_TOOLCHAIN} --buildtype=release", bold=true)
println(state.outs)
println(state.outs)
println(state.outs, " followed by `ninja` and `ninja install`. Since the prefix variable")
println(state.outs, " is set already, this will automatically perform the installation")
println(state.outs, " into the correct directory.\n")
end
end
end
println(state.outs)
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 34487 | using BinaryBuilderBase: get_concrete_platform, destdir
include("hints.jl")
# Add new method to `get_concrete_platform`: this is a common pattern throughout
# this file.
BinaryBuilderBase.get_concrete_platform(platform::AbstractPlatform, state::WizardState) =
get_concrete_platform(platform;
preferred_gcc_version = state.preferred_gcc_version,
preferred_llvm_version = state.preferred_llvm_version,
compilers = state.compilers)
# When rerunning the script generated during the wizard we want to fail on error
# and automatically install license at the end.
full_wizard_script(state::WizardState) =
"set -e\n" * state.history * "\n/bin/bash -lc auto_install_license"
"""
step4(state::WizardState, ur::Runner, platform::AbstractPlatform,
build_path::AbstractString, prefix::Prefix)
The fourth step selects build products after the first build is done
"""
function step4(state::WizardState, ur::Runner, platform::AbstractPlatform,
build_path::AbstractString, prefix::Prefix)
printstyled(state.outs, "\t\t\t# Step 4: Select build products\n\n", bold=true)
concrete_platform = get_concrete_platform(platform, state)
# Collect all executable/library files, explicitly exclude everything that is
# a symlink to the artifacts directory, as usual.
destdir_path = destdir(prefix, concrete_platform)
files = filter(f -> startswith(f, destdir_path), collapse_symlinks(collect_files(destdir_path)))
files = filter_object_files(files)
# Check if we can load them as an object file
files = filter(files) do f
readmeta(f) do ohs
return any(Auditor.is_for_platform(oh, platform) for oh in ohs)
end
end
# Strip out the prefix from filenames
state.files = map(file->replace(file, "$(destdir_path)/" => ""), files)
state.file_kinds = map(files) do f
readmeta(f) do ohs
oh = first(ohs)
if isexecutable(oh)
return :executable
elseif islibrary(oh)
return :library
else
return :other
end
end
end
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
if length(files) == 0
printstyled(state.outs, "ERROR: ", color=:red)
println(state.outs, "The build has produced no binary artifacts.")
println(state.outs, " "^7, "This is generally because an error occurred during the build")
println(state.outs, " "^7, "or because you forgot to `make install` or equivalent.")
println(state.outs)
choice = request(terminal, "How would you like to proceed? (CTRL-C to exit)",
RadioMenu([
"Return to build environment",
"Retry with a clean build environment",
"Edit the script"
]; charset=:ascii))
println(state.outs)
if choice == 1
# Link dependencies into the prefix again
concrete_platform = get_concrete_platform(platform, state)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), concrete_platform)
return step3_interactive(state, prefix, platform, ur, build_path, artifact_paths)
elseif choice == 2
state.step = :step3
return
elseif choice == 3
state.history = edit_script(state, state.history)
state.step = :step3_retry
return
end
elseif length(files) == 1
println(state.outs, "The build has produced only one build artifact:\n")
println(state.outs, "\t$(state.files[1])")
println(state.outs)
else
println(state.outs, "The build has produced several libraries and executables.")
println(state.outs, "Please select which of these you want to consider `products`.")
println(state.outs, "These are generally those artifacts you will load or use from julia.")
while true
selected = collect(request(
terminal,
"",
MultiSelectMenu(state.files; charset=:ascii))
)
selected_file_kinds = map(x->state.file_kinds[x], selected)
selected_files = map(x->state.files[x], selected)
if !isempty(selected_files)
state.file_kinds = selected_file_kinds
state.files = selected_files
break
else
println(state.outs, "You must make at least one selection.")
end
end
end
state.file_varnames = Symbol[]
println(state.outs, "Please provide a unique variable name for each build artifact:")
for f in state.files
default = basename(f)
ok = false
varname = ""
while !ok
if Base.isidentifier(default)
varname = line_prompt(
"variable name",
string(f, " (default '$(default)'):");
force_identifier=true,
ins=state.ins,
outs=state.outs,
)
isempty(varname) && (varname = default)
else
varname = nonempty_line_prompt(
"variable name",
string(f, ":");
force_identifier=true,
ins=state.ins,
outs=state.outs,
)
end
if isdefined(Base, Symbol(varname))
printstyled(state.outs, varname,
" is a symbol already defined in Base, please choose a different name\n",
color=:red)
else
ok = true
end
end
push!(state.file_varnames, Symbol(varname))
end
push!(state.validated_platforms, platform)
# Advance to next step
state.step = :step5a
println(state.outs)
end
"""
step3_audit(state::WizardState, platform::AbstractPlatform, prefix::Prefix)
Audit the `prefix`.
"""
function step3_audit(state::WizardState, platform::AbstractPlatform, destdir::String)
printstyled(state.outs, "\n\t\t\tAnalyzing...\n\n", bold=true)
audit(Prefix(destdir); io=state.outs, platform=platform,
verbose=true, autofix=true, require_license=true)
println(state.outs)
end
function diff_srcdir(state::WizardState, prefix::Prefix, ur::Runner)
# Check if there were any modifications made to the srcdir
if length(readdir(joinpath(prefix, "srcdir"))) > 0
# Note that we're re-creating the mount here, but in reverse
# the filesystem. It would be more efficient to just diff the files
# that were actually changed, but there doesn't seem to be a good
# way to tell diff to do that.
#
# N.B.: We only diff modified files. That should hopefully exclude
# generated build artifacts. Unfortunately, we may also miss files
# that the user newly added.
cmd = """
mkdir /meta/before
mkdir /meta/after
mount -t overlay overlay -o lowerdir=/workspace/srcdir,upperdir=/meta/upper,workdir=/meta/work /meta/after
mount --bind /workspace/srcdir /meta/before
cd /meta
/usr/bin/git diff --no-index --no-prefix --diff-filter=M before after
exit 0
"""
# If so, generate a diff
diff = read(ur, `/bin/bash -c $cmd`)
# It's still quite possible for the diff to be empty (e.g. due to the diff-filter above)
if !isempty(diff)
println(state.outs, "The source directory had the following modifications: \n")
println(state.outs, diff)
if yn_prompt(state, "Would you like to turn these modifications into a patch?", :y) == :y
patchname = nonempty_line_prompt(
"patch name",
"Please provide a name for this patch (.patch will be automatically appended):";
force_identifier=false,
ins=state.ins,
outs=state.outs,
)
patchname *= ".patch"
push!(state.patches, PatchSource(patchname, diff))
return true
end
end
end
return false
end
function bb_add(client, state::WizardState, prefix::Prefix, prefix_artifacts::Union{Dict{Prefix,Vector{String}}, Nothing}, platform::AbstractPlatform, jll::AbstractString)
if any(dep->getpkg(dep).name == jll, state.dependencies)
println(client, "ERROR: Package was already added")
return
end
if prefix_artifacts === nothing
println(client, "ERROR: `bb add` not available in this context (if you think it should be, file an issue)!")
return
end
new_dep = Dependency(jll)
try
# This will redo some work, but that may be ok
concrete_platform = get_concrete_platform(platform, state)
# Clear out the prefix artifacts directory in case this change caused
# any previous dependencies to change
cleanup_dependencies(prefix, get(prefix_artifacts, prefix, String[]), concrete_platform)
pkgs = getpkg.([state.dependencies; new_dep])
prefix_artifacts[prefix] = setup_dependencies(prefix, pkgs, concrete_platform)
push!(state.dependencies, new_dep)
catch e
showerror(client, e)
end
end
using ArgParse
function bb_parser()
s = ArgParseSettings()
@add_arg_table! s begin
"add"
help = "Add a jll package as if specified at the start of the wizard"
action = :command
end
@add_arg_table! s["add"] begin
"jll"
help = "The jll to add"
required = true
end
s
end
function setup_bb_service(state::WizardState, prefix, platform, prefix_artifacts::Union{Dict{Prefix,Vector{String}}, Nothing})
fpath = joinpath(prefix, "metadir", "bb_service")
server = listen(fpath)
@async begin
while isopen(server)
client = accept(server)
function client_error(err)
println(client, "ERROR: $err")
close(client)
end
@async while isopen(client)
try
cmd = readline(client)
ARGS = split(cmd, " ")
s = bb_parser()
try
popfirst!(ARGS)
parsed = parse_args(ARGS, s)
if parsed === nothing
elseif parsed["%COMMAND%"] == "add"
bb_add(client, state, prefix, prefix_artifacts, platform, parsed["add"]["jll"])
end
close(client)
catch e
if isa(e, ArgParseError)
println(client, "ERROR: ", e.text, "\n")
ArgParse.show_help(client, s)
close(client)
else
rethrow(e)
end
end
catch e
showerror(stderr, e)
close(client)
end
end
end
end
(fpath, server)
end
"""
interactive_build(state::WizardState, prefix::Prefix,
ur::Runner, build_path::AbstractString)
Runs the interactive shell for building, then captures bash history to save
reproducible steps for building this source. Shared between steps 3 and 5.
"""
function interactive_build(state::WizardState, prefix::Prefix,
ur::Runner, build_path::AbstractString,
platform::AbstractPlatform;
prefix_artifacts::Union{Dict{Prefix,Vector{String}}, Nothing} = nothing,
hist_modify = string, srcdir_overlay = true)
histfile = joinpath(prefix, "metadir", ".bash_history")
cmd = `/bin/bash -l`
had_patches = false
script_successful = false
server = nothing
fpath = nothing
try
if srcdir_overlay
mkpath(joinpath(prefix, "metadir", "upper"))
mkpath(joinpath(prefix, "metadir", "work"))
cmd = """
mount -t overlay overlay -o lowerdir=/workspace/srcdir,upperdir=/meta/upper,workdir=/meta/work /workspace/srcdir
cd \$(pwd)
/bin/bash --login
"""
cmd = `/bin/bash -c $cmd`
end
(fpath, server) = setup_bb_service(state, prefix, platform, prefix_artifacts)
script_successful = run_interactive(ur, ignorestatus(cmd), stdin=state.ins, stdout=state.outs, stderr=state.outs)
had_patches = srcdir_overlay && diff_srcdir(state, prefix, ur)
finally
# Julia's auto cleanup has trouble removing the overlayfs work
# directory, because the kernel sets permissions to `000` so do that
# here manually.
workdir = joinpath(prefix, "metadir", "work", "work")
if isdir(workdir)
rm(workdir)
end
if server !== nothing
close(server)
rm(fpath; force=true)
end
end
# This is an extremely simplistic way to capture the history,
# but ok for now. Obviously doesn't include any interactive
# programs, etc.
if isfile(histfile)
state.history = hist_modify(state.history,
# This is a bit of a hack for now to get around the fact
# that we don't know cwd when we get back from bash, but
# always start in the WORKSPACE. This makes sure the script
# accurately reflects that.
string("cd \$WORKSPACE/srcdir\n", had_patches ?
raw"""
for f in ${WORKSPACE}/srcdir/patches/*.patch; do
atomic_patch -p1 ${f}
done
""" : "",
String(read(histfile))))
rm(histfile)
end
if !run(ur, `/bin/bash -lc "auto_install_license -c"`, devnull)
@warn "License file not found, install it with the `install_license` command"
end
if !script_successful
warn = """
\nWarning: The interactive build exited with an error code.
In non-interactive mode, this is an error. You may want to
adjust the script.
"""
printstyled(state.outs, warn, color=:yellow)
end
printstyled(state.outs, "\n\t\t\tBuild complete\n\n", bold=true)
print(state.outs, "Your build script was:\n\n\t")
println(state.outs, replace(state.history, "\n" => "\n\t"))
if yn_prompt(state, "Would you like to edit this script now?", :n) == :y
state.history = edit_script(state, state.history)
println(state.outs)
msg = strip("""
We will now rebuild with your new script to make sure it still works.
""")
println(state.outs, msg)
println(state.outs)
return true
else
return false
end
end
"""
step3_interactive(state::WizardState, prefix::Prefix, platform::AbstractPlatform,
ur::Runner, build_path::AbstractString)
The interactive portion of step3, moving on to either rebuild with an edited
script or proceed to step 4.
"""
function step3_interactive(state::WizardState, prefix::Prefix,
platform::AbstractPlatform,
ur::Runner, build_path::AbstractString, artifact_paths::Vector{String})
concrete_platform = get_concrete_platform(platform, state)
if interactive_build(state, prefix, ur, build_path, platform; prefix_artifacts=Dict(prefix=>artifact_paths))
# Unsymlink all the deps from the dest_prefix before moving to the next step
cleanup_dependencies(prefix, artifact_paths, concrete_platform)
state.step = :step3_retry
else
step3_audit(state, platform, destdir(prefix, concrete_platform))
# Unsymlink all the deps from the dest_prefix before moving to the next step
cleanup_dependencies(prefix, artifact_paths, concrete_platform)
return step4(state, ur, platform, build_path, prefix)
end
end
"""
step3_retry(state::WizardState)
Rebuilds the initial Linux x86_64 build after things like editing the script
file manually, etc...
"""
function step3_retry(state::WizardState)
platform = pick_preferred_platform(state.platforms)
msg = "\t\t\t# Attempting to build for $(triplet(platform))\n\n"
printstyled(state.outs, msg, bold=true)
build_path = tempname()
mkpath(build_path)
concrete_platform = get_concrete_platform(platform, state)
prefix = setup_workspace(build_path, vcat(state.source_files, state.patches), concrete_platform; verbose=false)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), concrete_platform)
ur = preferred_runner()(
prefix.path;
cwd="/workspace/srcdir",
platform=platform,
src_name=state.name,
compilers=state.compilers,
preferred_gcc_version=state.preferred_gcc_version,
preferred_llvm_version=state.preferred_llvm_version,
)
with_logfile(joinpath(build_path, "out.log")) do io
run(ur, `/bin/bash -c $(full_wizard_script(state))`, io; verbose=true, tee_stream=state.outs)
end
step3_audit(state, platform, destdir(prefix, concrete_platform))
# Unsymlink all the deps from the dest_prefix before moving to the next step
cleanup_dependencies(prefix, artifact_paths, concrete_platform)
return step4(state, ur, platform, build_path, prefix)
end
"""
Pick the first platform for use to run on. We prefer Linux x86_64 because
that's generally the host platform, so it's usually easiest. After that we
go by the following preferences:
* OS (in order): Linux, Windows, OSX
* Architecture: x86_64, i686, aarch64, powerpc64le, armv7l
* The first remaining after this selection
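For example (illustrative):
```julia
using Base.BinaryPlatforms: Platform
pick_preferred_platform([Platform("aarch64", "macos"), Platform("x86_64", "windows")])
# Platform("x86_64", "windows"): Windows outranks macOS, then x86_64 wins
```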
"""
function pick_preferred_platform(platforms)
if Platform("x86_64", "linux") in platforms
return Platform("x86_64", "linux")
end
for o in ("linux", "windows", "macos")
plats = filter(p-> os(p) == o, platforms)
if !isempty(plats)
platforms = plats
end
end
for a in ("x86_64", "i686", "aarch64", "powerpc64le", "armv7l")
plats = filter(p->arch(p) == a, platforms)
if !isempty(plats)
platforms = plats
end
end
return first(platforms)
end
"""
step34(state::WizardState)
Starts initial build for Linux x86_64, which is our initial test target
platform. Sources that build properly for this platform continue on to attempt
builds for more complex platforms.
"""
function step34(state::WizardState)
platform = pick_preferred_platform(state.platforms)
push!(state.visited_platforms, platform)
printstyled(state.outs, "\t\t\t# Step 3: Build for $(platform)\n\n", bold=true)
println(state.outs, "You will now be dropped into the cross-compilation environment.")
print(state.outs, "Please compile the package. Your initial compilation target is ")
printstyled(state.outs, triplet(platform), bold=true)
println(state.outs)
print(state.outs, "The ")
printstyled(state.outs, "\$prefix ", bold=true)
println(state.outs, "environment variable contains the target directory.")
print(state.outs, "Once you are done, exit by typing ")
printstyled(state.outs, "`exit`", bold=true)
print(state.outs, " or ")
printstyled(state.outs, "`^D`", bold=true)
println(state.outs)
println(state.outs)
build_path = tempname()
mkpath(build_path)
state.history = ""
concrete_platform = get_concrete_platform(platform, state)
prefix = setup_workspace(
build_path,
vcat(state.source_files, state.patches),
concrete_platform;
verbose=false
)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), concrete_platform; verbose=true)
provide_hints(state, joinpath(prefix, "srcdir"))
ur = preferred_runner()(
prefix.path;
cwd="/workspace/srcdir",
workspaces = [
joinpath(prefix, "metadir") => "/meta",
],
platform=platform,
src_name=state.name,
compilers=state.compilers,
preferred_gcc_version=state.preferred_gcc_version,
preferred_llvm_version=state.preferred_llvm_version,
)
return step3_interactive(state, prefix, platform, ur, build_path, artifact_paths)
end
function step5_internal(state::WizardState, platform::AbstractPlatform)
print(state.outs, "Your next build target will be ")
printstyled(state.outs, triplet(platform), bold=true)
println(state.outs)
print(state.outs, "Press Enter to continue...")
read(state.ins, Char)
println(state.outs)
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
printstyled(state.outs, "\t\t\t# Attempting to build for $(triplet(platform))\n\n", bold=true)
build_path = tempname()
mkpath(build_path)
local ok = false
# The code path in this function is rather complex (and unpredictable)
# due to the fact that the user makes the choices. Therefore we keep
# track of all the linked artifacts in a dictionary, and make sure to
# unlink them before setting up a new build prefix
prefix_artifacts = Dict{Prefix,Vector{String}}()
while !ok
cd(build_path) do
concrete_platform = get_concrete_platform(platform, state)
prefix = setup_workspace(build_path, vcat(state.source_files, state.patches), concrete_platform; verbose=true)
# Clean up artifacts in case there are some
cleanup_dependencies(prefix, get(prefix_artifacts, prefix, String[]), concrete_platform)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), concrete_platform; verbose=true)
# Record newly added artifacts for this prefix
prefix_artifacts[prefix] = artifact_paths
ur = preferred_runner()(
prefix.path;
cwd="/workspace/srcdir",
platform=platform,
src_name=state.name,
compilers=state.compilers,
preferred_gcc_version=state.preferred_gcc_version,
preferred_llvm_version=state.preferred_llvm_version,
)
with_logfile(joinpath(build_path, "out.log")) do io
ok = run(ur, `/bin/bash -c $(full_wizard_script(state))`, io; verbose=true, tee_stream=state.outs)
end
while true
msg = "\n\t\t\tBuild complete. Analyzing...\n\n"
printstyled(state.outs, msg, bold=true)
if !ok
println(state.outs)
printstyled(state.outs, "ERROR: ", color=:red)
msg = "The build script failed (see above).\n"
println(state.outs, msg)
else
audit(Prefix(destdir(prefix, concrete_platform)); io=state.outs,
platform=platform, verbose=true, autofix=true, require_license=true)
ok = isempty(match_files(state, prefix, platform, state.files))
if !ok
println(state.outs)
printstyled(state.outs, "ERROR: ", color=:red)
msg = "Some build products could not be found (see above).\n"
println(state.outs, msg)
end
end
if !ok
choice = request(terminal,
"How would you like to proceed? (CTRL-C to exit)",
RadioMenu([
"Drop into build environment",
"Open a clean session for this platform",
"Disable this platform",
"Edit build script",
]; charset=:ascii)
)
if choice == 1
if interactive_build(state, prefix, ur, build_path, platform;
prefix_artifacts = prefix_artifacts,
hist_modify = function(olds, s)
"""
$olds
if [ \$target = "$(triplet(platform))" ]; then
$s
fi
"""
end)
# We'll go around again after this
break
else
# Go back to analysis of the newly environment
continue
end
elseif choice == 2
rm(build_path; force=true, recursive=true)
mkpath(build_path)
concrete_platform = get_concrete_platform(platform, state)
prefix = setup_workspace(
build_path,
vcat(state.source_files, state.patches),
concrete_platform;
verbose=true,
)
# Clean up artifacts in case there are some
cleanup_dependencies(prefix, get(prefix_artifacts, prefix, String[]), concrete_platform)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), platform; verbose=true)
# Record newly added artifacts for this prefix
prefix_artifacts[prefix] = artifact_paths
ur = preferred_runner()(
prefix.path;
cwd="/workspace/srcdir",
platform=platform,
src_name=state.name,
compilers=state.compilers,
preferred_gcc_version=state.preferred_gcc_version,
preferred_llvm_version=state.preferred_llvm_version,
)
if interactive_build(state, prefix, ur, build_path, platform;
prefix_artifacts = prefix_artifacts,
hist_modify = function(olds, s)
"""
if [ \$target != "$(triplet(platform))" ]; then
$olds
else
$s
fi
"""
end)
# We'll go around again after this
break
else
# Go back to analyzing the new environment
continue
end
elseif choice == 3
filter!(p->p != platform, state.platforms)
ok = true
break
elseif choice == 4
change_script!(state, edit_script(state, state.history))
break
# We'll go around again after this
end
else
ok = true
push!(state.validated_platforms, platform)
println(state.outs)
print(state.outs, "You have successfully built for ")
printstyled(state.outs, triplet(platform), bold=true)
println(state.outs, ". Congratulations!")
break
end
end
println(state.outs)
end
end
# Unsymlink all the deps from the prefixes before moving to the next step
for (prefix, paths) in prefix_artifacts
cleanup_dependencies(prefix, paths, get_concrete_platform(platform, state))
end
return ok
end
function step5a(state::WizardState)
printstyled(state.outs, "\t\t\t# Step 5: Generalize the build script\n\n", bold=true)
# We will try to pick a platform for a different operating system
possible_platforms = filter(state.platforms) do plat
!(any(state.visited_platforms) do p
plat isa typeof(p)
end)
end
if isempty(possible_platforms)
state.step = :step5b
return
end
platform = pick_preferred_platform(possible_platforms)
println(state.outs, "We will now attempt to use the same script to build for other operating systems.")
println(state.outs, "This will help iron out any issues with the cross compiler.")
if step5_internal(state, platform)
push!(state.visited_platforms, platform)
state.step = :step5b
end
end
function step5b(state::WizardState)
# We will try to pick a platform for a different architecture
possible_platforms = filter(state.platforms) do plat
!(any(state.visited_platforms) do p
arch(plat) == arch(p) ||
# Treat the x86 variants equivalently
(arch(p) in ("x86_64", "i686") &&
arch(plat) in ("x86_64", "i686"))
end)
end
if isempty(possible_platforms)
state.step = :step5c
return
end
platform = pick_preferred_platform(possible_platforms)
println(state.outs, "This should uncover issues related to architecture differences.")
if step5_internal(state, platform)
state.step = :step5c
push!(state.visited_platforms, platform)
end
end
function step5c(state::WizardState)
msg = strip("""
We will now attempt to build all remaining platforms. Note that these
builds are not verbose. If you have edited the script since we attempted
to build for any given platform, we will verify that the new script still
works. This will probably take a while.
Press Enter to continue...
""")
print(state.outs, msg)
read(state.ins, Char)
println(state.outs)
pred = x -> !(x in state.validated_platforms)
for platform in filter(pred, state.platforms)
print(state.outs, "Building $(triplet(platform)): ")
build_path = tempname()
mkpath(build_path)
local ok = true
concrete_platform = get_concrete_platform(platform, state)
prefix = setup_workspace(
build_path,
vcat(state.source_files, state.patches),
concrete_platform;
verbose=false,
)
artifact_paths = setup_dependencies(prefix, getpkg.(state.dependencies), concrete_platform; verbose=false)
ur = preferred_runner()(
prefix.path;
cwd="/workspace/srcdir",
platform=platform,
src_name=state.name,
compilers=state.compilers,
preferred_gcc_version=state.preferred_gcc_version,
preferred_llvm_version=state.preferred_llvm_version,
)
with_logfile(joinpath(build_path, "out.log")) do io
ok = run(ur, `/bin/bash -c $(full_wizard_script(state))`, io; verbose=false, tee_stream=state.outs)
end
if ok
audit(Prefix(destdir(prefix, concrete_platform));
io=state.outs,
platform=platform,
verbose=false,
silent=true,
autofix=true,
require_license=true,
)
ok = isempty(match_files(
state,
prefix,
platform,
state.files;
silent = true
))
end
# Unsymlink all the deps from the prefix before moving to the next platform
cleanup_dependencies(prefix, artifact_paths, concrete_platform)
print(state.outs, "[")
if ok
printstyled(state.outs, "✓", color=:green)
push!(state.validated_platforms, platform)
else
printstyled(state.outs, "✗", color=:red)
push!(state.failed_platforms, platform)
end
println(state.outs, "]")
end
println(state.outs)
end
function step6(state::WizardState)
if isempty(state.failed_platforms)
if any(p->!(p in state.validated_platforms), state.platforms)
# Some platforms weren't validated; we probably edited the script.
# Go back to 5c to recompute failed platforms
state.step = :step5c
else
state.step = :step7
printstyled(state.outs, "\t\t\tDone!\n\n", bold=true)
end
return
end
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
msg = "\t\t\t# Step 6: Revisit failed platforms\n\n"
printstyled(state.outs, msg, bold=true)
println(state.outs, "Several platforms failed to build:")
for plat in state.failed_platforms
println(state.outs, " - ", plat)
end
println(state.outs)
choice = request(terminal,
"How would you like to proceed? (CTRL-C to exit)",
RadioMenu([
"Disable these platforms",
"Revisit manually",
"Edit script and retry all",
]; charset=:ascii)
)
println(state.outs)
if choice == 1
filter!(p->!(p in state.failed_platforms), state.platforms)
state.step = :step7
elseif choice == 2
plats = collect(state.failed_platforms)
if length(plats) > 1
choice = request(terminal,
"Which platform would you like to revisit?",
RadioMenu(map(repr, plats); charset=:ascii))
println(state.outs)
else
choice = 1
end
if step5_internal(state, plats[choice])
delete!(state.failed_platforms, plats[choice])
end
# Will wrap back around to step 6
elseif choice == 3
change_script!(state, edit_script(state, state.history))
empty!(state.failed_platforms)
# Will wrap back around to step 6 (which'll go back to 5c)
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 14765 | using BinaryBuilderBase: available_gcc_builds, available_llvm_builds, enable_apple_file, macos_sdk_already_installed, accept_apple_sdk, cached_git_clone
import Downloads
"""
Canonicalize a GitHub repository URL
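For example (illustrative):
```julia
canonicalize_source_url("github.com/JuliaLang/julia")
# "https://github.com/JuliaLang/julia.git"
```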
"""
function canonicalize_source_url(url)
repo_regex = r"(https:\/\/)?github.com\/([^\/]+)\/([^\/]+)\/?$"
m = match(repo_regex, url)
if m !== nothing
_, user, repo = m.captures
if !endswith(repo, ".git")
return "https://github.com/$user/$repo.git"
end
end
url
end
"""
Canonicalize URL to a file within a GitHub repo
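For example (illustrative; a branch or tag ref would first be resolved to a
commit SHA through the GitHub API):
```julia
canonicalize_file_url("https://github.com/user/repo/blob/0123456789abcdef0123456789abcdef01234567/README.md")
# "https://raw.githubusercontent.com/user/repo/0123456789abcdef0123456789abcdef01234567/README.md"
```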
"""
function canonicalize_file_url(url)
blob_regex = r"(https:\/\/)?github.com\/([^\/]+)\/([^\/]+)\/blob\/([^\/]+)\/(.+)"
m = match(blob_regex, url)
if m !== nothing
_, user, repo, ref, filepath = m.captures
if length(ref) != 40 || !all(c->isnumeric(c) || c in 'a':'f' || c in 'A':'F', ref)
# Ask github to resolve this ref for us
ref = GitHub.reference(GitHub.Repo("$user/$repo"), "heads/$ref").object["sha"]
end
return "https://raw.githubusercontent.com/$user/$repo/$ref/$(filepath)"
end
url
end
"""
download_source(state::WizardState)
Ask the user where the source code is coming from, then download and record the
relevant parameters, returning the source `url`, the local `path` it is stored
at after download, and a `hash` identifying the version of the code. In the
case of a `git` source URL, the `hash` will be a git treeish identifying the
exact commit used to build the code, in the case of a tarball, it is the
`sha256` hash of the tarball itself.
"""
function download_source(state::WizardState)
entered_url = nothing
while entered_url === nothing #&& !eof(state.ins)
# First, ask the user where this is all coming from
msg = replace(strip("""
Please enter a URL (git repository or compressed archive) containing the
source code to build or `N` to stop:
"""), "\n" => " ")
new_entered_url = nonempty_line_prompt("URL", msg; ins=state.ins, outs=state.outs)
if new_entered_url == "N" || new_entered_url == "n"
# Normalize to "N" so the caller's sentinel check matches either case
return "N", SetupSource("N", "", "", "")
end
# Early-exit for invalid URLs, using HTTP.URIs.parse_uri() to ensure
# it is a valid URL
try
HTTP.URIs.parse_uri(new_entered_url; strict=true)
entered_url = new_entered_url
catch e
printstyled(state.outs, e.msg, color=:red, bold=true)
println(state.outs)
println(state.outs)
continue
end
end
# Did the user exit out with ^D, or did something else go horribly wrong?
if entered_url === nothing
error("Could not obtain source URL")
end
url = string(canonicalize_source_url(entered_url))
if url != entered_url
print(state.outs, "The entered URL has been canonicalized to\n")
printstyled(state.outs, url, bold=true)
println(state.outs)
println(state.outs)
end
local source_hash
if endswith(url, ".git") || startswith(url, "git://")
source_path = cached_git_clone(url; progressbar=true, verbose=true)
# Clone the URL, record the current gitsha for the given branch
repo = GitRepo(source_path)
msg = "You have selected a git repository. Please enter a branch, commit or tag to use.\n" *
"Please note that for reproducibility, the exact commit will be recorded, \n" *
"so updates to the remote resource will not be used automatically; \n" *
"you will have to manually update the recorded commit."
#print(state.outs, msg, "\n> ")
treeish = nonempty_line_prompt("git reference", msg; ins=state.ins, outs=state.outs)
obj = try
LibGit2.GitObject(repo, treeish)
catch
LibGit2.GitObject(repo, "origin/$treeish")
end
source_hash = LibGit2.string(LibGit2.GitHash(obj))
# Tell the user what we recorded the current commit as
print(state.outs, "Recorded as ")
printstyled(state.outs, source_hash, bold=true)
println(state.outs)
close(repo)
else
# Download the source tarball
basename_without_urlparams(url) = first(split(basename(url), "?"))
source_path = joinpath(state.workspace, basename_without_urlparams(url))
if isfile(source_path)
# Try to match everything up to but not including ".tar.*" to strip multiple file extensions
m = match(r"^.+(?=(\.tar\.([\s\S]+)))", basename(source_path))
name, ext = if isnothing(m)
splitext(basename(source_path))
else
m.match, m.captures[1]
end
n = 1
while isfile(joinpath(state.workspace, "$(name)_$n$ext"))
n += 1
end
source_path = joinpath(state.workspace, "$(name)_$n$ext")
end
Downloads.download(url, source_path)
# Save the source hash
open(source_path) do file
source_hash = bytes2hex(sha256(file))
end
end
# Spit back the url, local path and source hash
return url, SetupSource(url, source_path, source_hash, "")
end
"""
step1(state::WizardState)
It all starts with a single step, the unabashed ambition to leave your current
stability and engage with the universe on a quest to create something new,
beautiful and unforeseen. It all ends with compiler errors.
This step selects the relevant platform(s) for the built binaries.
"""
function step1(state::WizardState)
print_wizard_logo(state.outs)
# Select a platform
msg = "\t\t\t# Step 1: Select your platforms\n\n"
printstyled(state.outs, msg, bold=true)
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
platform_select = request(terminal,
"Make a platform selection",
RadioMenu([
"All Supported Platforms",
"Select by Operating System",
"Fully Custom Platform Choice",
]; charset=:ascii)
)
println(state.outs)
# Set `state.platforms` accordingly
result = nothing
if platform_select == 1
state.platforms = supported_platforms()
elseif platform_select == 2
oses = sort(unique(map(os, supported_platforms())))
while true
result = request(terminal,
"Select operating systems",
MultiSelectMenu(oses; charset=:ascii)
)
result = map(x->oses[x], collect(result))
if isempty(result)
println("Must select at least one operating system")
else
break
end
end
state.platforms = collect(filter(x->os(x) in result, supported_platforms()))
elseif platform_select == 3
platfs = supported_platforms()
while true
result = request(terminal,
"Select platforms",
MultiSelectMenu(map(repr, platfs); charset=:ascii)
)
if isempty(result)
println("Must select at least one platform")
else
break
end
end
state.platforms = collect(map(x->platfs[x], collect(result)))
else
error("Somehow platform_select was not a valid choice!")
end
if any(p -> Sys.isapple(p), state.platforms) && !isfile(enable_apple_file()) && !macos_sdk_already_installed()
# Ask the user if they accept to download the macOS SDK
if accept_apple_sdk(state.ins, state.outs)
touch(enable_apple_file())
else
# The user refused to download the macOS SDK
println(state.outs)
printstyled(state.outs, "Removing MacOS from the list of platforms...\n", bold=true)
filter!(p -> !Sys.isapple(p), state.platforms)
end
end
if isempty(state.platforms)
# In case the user didn't accept the macOS SDK and macOS was the only
# platform selected.
error("No valid platform selected!")
end
println(state.outs)
end
function obtain_binary_deps(state::WizardState)
msg = "\t\t\t# Step 2b: Obtain binary dependencies (if any)\n\n"
printstyled(state.outs, msg, bold=true)
q = "Do you require any (binary) dependencies? "
state.dependencies = Dependency[]
if yn_prompt(state, q, :n) == :y
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
local resolved_deps
jll_names = String[]
while true
jll_name = nonempty_line_prompt("package name", "Enter JLL package name:"; ins=state.ins, outs=state.outs)
if !endswith(jll_name, "_jll")
jll_name *= "_jll"
end
# Check to see if this JLL package name can be resolved:
push!(jll_names, jll_name)
all_resolved, resolved_deps = resolve_jlls(Dependency.(jll_names), outs=state.outs)
if !all_resolved
pop!(jll_names)
if yn_prompt(state, "Unable to resolve \"$(jll_name)\"; enter a new one?", :y) != :y
break
else
continue
end
end
q = "Would you like to provide additional dependencies? "
if yn_prompt(state, q, :n) != :y
break
end
end
# jll_names contains the valid names, resolved_deps potentially contains
# unresolved deps, so we filter them out here.
state.dependencies = filter(x -> getname(x) ∈ jll_names, resolved_deps)
end
println(state.outs)
end
function obtain_source(state::WizardState)
msg = "\t\t\t# Step 2a: Obtain the source code\n\n"
printstyled(state.outs, msg, bold=true)
# Create the workspace that we'll stash everything within
state.workspace = tempname()
mkpath(state.workspace)
# These are the metadata we need to know about all the sources we'll be
# building over the course of this journey we're on together.
state.source_urls = String[]
state.source_files = SetupSource[]
while true
url, file = download_source(state)
if url != "N"
push!(state.source_urls, url)
push!(state.source_files, file)
println(state.outs)
else
if isempty(state.source_urls)
printstyled(state.outs, "No URLs were given.\n", color=:yellow, bold=true)
continue
end
end
q = "Would you like to download additional sources? "
if yn_prompt(state, q, :n) != :y
break
end
end
println(state.outs)
end
function get_name_and_version(state::WizardState)
ygg = LibGit2.GitRepo(get_yggdrasil())
while state.name === nothing
msg = "Enter a name for this project. This will be used for filenames:"
# Remove trailing `_jll` in case the user thinks this should be part of the name
new_name = replace(nonempty_line_prompt("Name", msg; ins=state.ins, outs=state.outs),
r"_jll$" => "")
if !Base.isidentifier(new_name)
println(state.outs, "\"$(new_name)\" is an invalid identifier. Try again.")
continue
end
# Check to see if this project name already exists
if case_insensitive_repo_file_exists(ygg, yggdrasil_build_tarballs_path(new_name))
println(state.outs, "A build recipe with that name already exists within Yggdrasil.")
if yn_prompt(state, "Choose a new project name?", :y) != :n
continue
end
end
state.name = new_name
end
msg = "Enter a version number for this project:"
while state.version === nothing
try
state.version = VersionNumber(nonempty_line_prompt("Version", msg; ins=state.ins, outs=state.outs))
catch e
if isa(e, ArgumentError)
println(state.outs, e.msg)
continue
end
rethrow(e)
end
end
end
@enum Compilers C=1 Go Rust
function get_compilers(state::WizardState)
while state.compilers === nothing
compiler_descriptions = Dict(C => "C/C++/Fortran", Go => "Go", Rust => "Rust")
compiler_symbols = Dict(Int(C) => :c, Int(Go) => :go, Int(Rust) => :rust)
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
result = nothing
while true
select_menu = MultiSelectMenu([compiler_descriptions[i] for i in instances(Compilers)]; charset=:ascii)
select_menu.selected = Set([Int(C)])
result = request(terminal,
"Select compilers for the project",
select_menu
)
if isempty(result)
println("Must select at least one platform")
else
break
end
end
state.compilers = map(c -> compiler_symbols[c], collect(result))
end
end
function get_preferred_version(state::WizardState, compiler::AbstractString,
available_versions::Vector{VersionNumber})
terminal = TTYTerminal("xterm", state.ins, state.outs, state.outs)
message = "Select the preferred $(compiler) version (default: $(first(available_versions)))"
version_selected = request(terminal, message, RadioMenu(string.(available_versions); charset=:ascii))
if compiler == "GCC"
state.preferred_gcc_version = available_versions[version_selected]
elseif compiler == "LLVM"
state.preferred_llvm_version = available_versions[version_selected]
end
end
"""
step2(state::WizardState)
This step obtains the source code to be built and required binary dependencies.
"""
function step2(state::WizardState)
obtain_source(state)
obtain_binary_deps(state)
get_name_and_version(state)
if yn_prompt(state, "Do you want to customize the set of compilers?", :n) == :y
get_compilers(state)
# Default GCC version is the oldest one
get_preferred_version(state, "GCC", getversion.(available_gcc_builds))
# Default LLVM version is the latest one
get_preferred_version(state, "LLVM", getversion.(reverse(available_llvm_builds)))
else
state.compilers = [:c]
# Default GCC version is the oldest one
state.preferred_gcc_version = getversion(available_gcc_builds[1])
# Default LLVM version is the latest one
state.preferred_llvm_version = getversion(available_llvm_builds[end])
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 3584 | import Base: show
abstract type AbstractWizardDependency; end
struct InlineBuildDependency <: AbstractWizardDependency
script::String
end
struct RemoteBuildDependency <: AbstractWizardDependency
url::String
script::Union{String, Nothing}
end
struct TarballDependency <: AbstractWizardDependency
url::String
hash::String
end
"""
WizardState
Building large dependencies can take a lot of time. This state object captures
all relevant state of this function. It can be passed back to the function to
resume where we left off. This can aid debugging when code changes are
necessary. It also holds all necessary metadata such as input/output streams.
"""
@Base.kwdef mutable struct WizardState
step::Symbol = :step1
ins::IO = stdin
outs::IO = stdout
# Filled in by step 1
platforms::Union{Nothing, Vector{Platform}} = nothing
# Filled in by step 2
workspace::Union{Nothing, String} = nothing
source_urls::Union{Nothing, Vector{String}} = nothing
source_files::Union{Nothing, Vector{SetupSource}} = nothing
dependencies::Union{Nothing, Vector{Dependency}} = nothing
compilers::Union{Nothing, Vector{Symbol}} = nothing
preferred_gcc_version::Union{Nothing, VersionNumber} = nothing
preferred_llvm_version::Union{Nothing, VersionNumber} = nothing
# Filled in by step 3
history::Union{Nothing, String} = nothing
files::Union{Nothing, Vector{String}} = nothing
file_kinds::Union{Nothing, Vector{Symbol}} = nothing
file_varnames::Union{Nothing, Vector{Symbol}} = nothing
patches::Vector{PatchSource} = PatchSource[]
# Filled in by step 5c
failed_platforms::Set{Platform} = Set{Platform}()
# Used to keep track of which platforms we already visited
visited_platforms::Set{Platform} = Set{Platform}()
# Used to keep track of which platforms we have shown to work
# with the current script. This gets reset if the script is edited.
validated_platforms::Set{Platform} = Set{Platform}()
# Filled in by step 7
name::Union{Nothing, String} = nothing
version::Union{Nothing, VersionNumber} = nothing
github_api::GitHub.GitHubAPI = GitHub.DEFAULT_API
end
function serializeable_fields(::WizardState)
# We can't serialize TTYs, in general.
bad_fields = [:ins, :outs, :github_api]
return [f for f in fieldnames(WizardState) if !(f in bad_fields)]
end
# Serialize a WizardState out into a JLD2 dictionary-like object
function serialize(io, x::WizardState)
for field in serializeable_fields(x)
io[string(field)] = getproperty(x, field)
end
# For unnecessarily complicated fields (such as `x.github_api`) store the internal data raw:
io["github_api"] = string(x.github_api.endpoint)
# For non-serializable fields (such as `x.ins` and `x.outs`) we just recreate them in unserialize().
end
function unserialize(io)
x = WizardState()
for field in serializeable_fields(x)
setproperty!(x, field, io[string(field)])
end
# Manually recreate `ins` and `outs`. Note that this just sets them to their default values
x.ins = stdin
x.outs = stdout
x.github_api = GitHub.GitHubWebAPI(HTTP.URI(io["github_api"]))
return x
end
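# Round-trip sketch (illustrative, not part of the original source): `io` can
# be any dictionary-like store, e.g. a JLD2 group; a plain Dict stands in here
# purely for demonstration:
#
#     io = Dict{String,Any}()
#     serialize(io, WizardState())
#     state = unserialize(io)
#     state == WizardState()    # true; only serializeable fields are compared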
# Compare only serializeable fields when checking ==
function Base.:(==)(x::WizardState, y::WizardState)
for field in serializeable_fields(x)
if getproperty(x, field) != getproperty(y, field)
return false
end
end
return true
end
function show(io::IO, x::WizardState)
print(io, "WizardState [$(x.step)]")
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 9051 | # `_getpass` is based on `Base.getpass` but doesn't print any prompt
if Sys.iswindows()
function _getpass(input::Base.TTY)
input === stdin || throw(ArgumentError("getpass only works for stdin"))
s = SecretBuffer()
plen = 0
while true
c = UInt8(ccall(:_getch, Cint, ()))
if c == 0xff || c == UInt8('\n') || c == UInt8('\r')
break # EOF or return
elseif c == 0x00 || c == 0xe0
ccall(:_getch, Cint, ()) # ignore function/arrow keys
elseif c == UInt8('\b') && plen > 0
plen -= 1 # delete last character on backspace
elseif !iscntrl(Char(c)) && plen < 128
write(s, c)
end
end
return seekstart(s)
end
else
function _getpass(input::Base.TTY)
input === stdin || throw(ArgumentError("getpass only works for stdin"))
Base.unsafe_SecretBuffer!(ccall(:getpass, Cstring, (Cstring,), ""))
end
end
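# Usage sketch (illustrative): read a secret without echoing it, shredding the
# buffer once the plaintext has been extracted:
#
#     buf = _getpass(stdin)
#     token = read(buf, String)
#     Base.shred!(buf)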
function line_prompt(name, msg; ins=stdin, outs=stdout, force_identifier=false, echo=true)
while true
print(outs, msg, "\n> ")
if echo
val = strip(readline(ins))
else
val = strip(read(_getpass(ins), String))
end
if !isopen(ins)
throw(InterruptException())
end
println(outs)
if !isempty(val) && force_identifier && !Base.isidentifier(val)
printstyled(outs, "$(name) must be an identifier!\n", color=:red)
continue
end
return val
end
end
function nonempty_line_prompt(name, msg; outs=stdout, kwargs...)
while true
val = line_prompt(name, msg; outs=outs, kwargs...)
if isempty(val)
printstyled(outs, "$(name) may not be empty!\n", color=:red)
continue
end
return val
end
end
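# Usage sketch (illustrative): loop until the user enters a non-empty, valid
# Julia identifier:
#
#     name = nonempty_line_prompt("Name", "Enter a project name:";
#                                 force_identifier=true)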
"""
normalize_name(file::AbstractString)
Given a filename, normalize it, stripping out extensions. E.g. the file path
`"foo/libfoo.tar.gz"` would get mapped to `"libfoo"`.
"""
function normalize_name(file::AbstractString)
file = basename(file)
idx = findfirst(isequal('.'), file)
if idx !== nothing
file = file[1:prevind(file, idx)]
end
# Strip -123, which is a common thing for libraries on windows
idx = findlast(isequal('-'), file)
if idx !== nothing && all(isnumeric, file[nextind(file, idx):end])
file = file[1:prevind(file, idx)]
end
return file
end
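# Examples (illustrative, derived from the rules above):
#
#     normalize_name("foo/libfoo.tar.gz")   # -> "libfoo"
#     normalize_name("libquadmath-0.dll")   # -> "libquadmath"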
"""
filter_object_files(files)
Given a list of files, filter out any that cannot be opened by `readmeta()`
from `ObjectFile`.
"""
function filter_object_files(files)
return filter(files) do f
try
readmeta(f) do ohs
return true
end
catch e
# If it was just a MagicMismatch, then return false for this file
if isa(e, ObjectFile.MagicMismatch)
return false
end
# If it was an EOFError (e.g. this was an empty file) then return false
if isa(e, EOFError)
return false
end
# If something else went wrong, rethrow the error and pass it up
rethrow(e)
end
end
end
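# Usage sketch (illustrative): keep only the binary objects within a prefix,
# silently dropping scripts, headers, empty files, etc.:
#
#     binaries = filter_object_files(collect_files(prefix))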
"""
match_files(state::WizardState, prefix::Prefix,
platform::AbstractPlatform, files::Vector; silent::Bool = false)
Inspects all binary files within a prefix, matching them with a given list of
`files`, complaining if there are any files that are not properly matched and
returning the set of normalized names that were not matched, or an empty set if
all names were properly matched.
"""
function match_files(state::WizardState, prefix::Prefix,
platform::AbstractPlatform, files::Vector; silent::Bool = false)
# Collect all executable/library files
prefix_files = collapse_symlinks(collect_files(prefix))
prefix_files = filter_object_files(prefix_files)
# Check if we can load them as an object file
prefix_files = filter(prefix_files) do f
readmeta(f) do ohs
if !any(Auditor.is_for_platform(oh, platform) for oh in ohs)
if !silent
@warn("Skipping binary `$f` with incorrect platform")
end
return false
end
return true
end
end
norm_prefix_files = Set(map(normalize_name, prefix_files))
norm_files = Set(map(normalize_name, files))
d = setdiff(norm_files, norm_prefix_files)
if !isempty(d)
if !silent
@warn("Could not find correspondences for $(join(d, ' '))")
end
end
return d
end
"""
edit_script(state::WizardState, script::AbstractString)
For consistency (and security), use the sandbox for editing a script, launching
`vi` within an interactive session to edit a buildscript.
"""
function edit_script(state::WizardState, script::AbstractString)
mktempdir() do tempdir
path = joinpath(tempdir, "script")
open(path, "w") do io
write(io, script)
end
edit_script(path, state.ins, state.outs, state.outs)
return String(read(path))
end
end
function edit_script(file::AbstractString, ins, outs, errs)
# Launch a sandboxed vim editor
ur = preferred_runner()(
dirname(file),
cwd = "/workspace/",
platform = Platform("x86_64", "linux"),
)
run_interactive(ur,
`/usr/bin/vim /workspace/$(basename(file))`;
stdin=ins,
stdout=outs,
stderr=errs,
)
end
"""
yn_prompt(state::WizardState, question::AbstractString, default = :y)
Perform a `[Y/n]` or `[y/N]` question loop, using `default` to choose between
the prompt styles, and looping until a proper response (e.g. `"y"`, `"yes"`,
`"n"` or `"no"`) is received.
"""
function yn_prompt(state::WizardState, question::AbstractString, default = :y)
@assert default in (:y, :n)
ynstr = default == :y ? "[Y/n]" : "[y/N]"
while true
print(state.outs, question, " ", ynstr, ": ")
answer = lowercase(strip(readline(state.ins)))
if isempty(answer)
return default
elseif answer == "y" || answer == "yes"
return :y
elseif answer == "n" || answer == "no"
return :n
else
println(state.outs, "Unrecognized answer. Answer `y` or `n`.")
end
end
end
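# Usage sketch (illustrative, `new_script` is a stand-in variable): default to
# :n so that a bare <Enter> declines:
#
#     if yn_prompt(state, "Overwrite the existing script?", :n) == :y
#         change_script!(state, new_script)
#     end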
"""
Change the script. This will invalidate all platforms to make sure we later
verify that they still build with the new script.
"""
function change_script!(state, script)
state.history = strip(script)
empty!(state.validated_platforms)
end
function print_wizard_logo(outs)
logo = raw"""
o `.
o*o \'-_ 00000000: 01111111 $.
\\ \;"". ,;.--* 00000001: 01000101 $E
\\ ,\''--.--'/ 00000002: 01001100 $L
:=\--<' `"" _ | 00000003: 01000110 $F
||\\ `" / ''-- 00000004: 00000010 .
`/_\\,-| | 00000005: 00000001 .
\\/ L
\\ ,' \
_/ L' ` \ Join us in the #binarybuilder channel on the
/ / / / community slack: https://julialang.org/slack/
/ / | \
"_''--_-''---__=; https://github.com/JuliaPackaging/BinaryBuilder.jl
"""
blue = "\033[34m"
red = "\033[31m"
green = "\033[32m"
magenta = "\033[35m"
normal = "\033[0m\033[0m"
# These color codes are annoying to embed, just run replacements here
logo = replace(logo, " o " => " $(green)o$(normal) ")
logo = replace(logo, "o*o" => "$(red)o$(blue)*$(magenta)o$(normal)")
logo = replace(logo, ".--*" => "$(red).$(green)-$(magenta)-$(blue)*$(normal)")
logo = replace(logo, "\$." => "$(blue).$(normal)")
logo = replace(logo, "\$E" => "$(red)E$(normal)")
logo = replace(logo, "\$L" => "$(green)L$(normal)")
logo = replace(logo, "\$F" => "$(magenta)F$(normal)")
logo = replace(logo, "#binarybuilder" => "$(green)#binarybuilder$(normal)")
logo = replace(logo, "https://julialang.slack.com" => "$(green)https://julialang.slack.com$(normal)")
println(outs, logo)
println(outs,
"Welcome to the BinaryBuilder wizard. ",
"We'll get you set up in no time."
)
println(outs)
end
"""
with_gitcreds(f, username::AbstractString, password::AbstractString)
Calls `f` with an `LibGit2.UserPasswordCredential` object as an argument, constructed from
the `username` and `password` values. `with_gitcreds` ensures that the credentials object
gets properly shredded after it's no longer necessary. E.g.:
```julia
with_gitcreds(user, token) do creds
LibGit2.clone("https://github.com/foo/bar.git", "bar"; credentials=creds)
end
```
"""
function with_gitcreds(f, username::AbstractString, password::AbstractString)
creds = LibGit2.UserPasswordCredential(deepcopy(username), deepcopy(password))
try
f(creds)
finally
Base.shred!(creds)
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 6680 | # Only update yggdrasil once
yggdrasil_updated = false
function get_yggdrasil()
# TODO: Eventually, we want to use a Pkg cache to store Yggdrasil,
# but since that doesn't exist yet, we'll stick it into `deps`:
yggdrasil_dir = abspath(joinpath(@__DIR__, "..", "..", "deps", "Yggdrasil"))
if !isdir(yggdrasil_dir)
@info( "Cloning bare Yggdrasil into deps/Yggdrasil...")
LibGit2.clone("https://github.com/JuliaPackaging/Yggdrasil.git", yggdrasil_dir; isbare=true)
else
if !yggdrasil_updated
@info("Updating bare Yggdrasil clone in deps/Yggdrasil...")
repo = LibGit2.GitRepo(yggdrasil_dir)
LibGit2.fetch(repo)
origin_master_oid = LibGit2.GitHash(LibGit2.lookup_branch(repo, "origin/master", true))
LibGit2.reset!(repo, origin_master_oid, LibGit2.Consts.RESET_SOFT)
end
end
global yggdrasil_updated = true
return yggdrasil_dir
end
"""
yggdrasil_build_tarballs_path(name::String)
Return the relative path within a Yggdrasil clone where this project (given
its name) would be stored. This is useful for things like generating the
`build_tarballs.jl` file and checking to see if it already exists, etc...
Note that we do not allow case-ambiguities within Yggdrasil; we check for this
using the utility function `case_insensitive_repo_file_exists(repo, path)`.
"""
function yggdrasil_build_tarballs_path(name)
dir = uppercase(name[1])
return "$(dir)/$(name)/build_tarballs.jl"
end
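# Example (illustrative): the recipe for a project named "Zlib" lives at
#
#     yggdrasil_build_tarballs_path("Zlib")   # -> "Z/Zlib/build_tarballs.jl"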
using LibGit2: GitRepo, GitCommit, GitTree, GitObject
function case_insensitive_repo_file_exists(repo::GitRepo, path)
# Walk from top-to-bottom, checking all branches for the eventual leaf node
tree = LibGit2.peel(GitTree, GitCommit(repo, "origin/master"))
spath = splitpath(path)
frontier = GitTree[tree]
for (idx,node) in enumerate(spath)
lnode = lowercase(node)
new_frontier = GitTree[]
for tree in frontier
LibGit2.treewalk(tree) do root, entry
if lowercase(LibGit2.filename(entry)) == lnode &&
(idx == lastindex(spath) || LibGit2.entrytype(entry) == GitTree)
push!(new_frontier, GitTree(entry))
end
return 1
end
end
frontier = new_frontier
isempty(frontier) && return false
end
return !isempty(frontier)
end
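# Usage sketch (illustrative): check whether a recipe already exists in the
# bare Yggdrasil clone, ignoring case differences:
#
#     repo = LibGit2.GitRepo(get_yggdrasil())
#     case_insensitive_repo_file_exists(repo, yggdrasil_build_tarballs_path("zlib"))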
function with_yggdrasil_pr(f::Function, pr_number::Integer)
# Get Yggdrasil, then force it to fetch our pull request refspec
yggy = LibGit2.GitRepo(get_yggdrasil())
# First, delete any local branch that might exist with our "pr-$(pr_number)" name:
branch_name = "pr-$(pr_number)"
branch = LibGit2.lookup_branch(yggy, branch_name)
if branch !== nothing
LibGit2.delete_branch(branch)
end
mktempdir() do tmpdir
# Fetch the PR contents down into a local branch
@info("Fetching Yggdrasil PR #$(pr_number) and checking out to $(tmpdir)")
LibGit2.fetch(yggy; refspecs=["pull/$(pr_number)/head:refs/heads/$(branch_name)"])
LibGit2.clone(LibGit2.path(yggy), tmpdir; branch=branch_name)
cd(tmpdir) do
f()
end
end
end
function test_yggdrasil_pr(pr_number::Integer)
# Get list of files changed in this PR
with_yggdrasil_pr(pr_number) do
# Analyze the current repository, figure out what files have been changed
@info("Inspecting changed files in PR #$(pr_number)")
r = GitHub.Repo("JuliaPackaging/Yggdrasil")
changed_files = [f.filename for f in GitHub.pull_request_files(r, pr_number)]
# Discard anything that doesn't end with `build_tarballs.jl`
filter!(f -> endswith(f, "build_tarballs.jl"), changed_files)
# Discard anything starting with 0_RootFS
filter!(f -> !startswith(f, "0_RootFS"), changed_files)
# If there's nothing left, fail out
if length(changed_files) == 0
error("Unable to find any valid changes!")
end
# Use TerminalMenus to narrow down which to build
terminal = TTYTerminal("xterm", stdin, stdout, stderr)
if length(changed_files) > 1
builder_idx = request(terminal,
"Multiple recipes modified, which to build?",
RadioMenu(basename.(dirname.(changed_files)); charset=:ascii)
)
println()
build_tarballs_path = joinpath(pwd(), changed_files[builder_idx])
else
build_tarballs_path = joinpath(pwd(), first(changed_files))
end
# Next, run that `build_tarballs.jl`
successful = false
while true
try
cd(dirname(build_tarballs_path)) do
run(`$(Base.julia_cmd()) --color=yes build_tarballs.jl --verbose --debug`)
@info("Build successful! Recipe temporarily available at $(joinpath(pwd(), "build_tarballs.jl"))")
end
# Exit the `while` loop
@info("Build successful!")
break
catch
end
what_do = request(terminal,
"Build unsuccessful:",
RadioMenu([
# The definition of insanity
"Try again immediately",
"Edit build_tarball.jl file, then try again",
"Bail out",
]; charset=:ascii)
)
println()
if what_do == 2
edit_script(build_tarballs_path, stdin, stdout, stderr)
@info("Building with new recipe...")
continue
elseif what_do == 3
break
end
end
# If we make it this far, we are in a good state; check to see if we've modified stuff;
# if we have, offer to push it up to a new branch.
if LibGit2.isdirty(LibGit2.GitRepo(pwd()))
what_do = request(terminal,
"Changes to $(build_tarballs_path) detected:",
RadioMenu([
"Open a PR to the Yggdrasil PR",
"Display diff and quit",
"Discard",
]; charset=:ascii)
)
println()
if what_do == 1
dummy_name = basename(dirname(build_tarballs_path))
dummy_version = v"1.33.7"
yggdrasil_deploy(dummy_name, dummy_version, [], read(build_tarballs_path))
elseif what_do == 2
cd(dirname(build_tarballs_path))
run(`git diff`)
end
end
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 43152 | using BinaryBuilder.Auditor
using BinaryBuilder.Auditor: compatible_marchs, valid_library_path
# Tests for our auditing infrastructure
@testset "Auditor - cppfilt" begin
# We take some known x86_64-linux-gnu symbols and pass them through c++filt
mangled_symbol_names = [
"_ZNKSt7__cxx1110_List_baseIiSaIiEE13_M_node_countEv",
"_ZNKSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEE6lengthEv@@GLIBCXX_3.4.21",
"_Z10my_listlenNSt7__cxx114listIiSaIiEEE",
"_ZNKSt7__cxx114listIiSaIiEE4sizeEv",
]
unmangled_symbol_names = Auditor.cppfilt(mangled_symbol_names, Platform("x86_64", "linux"))
@test all(unmangled_symbol_names .== [
"std::__cxx11::_List_base<int, std::allocator<int> >::_M_node_count() const",
"std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::length() const@@GLIBCXX_3.4.21",
"my_listlen(std::__cxx11::list<int, std::allocator<int> >)",
"std::__cxx11::list<int, std::allocator<int> >::size() const",
])
# Do the same, for macOS, whose compilers usually prepend an additional underscore to symbols
mangled_symbol_names = [
"__ZNSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEC1EPKcRKS3_.isra.41",
"__ZStplIcSt11char_traitsIcESaIcEENSt7__cxx1112basic_stringIT_T0_T1_EEOS8_PKS5_",
"__ZNSt6vectorISt4pairINSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEESt3mapIS6_S6_St4lessIS6_ESaIS0_IKS6_S6_EEEESaISE_EED1Ev",
"__ZNSt8_Rb_treeINSt7__cxx1112basic_stringIcSt11char_traitsIcESaIcEEESt4pairIKS5_S5_ESt10_Select1stIS8_ESt4lessIS5_ESaIS8_EE7_M_copyINSE_11_Alloc_nodeEEEPSt13_Rb_tree_nodeIS8_EPKSI_PSt18_Rb_tree_node_baseRT_",
]
unmangled_symbol_names = Auditor.cppfilt(mangled_symbol_names, Platform("x86_64", "macos"); strip_underscore=true)
@test all(unmangled_symbol_names .== [
"std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >::basic_string(char const*, std::allocator<char> const&) (.isra.41)",
"std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > std::operator+<char, std::char_traits<char>, std::allocator<char> >(std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >&&, char const*)",
"std::vector<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::map<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > > > > >::~vector()",
"std::_Rb_tree_node<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > >* std::_Rb_tree<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::_Select1st<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > >, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > >::_M_copy<std::_Rb_tree<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::_Select1st<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > >, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > >::_Alloc_node>(std::_Rb_tree_node<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > const*, std::_Rb_tree_node_base*, std::_Rb_tree<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >, std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::_Select1st<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > >, std::less<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > >, std::allocator<std::pair<std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > > > >::_Alloc_node&)",
])
end
@testset "Auditor - ISA tests" begin
@test compatible_marchs(Platform("x86_64", "linux")) == ["x86_64"]
@test compatible_marchs(Platform("x86_64", "linux"; march="x86_64")) == ["x86_64"]
@test compatible_marchs(Platform("x86_64", "linux"; march="avx")) == ["x86_64", "avx"]
@test compatible_marchs(Platform("x86_64", "linux"; march="avx2")) == ["x86_64", "avx", "avx2"]
@test compatible_marchs(Platform("x86_64", "linux"; march="avx512")) == ["x86_64", "avx", "avx2", "avx512"]
@test compatible_marchs(Platform("armv7l", "linux")) == ["armv7l"]
@test compatible_marchs(Platform("i686", "linux"; march="prescott")) == ["pentium4", "prescott"]
@test compatible_marchs(Platform("aarch64", "linux"; march="armv8_1")) == ["armv8_0", "armv8_1"]
product = ExecutableProduct("main", :main)
# The microarchitecture of the product doesn't match the target architecture: complain!
mktempdir() do build_path
platform = Platform("x86_64", "linux"; march="avx")
build_output_meta = nothing
@test_logs (:info, "Building for x86_64-linux-gnu-march+avx") (:warn, r"is avx512, not avx as desired.$") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"isa_tests",
v"1.0.0",
[DirectorySource(build_tests_dir)],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
cd ${WORKSPACE}/srcdir/isa_tests
make -j${nproc} CFLAGS="-march=skylake-avx512 -mtune=skylake-avx512" install
install_license /usr/include/ltdl.h
""",
# Build for our platform
[platform],
# Ensure our executable products are built
[product],
# No dependencies
Dependency[];
preferred_gcc_version=v"6",
lock_microarchitecture=false,
)
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
# Run ISA test
readmeta(locate(product, prefix)) do ohs
foreach(ohs) do oh
detected_isa = Auditor.analyze_instruction_set(oh, platform; verbose=true)
@test detected_isa == "avx512"
end
end
end
end
# The instruction set of the product is compatible with the target
# architecture, but it's lower than desired: issue a gentle warning
mktempdir() do build_path
platform = Platform("x86_64", "linux"; march="avx512")
build_output_meta = nothing
@test_logs (:info, "Building for x86_64-linux-gnu-march+avx512") (:warn, r"is avx, not avx512 as desired. You may be missing some optimization flags during compilation.$") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"isa_tests",
v"1.0.0",
[DirectorySource(build_tests_dir)],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
cd ${WORKSPACE}/srcdir/isa_tests
make -j${nproc} CFLAGS="-march=sandybridge -mtune=sandybridge" install
install_license /usr/include/ltdl.h
""",
# Build for our platform
[platform],
# Ensure our executable products are built
[product],
# No dependencies
Dependency[];
preferred_gcc_version=v"6",
lock_microarchitecture=false,
)
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
# Run ISA test
readmeta(locate(product, prefix)) do ohs
foreach(ohs) do oh
detected_march = Auditor.analyze_instruction_set(oh, platform; verbose=true)
@test detected_march == "avx"
end
end
end
end
# The microarchitecture of the product matches the target architecture: no warnings!
for march in ("x86_64", "avx", "avx2", "avx512")
mktempdir() do build_path
platform = Platform("x86_64", "linux"; march=march)
build_output_meta = nothing
@test_logs (:info, "Building for x86_64-linux-gnu-march+$(march)") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"isa_tests",
v"1.0.0",
[DirectorySource(build_tests_dir)],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
cd ${WORKSPACE}/srcdir/isa_tests
make -j${nproc} install
install_license /usr/include/ltdl.h
""",
# Build for our platform
[platform],
# Ensure our executable products are built
[product],
# No dependencies
Dependency[];
# Use a recent version of GCC to make sure we can detect the
# ISA accurately even with new optimizations
preferred_gcc_version=v"8"
)
end
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
# Run ISA test
readmeta(locate(product, prefix)) do ohs
foreach(ohs) do oh
detected_march = Auditor.analyze_instruction_set(oh, platform; verbose=true)
if march == "avx2"
# Detecting the ISA isn't 100% reliable and it's even less
# accurate when looking for AVX2 features
@test_broken march == detected_march
else
@test march == detected_march
end
end
end
end
end
end
@testset "Auditor - cxxabi selection" begin
libcxxstringabi_test = LibraryProduct("libcxxstringabi_test", :libcxxstringabi_test)
# Factor out the autobuild() call
function do_build(build_path, script, platform, gcc_version)
autobuild(
build_path,
"libcxxstringabi_test",
v"1.0.0",
# Copy in the build_tests sources
[DirectorySource(build_tests_dir)],
script,
# Build for this platform
[platform],
# The products we expect to be build
[libcxxstringabi_test],
# No dependencies
Dependency[];
preferred_gcc_version=gcc_version
)
end
for platform in (Platform("x86_64", "linux"; cxxstring_abi="cxx03"),
Platform("x86_64", "linux"; cxxstring_abi="cxx11"))
# Look across multiple gcc versions; there can be tricksy interactions here
for gcc_version in (v"4", v"6", v"9")
# Do each build within a separate temporary directory
mktempdir() do build_path
script = raw"""
cd ${WORKSPACE}/srcdir/cxxstringabi_tests
make install
install_license /usr/share/licenses/libuv/LICENSE
"""
build_output_meta = @test_logs (:info, "Building for $(triplet(platform))") (:warn, r"Linked library libgcc_s.so.1") match_mode=:any do_build(build_path, script, platform, gcc_version)
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
# Ensure that the library detects as the correct cxxstring_abi:
readmeta(locate(libcxxstringabi_test, prefix)) do ohs
foreach(ohs) do oh
detected_cxxstring_abi = Auditor.detect_cxxstring_abi(oh, platform)
@test detected_cxxstring_abi == cxxstring_abi(platform)
end
end
# Explicitly test cxx string abi mismatches
if gcc_version > v"4"
script = """
mkdir -p \${libdir}
/opt/\${target}/bin/\${target}-g++ -fPIC \\
-D_GLIBCXX_USE_CXX11_ABI=$(cxxstring_abi(platform) == "cxx03" ? "1" : "0") \\
-o \${libdir}/libcxxstringabi_test.\${dlext} \\
-shared \${WORKSPACE}/srcdir/cxxstringabi_tests/lib.cc
install_license /usr/share/licenses/libuv/LICENSE
"""
@test_logs (:warn, r"ignoring our choice of compiler") match_mode=:any begin
do_build(build_path, script, platform, gcc_version)
end
end
end
end
end
# Explicitly test not setting a cxx string abi at all
script = raw"""
cd ${WORKSPACE}/srcdir/cxxstringabi_tests
make install
install_license /usr/share/licenses/libuv/LICENSE
"""
platform = Platform("x86_64", "linux")
mktempdir() do build_path
@test_logs (:warn, r"contains std::string values") match_mode=:any begin
do_build(build_path, script, platform, v"6")
end
end
end
@testset "Auditor - .la removal" begin
for os in ["linux", "macos", "freebsd", "windows"]
platform = Platform("x86_64", os)
mktempdir() do build_path
build_output_meta = nothing
@test_logs (:info, r"Removing libtool file .*/destdir/lib/libfoo.la$") (:info, r"Removing libtool file .*/destdir/lib/libqux.la$") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"libfoo",
v"1.0.0",
# Copy in the libfoo sources
[DirectorySource(build_tests_dir)],
# Build libfoo using autotools to create a real .la file, and also
# create a fake .la file (which should not be removed). Create also a
# symlink libqux.la -> libfoo.la, which will be broken after libfoo.la
# has been deleted: remove libqux.la as well
libfoo_autotools_script * raw"""
touch ${prefix}/lib/libbar.la
ln -s ${prefix}/lib/libfoo.la ${prefix}/lib/libqux.la
""",
# Build for our platform
[platform],
# The products we expect to be build
libfoo_products,
# No dependencies
Dependency[];
verbose = true,
)
end
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Test that `libfoo.la` and `libqux.la` have been removed but `libbar.la` hasn't
contents = list_tarball_files(tarball_path)
@test "lib/libbar.la" in contents
@test !("lib/libfoo.la" in contents)
@test !("lib/libqux.la" in contents)
end
end
end
@testset "Auditor - .dll moving" begin
for platform in [Platform("x86_64", "windows")]
mktempdir() do build_path
build_output_meta = nothing
@test_logs (:warn, r"lib/libfoo.dll should be in `bin`") (:warn, r"Simple buildsystem detected") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"dll_moving",
v"1.0.0",
GitSource[],
# Install a .dll into lib
raw"""
mkdir -p ${prefix}/lib
cc -o ${prefix}/lib/libfoo.${dlext} -shared /usr/share/testsuite/c/dyn_link/libfoo/libfoo.c
install_license /usr/include/ltdl.h
""",
# Build for our platform
[platform],
# Ensure our library product is built
[LibraryProduct("libfoo", :libfoo)],
# No dependencies
Dependency[]
)
end
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Test that `libfoo.dll` gets moved to `bin` if it's a windows
contents = list_tarball_files(tarball_path)
@test "bin/libfoo.$(platform_dlext(platform))" in contents
end
end
end
@testset "Auditor - .dylib identity mismatch" begin
mktempdir() do build_path
no_id = LibraryProduct("no_id", :no_id)
abs_id = LibraryProduct("abs_id", :wrong_id)
wrong_id = LibraryProduct("wrong_id", :wrong_id)
right_id = LibraryProduct("right_id", :wrong_id)
platform = Platform("x86_64", "macos")
build_output_meta = autobuild(
build_path,
"dll_moving",
v"1.0.0",
FileSource[],
# Install a .dll into lib
raw"""
mkdir -p "${libdir}"
SRC=/usr/share/testsuite/c/dyn_link/libfoo/libfoo.c
cc -o ${libdir}/no_id.${dlext} -shared $SRC
cc -o ${libdir}/abs_id.${dlext} -Wl,-install_name,${libdir}/abs_id.${dlext} -shared $SRC
cc -o ${libdir}/wrong_id.${dlext} -Wl,-install_name,@rpath/totally_different.${dlext} -shared $SRC
cc -o ${libdir}/right_id.${dlext} -Wl,-install_name,@rpath/right_id.${dlext} -shared $SRC
install_license /usr/include/ltdl.h
""",
# Build for MacOS
[platform],
# Ensure our executable products are built
Product[no_id, abs_id, wrong_id, right_id],
# No dependencies
Dependency[],
)
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
# Helper to extract the dylib id of a path
function get_dylib_ids(path)
return readmeta(path) do ohs
map(ohs) do oh
dylib_id_lcs = [lc for lc in MachOLoadCmds(oh) if isa(lc, MachOIdDylibCmd)]
@test !isempty(dylib_id_lcs)
return dylib_name(first(dylib_id_lcs))
end
end
end
# Locate the build products within the prefix, ensure that all the dylib IDs
# now match the pattern `@rpath/$(basename(p))`
no_id_path = locate(no_id, prefix; platform=platform)
abs_id_path = locate(abs_id, prefix; platform=platform)
right_id_path = locate(right_id, prefix; platform=platform)
for p in (no_id_path, abs_id_path, right_id_path)
@test any(startswith.(p, libdirs(prefix)))
@test all(get_dylib_ids(p) .== "@rpath/$(basename(p))")
end
# Only if it already has an `@rpath/`-ified ID, it doesn't get touched.
wrong_id_path = locate(wrong_id, prefix; platform=platform)
@test any(startswith.(wrong_id_path, libdirs(prefix)))
@test all(get_dylib_ids(wrong_id_path) .== "@rpath/totally_different.dylib")
# Ensure that this binary is codesigned
@test BinaryBuilder.Auditor.check_codesigned(right_id_path, platform)
end
end
@testset "Auditor - absolute paths" begin
mktempdir() do build_path
sharedir = joinpath(realpath(build_path), "share")
mkpath(sharedir)
open(joinpath(sharedir, "foo.conf"), "w") do f
write(f, "share_dir = \"$sharedir\"")
end
# Test that `audit()` warns about an absolute path within the prefix
@test_logs (:warn, r"share/foo.conf contains an absolute path") match_mode=:any begin
Auditor.audit(Prefix(build_path); verbose=true)
end
end
end
@testset "Auditor - broken symlinks" begin
mktempdir() do build_path
bindir = joinpath(realpath(build_path), "bin")
mkpath(bindir)
# Test both broken and working (but external) symlinks
symlink("../../artifacts/1a2b3/lib/libzmq.dll.a", joinpath(bindir, "libzmq.dll.a"))
# The following symlinks shouldn't raise a warning
symlink("/bin/bash", joinpath(bindir, "bash.exe"))
symlink("libfoo.so.1.2.3", joinpath(bindir, "libfoo.so"))
# Test that `audit()` warns about broken symlinks
@test_logs (:warn, r"Broken symlink: bin/libzmq.dll.a") match_mode=:any begin
Auditor.warn_deadlinks(build_path)
end
end
end
@testset "Auditor - gcc version" begin
# These tests assume our gcc version is concrete (e.g. that Julia is linked against libgfortran)
our_libgfortran_version = libgfortran_version(platform)
@test our_libgfortran_version != nothing
mktempdir() do build_path
hello_world = ExecutableProduct("hello_world_fortran", :hello_world_fortran)
build_output_meta = @test_logs (:warn, r"CompilerSupportLibraries_jll") (:warn, r"Linked library libgfortran.so.5") (:warn, r"Linked library libquadmath.so.0") (:warn, r"Linked library libgcc_s.so.1") match_mode=:any begin
autobuild(
build_path,
"hello_fortran",
v"1.0.0",
# No sources
FileSource[],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Build fortran hello world
make -j${nproc} -sC /usr/share/testsuite/fortran/hello_world install
# Install fake license just to silence the warning
install_license /usr/share/licenses/libuv/LICENSE
""",
# Build for our platform
[platform],
# The products we expect to be built
Product[hello_world],
# Note: we purposefully don't require CompilerSupportLibraries, even if we
# should, but the `@test_logs` above makes sure the audit warns us about
# this problem.
Dependency[];
)
end
# Extract our platform's build, run the hello_world tests:
output_meta = select_platform(build_output_meta, platform)
@test output_meta != nothing
tarball_path, tarball_hash = output_meta[1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
# Attempt to run the executable, we expect it to work since it's our platform:
hello_world_path = locate(hello_world, Prefix(testdir); platform=platform)
with_libgfortran() do
@test readchomp(`$hello_world_path`) == "Hello, World!"
end
# If we audit the testdir, pretending that we're trying to build an ABI-agnostic
# tarball, make sure it warns us about it.
@test_logs (:warn, r"links to libgfortran!") match_mode=:any begin
@test !Auditor.audit(Prefix(testdir); platform=BinaryBuilderBase.abi_agnostic(platform), autofix=false)
# Make sure audit is otherwise happy with the executable
# Note by Mosè: this test was introduced before
# https://github.com/JuliaPackaging/BinaryBuilder.jl/pull/1240 and relied on the
# fact audit was ok with not depending on CSL for packages needing GCC
# libraries, but that was a fallacious expectation. At the moment I don't know
# how to meaningfully use this test, leaving here as broken until we come up
# with better ideas (just remove the test?).
@test Auditor.audit(Prefix(testdir); platform=platform, autofix=false) broken=true
end
# Let's pretend that we're building for a different libgfortran version:
# audit should warn us.
libgfortran_versions = (3, 4, 5)
other_libgfortran_version = libgfortran_versions[findfirst(v -> v != our_libgfortran_version.major, libgfortran_versions)]
@test_logs (:warn, Regex("but we are supposedly building for libgfortran$(other_libgfortran_version)")) (:warn, r"Linked library libgfortran.so.5") (:warn, r"Linked library libquadmath.so.0") (:warn, r"Linked library libgcc_s.so.1") readmeta(hello_world_path) do ohs
foreach(ohs) do oh
p = deepcopy(platform)
p["libgfortran_version"] = "$(other_libgfortran_version).0.0"
@test !Auditor.audit(Prefix(testdir); platform=p, autofix=false)
end
end
end
end
@testset "Auditor - soname matching" begin
mktempdir() do build_path
build_output_meta = nothing
linux_platform = Platform("x86_64", "linux")
@test_logs (:info, r"creating link to libfoo\.so\.1\.0\.0") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"soname_matching",
v"1.0.0",
# No sources
FileSource[],
# Build the library only with the versioned name
raw"""
mkdir -p "${libdir}"
cc -o "${libdir}/libfoo.${dlext}.1.0.0" -fPIC -shared /usr/share/testsuite/c/dyn_link/libfoo/libfoo.c
# Set the soname to a non-existing file
patchelf --set-soname libfoo.so "${libdir}/libfoo.${dlext}.1.0.0"
""",
# Build for Linux
[linux_platform],
# Ensure our library product is built
[LibraryProduct("libfoo", :libfoo)],
# No dependencies
Dependency[];
autofix = true,
verbose = true,
require_license = false
)
end
# Extract our platform's build
@test haskey(build_output_meta, linux_platform)
tarball_path, tarball_hash = build_output_meta[linux_platform][1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
@test readlink(joinpath(testdir, "lib", "libfoo.so")) == "libfoo.so.1.0.0"
end
end
@testset "Auditor - rpaths" begin
@testset "$platform" for platform in (Platform("x86_64", "linux"; libc="glibc"), Platform("x86_64", "macos"))
mktempdir() do build_path
build_output_meta = nothing
@test_logs (:info, "Building for $(triplet(platform))") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"rpaths",
v"1.0.0",
# No sources
FileSource[],
# Build two libraries, `libbar` in `${libdir}/qux/` and `libfoo` in
# `${libdir}`, with the latter linking to the former.
raw"""
mkdir -p ${libdir}/qux
echo "int bar(){return 38;}" | gcc -x c -shared -fPIC - -o ${libdir}/qux/libbar.${dlext}
echo "extern int bar(); int foo(){return bar() + 4;}" | gcc -x c -shared -fPIC - -o ${libdir}/libfoo.${dlext} -L${libdir}/qux -lbar -Wl,-rpath,${libdir}/qux
""",
[platform],
# Ensure our library products are built
[LibraryProduct("libbar", :libbar, "\$libdir/qux"), LibraryProduct("libfoo", :libfoo)],
# No dependencies
Dependency[];
require_license = false
)
end
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
# Make sure rpath of libbar is empty
@test Auditor._rpaths(joinpath(testdir, "lib", "qux", "libbar.$(platform_dlext(platform))")) == []
# Make sure the rpath of libfoo contains only `$ORIGIN/qux`, with the relative
# path handled correctly.
libfoo_rpaths = Auditor._rpaths(joinpath(testdir, "lib", "libfoo.$(platform_dlext(platform))"))
@test (Sys.isapple(platform) ? "@loader_path" : "\$ORIGIN") * "/qux" in libfoo_rpaths
# Currently we don't filter out absolute rpaths for macOS libraries, which is not ideal.
@test length(libfoo_rpaths) == 1 broken=Sys.isapple(platform)
end
end
@testset "GCC libraries" begin
platform = Platform("x86_64", "linux"; libc="glibc", libgfortran_version=v"5")
mktempdir() do build_path
build_output_meta = nothing
@test_logs (:info, "Building for $(triplet(platform))") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"rpaths",
v"2.0.0",
# No sources
FileSource[],
# Build two libraries, `libbar` in `${libdir}/qux/` and `libfoo` in
# `${libdir}`, with the latter linking to the former.
raw"""
# Build fortran hello world
make -j${nproc} -sC /usr/share/testsuite/fortran/hello_world install
# Install fake license just to silence the warning
install_license /usr/share/licenses/libuv/LICENSE
""",
[platform],
# Ensure our library products are built
[ExecutableProduct("hello_world_fortran", :hello_world_fortran)],
# Dependencies: add CSL
[Dependency("CompilerSupportLibraries_jll")];
autofix=true,
)
end
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Ensure reproducibility of build
@test build_output_meta[platform][3] == Base.SHA1("0165cfbbbb8e521707299d649359f2bfdc28f204")
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
# Make sure auditor set the rpath of `hello_world`, even if it links only to
# libgfortran.
@test Auditor._rpaths(joinpath(testdir, "bin", "hello_world_fortran")) == ["\$ORIGIN/../lib"]
end
end
end
@testset "Auditor - execution permission" begin
mktempdir() do build_path
build_output_meta = nothing
product = LibraryProduct("libfoo", :libfoo)
@test_logs (:info, r"Making .*libfoo.* executable") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"exec",
v"1.0.0",
# No sources
FileSource[],
# Build a library without execution permissions
raw"""
mkdir -p "${libdir}"
cc -o "${libdir}/libfoo.${dlext}" -fPIC -shared /usr/share/testsuite/c/dyn_link/libfoo/libfoo.c
chmod 640 "${libdir}/libfoo.${dlext}"
""",
# Build for our platform
[platform],
# Ensure our library product is built
[product],
# No dependencies
Dependency[];
verbose = true,
require_license = false
)
end
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
libfoo_path = joinpath(testdir, build_output_meta[platform][4][product]["path"])
# Tar.jl normalizes permissions of executable files to 0o755, instead of
# recording exact original permissions:
# https://github.com/JuliaIO/Tar.jl/blob/37766a22f5a6ac9f07022d83debd5db7d7a4b896/README.md#permissions
@test_broken filemode(libfoo_path) & 0o777 == 0o750
end
end
# References:
# * https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/1232
# * https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/1245
@testset "Auditor - Reproducible libraries on Windows" begin
platform = Platform("i686", "windows")
expected_git_shas = Dict(
v"4" => Base.SHA1("1b625af3aa29c4b4b398f1eeaccc83d781bca1a5"),
v"6" => Base.SHA1("61767c3a66a66caeed84ee747a95021a94e77e3d"),
)
@testset "gcc version $(gcc_version)" for gcc_version in (v"4", v"6")
mktempdir() do build_path
build_output_meta = nothing
product = LibraryProduct("libfoo", :libfoo)
@test_logs (:info, r"Normalising timestamps in import library") match_mode=:any begin
build_output_meta = autobuild(
build_path,
"implib",
v"1.0.0",
# No sources
FileSource[],
# Build a library without execution permissions
raw"""
mkdir -p "${libdir}"
cd "${libdir}"
echo 'int foo(){ return 42; }' | cc -x c -shared - -o libfoo.${dlext} -Wl,--out-implib,libfoo.${dlext}.a
""",
# Build for Windows
[platform],
# Ensure our library product is built
[product],
# No dependencies
Dependency[];
verbose = true,
require_license = false,
preferred_gcc_version = gcc_version,
)
end
# Extract our platform's build
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Ensure reproducibility of build
@test build_output_meta[platform][3] == expected_git_shas[gcc_version]
end
end
end
@testset "Auditor - other checks" begin
platform = Platform("armv7l", "linux"; call_abi = "eabihf", libc = "glibc")
mktempdir() do build_path
build_output_meta = @test_logs (:error, r"libsoft.so does not match the hard-float ABI") match_mode=:any begin
autobuild(
build_path,
"hard_float_ABI",
v"1.0.0",
# No sources
FileSource[],
# Build a library which doesn't link to the standard library and
# forces the soft-float ABI
raw"""
mkdir -p "${libdir}" "${bindir}"
# This library has hard-float ABI
echo 'int test() { return 0; }' | cc -shared -fPIC -o "${libdir}/libhard.${dlext}" -x c -
# This library has soft-float ABI
echo 'int _start() { return 0; }' | /opt/${target}/bin/${target}-gcc -nostdlib -shared -mfloat-abi=soft -o "${libdir}/libsoft.${dlext}" -x c -
# hello_world built by Go doesn't specify any float ABI
make -C /usr/share/testsuite/go/hello_world/
cp "/tmp/testsuite/${target}/go/hello_world/hello_world" "${bindir}/hello_world"
""",
# Build for Linux armv7l hard-float
[platform],
# Ensure our library product is built
[
LibraryProduct("libhard", :libhard),
LibraryProduct("libsoft", :libsoft),
ExecutableProduct("hello_world", :hello_world),
],
# No dependencies
Dependency[];
compilers = [:c, :go],
verbose = true,
require_license = false
)
end
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
# Remove libsoft.so, we want to run audit only on the other products
rm(joinpath(testdir, "lib", "libsoft.so"))
# Make sure `hello_world` passes the float ABI check even if it doesn't
# set `EF_ARM_ABI_FLOAT_HARD`.
@test Auditor.audit(Prefix(testdir); platform=platform, require_license=false)
end
end
@testset "valid_library_path" begin
linux = Platform("x86_64", "linux")
macos = Platform("x86_64", "macos")
windows = Platform("x86_64", "windows")
@test valid_library_path("/usr/libc.dylib", macos)
@test !valid_library_path("/usr/libc.dylib.", macos)
@test !valid_library_path("/usr/libc.dylib.1", macos)
@test !valid_library_path("/usr/libc.dylib", linux)
@test !valid_library_path("/usr/libc.dylib", windows)
@test valid_library_path("libc.dll", windows)
@test !valid_library_path("libc.dll.1", windows)
@test !valid_library_path("libc.dll", linux)
@test !valid_library_path("libc.dll", macos)
@test valid_library_path("/usr/libc.so", linux)
@test valid_library_path("/usr/libc.so.1", linux)
@test valid_library_path("/usr/libc.so.1.2", linux)
@test !valid_library_path("/usr/libc.so.", linux)
@test !valid_library_path("/usr/libc.sot", linux)
@test !valid_library_path("/usr/libc.so", macos)
@test !valid_library_path("/usr/libc.so", windows)
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 10475 | ## Basic tests for simple utilities within BB
using BinaryBuilder, Test, Pkg, UUIDs
using BinaryBuilder: preferred_runner, resolve_jlls, CompilerShard, preferred_libgfortran_version, preferred_cxxstring_abi, gcc_version, available_gcc_builds, getversion, generate_compiler_wrappers!, getpkg, build_project_dict
using BinaryBuilder.BinaryBuilderBase
using BinaryBuilder.Wizard
@testset "File Collection" begin
temp_prefix() do prefix
# Create a file and a link, ensure that only the one file is returned by collect_files()
f = joinpath(prefix, "foo")
f_link = joinpath(prefix, "foo_link")
touch(f)
symlink(f, f_link)
d = joinpath(prefix, "bar")
d_link = joinpath(prefix, "bar_link")
mkpath(d)
symlink(d, d_link)
files = collect_files(prefix)
@test length(files) == 3
@test realpath(f) in files
@test realpath(f_link) in files
@test !(realpath(d) in files)
@test !(realpath(d_link) in files)
@test d_link in files
collapsed_files = collapse_symlinks(files)
@test length(collapsed_files) == 1
@test realpath(f) in collapsed_files
files = collect_files(prefix, exclude_dirs = false)
@test length(files) == 4
@test realpath(f) in files
@test realpath(f_link) in files
@test realpath(d) in files
@test realpath(d_link) in files
files = collect_files(prefix, islink)
@test length(files) == 2
@test !(realpath(f) in files)
@test f_link in files
@test !(realpath(d) in files)
@test d_link in files
files = collect_files(prefix, islink, exclude_dirs = false)
@test length(files) == 2
@test !(realpath(f) in files)
@test f_link in files
@test !(realpath(d) in files)
@test d_link in files
end
end
@testset "environment and history saving" begin
mktempdir() do temp_path
# This is a litmus test, to catch any errors before we do a `@test_throws`
@test_logs (:error, r"^Unable to find valid license file") match_mode=:any autobuild(
temp_path,
"this_will_pass",
v"1.0.0",
# No sources to speak of
FileSource[],
# Just exit with code 0
"""
exit 0
""",
# Build for this platform
[platform],
# No products
Product[],
# No dependencies
Dependency[],
)
@test_throws ErrorException autobuild(
temp_path,
"this_will_fail",
v"1.0.0",
FileSource[],
# Simple script that just sets an environment variable
"""
MARKER=1
exit 1
""",
[platform],
Product[],
Dependency[],
)
# build_path is the nonce'd build directory
build_path = joinpath(temp_path, "build", triplet(platform))
build_path = joinpath(build_path, first(readdir(build_path)))
# Ensure that we get a metadir, and that our history and .env files are in there!
metadir = joinpath(build_path, "metadir")
@test isdir(metadir)
hist_file = joinpath(metadir, ".bash_history")
env_file = joinpath(metadir, ".env")
@test isfile(hist_file)
@test isfile(env_file)
# Test that exit 1 is in .bash_history
@test occursin("\nexit 1\n", read(open(hist_file), String))
# Test that MARKER=1 is in .env:
@test occursin("\nMARKER=1\n", read(open(env_file), String))
# Delete the build path
rm(build_path, recursive = true)
end
end
@testset "Debug Prompt (Flag Generated Logs)" begin
mktempdir() do build_path
log_dir = joinpath(build_path, "srcdir")
mkdir(log_dir)
@test "Build failed, launching debug shell:" == BinaryBuilder.compose_debug_prompt(build_path)
logfile_path = joinpath(log_dir, "errors.log")
write(logfile_path, "sample log message")
@test "Build failed, the following log files were generated:\n - $(replace(logfile_path, "$build_path" => "\${WORKSPACE}"))\n\nLaunching debug shell:\n" == BinaryBuilder.compose_debug_prompt(build_path)
end
end
@testset "Wizard Utilities" begin
# Make sure canonicalization does what we expect
zmq_url = "https://github.com/zeromq/zeromq3-x/releases/download/v3.2.5/zeromq-3.2.5.tar.gz"
@test Wizard.canonicalize_source_url(zmq_url) == zmq_url
this_url = "https://github.com/JuliaPackaging/BinaryBuilder.jl/blob/1fee900486baedfce66ddb24872133ef36b9d899/test/wizard.jl"
this_url_ans = "https://raw.githubusercontent.com/JuliaPackaging/BinaryBuilder.jl/1fee900486baedfce66ddb24872133ef36b9d899/test/wizard.jl"
@test Wizard.canonicalize_file_url(this_url) == this_url_ans
# Make sure normalization does what we expect
@test Wizard.normalize_name("foo/libfoo.tar.gz") == "libfoo"
@test Wizard.normalize_name("foo/libfoo-2.dll") == "libfoo"
@test Wizard.normalize_name("libfoo") == "libfoo"
# with_gitcreds
local creds_outer = nothing
Wizard.with_gitcreds("user", "password") do creds
@test creds isa LibGit2.UserPasswordCredential
@test hasproperty(creds, :user)
@test hasproperty(creds, :pass)
creds_outer = creds # assign to parent scope, so that we can check on it later
@test creds.user == "user"
@test String(read(creds.pass)) == "password"
@test !Base.isshredded(creds.pass)
end
@test creds_outer isa LibGit2.UserPasswordCredential
@test creds_outer.user == ""
@test Base.isshredded(creds_outer.pass)
@test eof(creds_outer.pass)
# in case it throws:
creds_outer = nothing
@test_throws ErrorException Wizard.with_gitcreds("user", "password") do creds
creds_outer = creds
error("...")
end
@test creds_outer isa LibGit2.UserPasswordCredential
@test creds_outer.user == ""
@test Base.isshredded(creds_outer.pass)
@test eof(creds_outer.pass)
end
@testset "State serialization" begin
state = Wizard.WizardState()
state.step = :step34
state.platforms = [Platform("x86_64", "linux")]
state.source_urls = ["http://127.0.0.1:14444/a/source.tar.gz"]
state.source_files = [BinaryBuilder.SetupSource{ArchiveSource}("/tmp/source.tar.gz", bytes2hex(sha256("a")), "")]
state.name = "libfoo"
state.version = v"1.0.0"
state.dependencies = [Dependency(PackageSpec(;name="Zlib_jll")),
Dependency(PackageSpec(;name="CompilerSupportLibraries_jll"))]
state.history = "exit 1"
io = Dict()
Wizard.serialize(io, state)
new_state = Wizard.unserialize(io)
for field in fieldnames(Wizard.WizardState)
@test getfield(state, field) == getfield(new_state, field)
end
end
# Test that updating Yggdrasil works
@testset "Yggdrasil" begin
Core.eval(Wizard, :(yggdrasil_updated = false))
@test_logs (:info, r"Yggdrasil") Wizard.get_yggdrasil()
end
@testset "Registration utils" begin
name = "CGAL"
version = v"1"
dependencies = [Dependency("boost_jll"), Dependency("GMP_jll"),
Dependency("MPFR_jll"), Dependency("Zlib_jll")]
dict = build_project_dict(name, version, dependencies)
@test dict["name"] == "$(name)_jll"
@test dict["version"] == "1.0.0"
@test dict["uuid"] == "8fcd9439-76b0-55f4-a525-bad0597c05d8"
@test dict["compat"] == Dict{String,Any}("julia" => "1.0", "JLLWrappers" => "1.2.0", "Pkg" => "< 0.0.1, 1", "Libdl" => "< 0.0.1, 1", "Artifacts" => "< 0.0.1, 1")
@test all(in.(
(
"Pkg" => "44cfe95a-1eb2-52ea-b672-e2afdf69b78f",
"Libdl" => "8f399da3-3557-5675-b5ff-fb832c97cbdb",
"GMP_jll" => "781609d7-10c4-51f6-84f2-b8444358ff6d",
"MPFR_jll" => "3a97d323-0669-5f0c-9066-3539efd106a3",
"Zlib_jll" => "83775a58-1f1d-513f-b197-d71354ab007a",
"boost_jll" => "28df3c45-c428-5900-9ff8-a3135698ca75",
), Ref(dict["deps"])))
project = Pkg.Types.Project(dict)
@test project.name == "$(name)_jll"
@test project.uuid == UUID("8fcd9439-76b0-55f4-a525-bad0597c05d8")
# Make sure that a `BuildDependency` can't make it to the list of
# dependencies of the new JLL package
@test_throws AssertionError build_project_dict(name, version, [BuildDependency("Foo_jll")])
version = v"1.6.8"
next_version = BinaryBuilder.get_next_wrapper_version("Xorg_libX11", version)
@test next_version.major == version.major
@test next_version.minor == version.minor
@test next_version.patch == version.patch
# Ensure passing a Julia dependency bound works
dict = build_project_dict(name, version, dependencies, "1.4")
@test dict["compat"] == Dict{String,Any}("julia" => "1.4", "JLLWrappers" => "1.2.0", "Pkg" => "< 0.0.1, 1", "Libdl" => "< 0.0.1, 1", "Artifacts" => "< 0.0.1, 1")
dict = build_project_dict(name, version, dependencies, "~1.4")
@test dict["compat"] == Dict{String,Any}("julia" => "~1.4", "JLLWrappers" => "1.2.0", "Pkg" => "< 0.0.1, 1", "Libdl" => "< 0.0.1, 1", "Artifacts" => "< 0.0.1, 1")
@test_throws ErrorException build_project_dict(name, version, dependencies, "nonsense")
# Ensure passing a JLLWrappers dependency bound works
dict = build_project_dict(name, version, dependencies; jllwrappers_compat="1.4.0")
@test dict["compat"] == Dict{String,Any}("julia" => "1.0", "JLLWrappers" => "1.4.0", "Pkg" => "< 0.0.1, 1", "Libdl" => "< 0.0.1, 1", "Artifacts" => "< 0.0.1, 1")
# Ensure passing compat bounds works
dependencies = [
Dependency(PackageSpec(name="libLLVM_jll"), compat="=9.0.0"),
]
dict = build_project_dict("Clang", v"9.0.1+2", dependencies)
@test dict["compat"]["julia"] == "1.0"
@test dict["compat"]["libLLVM_jll"] == "=9.0.0"
dependencies = [
Dependency(PackageSpec(name="libLLVM_jll"), compat="8.3 - 10"),
]
dict = build_project_dict("Clang", v"9.0.1+2", dependencies)
@test dict["compat"]["julia"] == "1.0"
@test dict["compat"]["libLLVM_jll"] == "8.3 - 10"
dependencies = [
Dependency(PackageSpec(name="libLLVM_jll"), compat="8.3"),
]
dict = build_project_dict("Clang", v"9.0.1+2", dependencies)
@test dict["compat"]["libLLVM_jll"] == "8.3"
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 19281 | @testset "Building libfoo" begin
# Test building with both `make` and `cmake`, using directory and git repository
for script in (libfoo_make_script, libfoo_cmake_script, libfoo_meson_script)
# Do build within a separate temporary directory
mktempdir() do build_path
# Create local git repository of `libfoo` sources
git_path = joinpath(build_path, "libfoo.git")
mkpath(git_path)
# Copy files in, commit them. This is the commit we will build.
repo = LibGit2.init(git_path)
LibGit2.commit(repo, "Initial empty commit")
libfoo_src_dir = joinpath(build_tests_dir, "libfoo")
run(`cp -r $(libfoo_src_dir)/$(readdir(libfoo_src_dir)) $(git_path)/`)
for file in readdir(git_path)
LibGit2.add!(repo, file)
end
commit = LibGit2.commit(repo, "Add libfoo files")
# Add another commit to ensure that the git checkout is getting the right commit.
open(joinpath(git_path, "Makefile"), "w") do io
println(io, "THIS WILL BREAK EVERYTHING")
end
LibGit2.add!(repo, "Makefile")
LibGit2.commit(repo, "Break Makefile")
for source in (DirectorySource(build_tests_dir),
GitSource(git_path, bytes2hex(LibGit2.raw(LibGit2.GitHash(commit)))))
build_output_meta = autobuild(
build_path,
"libfoo",
v"1.0.0",
# Copy in the libfoo sources
[source],
# Use the particular build script we're interested in
script,
# Build for this platform
[platform],
# The products we expect to be build
libfoo_products,
# No dependencies
Dependency[];
# Don't do audit passes
skip_audit=true,
# Make one verbose for the coverage. We do it all for the coverage, Morty.
verbose=true,
)
@test haskey(build_output_meta, platform)
tarball_path, tarball_hash = build_output_meta[platform][1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Ensure that the file contains what we expect
contents = list_tarball_files(tarball_path)
@test "bin/fooifier$(platform_exeext(platform))" in contents
@test "lib/libfoo.$(platform_dlext(platform))" in contents
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
rm(testdir, recursive=true, force=true)
mkpath(testdir)
unpack(tarball_path, testdir)
# Ensure we can use it
prefix = Prefix(testdir)
fooifier_path = joinpath(bindir(prefix), "fooifier$(platform_exeext(platform))")
libfoo_path = first(filter(f -> isfile(f), joinpath.(libdirs(prefix), "libfoo.$(platform_dlext(platform))")))
# We know that foo(a, b) returns 2*a^2 - b
result = 2*2.2^2 - 1.1
# Test that we can invoke fooifier
@test !success(`$fooifier_path`)
@test success(`$fooifier_path 1.5 2.0`)
@test parse(Float64,readchomp(`$fooifier_path 2.2 1.1`)) ≈ result
# Test that we can dlopen() libfoo and invoke it directly
libfoo = Libdl.dlopen_e(libfoo_path)
@test libfoo != C_NULL
foo = Libdl.dlsym_e(libfoo, :foo)
@test foo != C_NULL
@test ccall(foo, Cdouble, (Cdouble, Cdouble), 2.2, 1.1) ≈ result
Libdl.dlclose(libfoo)
end
end
end
end
if lowercase(get(ENV, "BINARYBUILDER_FULL_SHARD_TEST", "false")) == "true"
@info("Beginning full shard test... (this can take a while)")
shards_to_test = supported_platforms()
else
shards_to_test = [platform]
end
# Expand the selected shards across all gfortran versions and C++ string ABIs
shards_to_test = expand_cxxstring_abis(expand_gfortran_versions(shards_to_test))
# Perform a sanity test on each and every shard.
@testset "Shard testsuites" begin
@testset "$(shard)" for shard in shards_to_test
platforms = [shard]
mktempdir() do build_path
products = [
ExecutableProduct("hello_world_c", :hello_world_c),
ExecutableProduct("hello_world_cxx", :hello_world_cxx),
ExecutableProduct("hello_world_fortran", :hello_world_fortran),
ExecutableProduct("hello_world_go", :hello_world_go),
]
if !(platforms_match(shard, Platform("i686", "windows")) ||
platforms_match(shard, Platform("aarch64", "freebsd")))
# Rust is broken on 32-bit Windows and unavailable on FreeBSD AArch64, let's skip it
push!(products, ExecutableProduct("hello_world_rust", :hello_world_rust))
end
compilers = [:c, :go]
# Don't even ask for Rust on FreeBSD AArch64
if !platforms_match(shard, Platform("aarch64", "freebsd"))
push!(compilers, :rust)
end
build_output_meta = autobuild(
build_path,
"testsuite",
v"1.0.0",
# No sources
DirectorySource[],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Skip Rust on 32-bit Windows, the compiler exists but it's totally unusable
if [[ "${target}" == i686-*-mingw* ]]; then
while which rustc &> /dev/null; do rm $(which rustc); done
fi
# Build testsuite
make -j${nproc} -sC /usr/share/testsuite install
# Install fake license just to silence the warning
install_license /usr/share/licenses/libuv/LICENSE
""",
# Build for ALL the platforms
platforms,
products,
# Express a dependency on CSL to silence warning for fortran code
[Dependency("CompilerSupportLibraries_jll")];
# We need to be able to build go and rust and whatnot
compilers,
)
# Test that we built everything (I'm not entirely sure how I expect
# this to fail without some kind of error being thrown earlier on,
# to be honest I just like seeing lots of large green numbers.)
@test length(keys(platforms)) == length(keys(build_output_meta))
# Extract our platform's build, run the hello_world tests:
output_meta = select_platform(build_output_meta, platform)
if !isnothing(output_meta)
tarball_path, tarball_hash = output_meta[1:2]
# Ensure the build products were created
@test isfile(tarball_path)
# Unpack it somewhere else
@test verify(tarball_path, tarball_hash)
testdir = joinpath(build_path, "testdir")
mkdir(testdir)
unpack(tarball_path, testdir)
prefix = Prefix(testdir)
for product in products
hw_path = locate(product, prefix)
@test hw_path !== nothing && isfile(hw_path)
with_libgfortran() do
@test readchomp(`$hw_path`) == "Hello, World!"
end
end
end
end
end
end
@testset "gfortran linking specialty flags" begin
# We test things like linking against libgfortran with `$FC` on a couple of troublesome platforms
x86_64_linux = Platform("x86_64", "linux"; libgfortran_version=v"3")
aarch64_linux = Platform("aarch64", "linux"; libgfortran_version=v"3")
ppc64le_linux = Platform("powerpc64le", "linux"; libgfortran_version=v"3")
armv7l_linux = Platform("armv7l", "linux"; libgfortran_version=v"3")
x86_64_macos = Platform("x86_64", "macos"; libgfortran_version=v"3")
i686_windows = Platform("i686", "windows"; libgfortran_version=v"3")
troublesome_platforms = [
x86_64_linux,
ppc64le_linux,
armv7l_linux,
aarch64_linux,
x86_64_macos,
i686_windows,
]
expected_git_shas = Dict(
v"4" => Dict(
x86_64_linux => Base.SHA1("fb3897274fe9b293eb6bfb65063895946e655114"),
ppc64le_linux => Base.SHA1("53a4e6c7e7d05bf245a8b794133b963bb1ebb1c2"),
armv7l_linux => Base.SHA1("28fc03c35a4d30da70fbdefc69ecc6b6bf93f2fb"),
aarch64_linux => Base.SHA1("c1c06efddc8bdce7b33fc9d8b6859f3c63e429ea"),
x86_64_macos => Base.SHA1("b0f9ef3b42b30f9085d4f9d60c3ea441554c442f"),
i686_windows => Base.SHA1("f39858ccc34a63a648cf21d33ae236bfdd706d09"),
),
v"5" => Dict(
x86_64_linux => Base.SHA1("743b2eac2e096281a2c69f95a2f58a4583824a84"),
ppc64le_linux => Base.SHA1("b663282a6101647c0aa87043a632b6cdc08f761f"),
armv7l_linux => Base.SHA1("9a3273d5c7a41e7c2a5ab58b6b69db49a8533bc1"),
aarch64_linux => Base.SHA1("4bab3a85aceb3e589989f1a11a2f092c5038a6e0"),
x86_64_macos => Base.SHA1("9ddfd323ed25fc02394067c6e863f1cf826a9e5e"),
i686_windows => Base.SHA1("9390a3c24a8e274e6d7245c6c977f97b406bc3f5"),
),
v"6" => Dict(
x86_64_linux => Base.SHA1("0b152c2cc8ff2af82f8d2d0adbbe26e0961131ed"),
ppc64le_linux => Base.SHA1("97b7e5682b3cadc873644931b17894fa2ff05335"),
armv7l_linux => Base.SHA1("267b443b17b99ca2a14ea93d2afc2cce51cad05e"),
aarch64_linux => Base.SHA1("b396b1d94aba8642a68122a3515b26e4397217a0"),
x86_64_macos => Base.SHA1("b211e8c87b83e820416757d6d2985bcd19db7f24"),
i686_windows => Base.SHA1("ae50af4ca8651cb3c8f71f34d0b66ca0d8f14a99"),
),
)
@testset "gcc version $(gcc_version)" for gcc_version in (v"4", v"5", v"6")
mktempdir() do build_path
build_output_meta = autobuild(
build_path,
"gfortran_flags",
v"1.0.0",
# No sources
FileSource[],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Build testsuite
make -j${nproc} -sC /usr/share/testsuite/fortran/hello_world install
# Install fake license just to silence the warning
install_license /usr/share/licenses/libuv/LICENSE
""",
# Build for a few troublesome platforms
troublesome_platforms,
[ExecutableProduct("hello_world_fortran", :hello_world_fortran)],
# Express a dependency on CSL to silence warning for fortran code
[Dependency("CompilerSupportLibraries_jll")];
preferred_gcc_version=gcc_version,
)
for p in troublesome_platforms
# Test build reproducibility
# Note: for some reason, GCC for i686 Windows gives different results on
# different systems, while still being reproducible on each of them:
# https://github.com/JuliaPackaging/BinaryBuilder.jl/pull/1234#issuecomment-1264192726
@test build_output_meta[p][3] == expected_git_shas[gcc_version][p] skip=(Sys.iswindows(p))
end
# Just a simple test to ensure that it worked.
@test length(keys(build_output_meta)) == length(troublesome_platforms)
end
end
# Test that building something that links against gfortran suggests depending on CSL
@test_logs (:warn, r"CompilerSupportLibraries_jll") match_mode=:any begin
mktempdir() do build_path
p = Platform("x86_64", "linux"; libgfortran_version=v"3")
build_output_meta = autobuild(
build_path,
"csl_dependency",
v"1.0.0",
# No sources
FileSource[],
# Build the test suite, install the binaries into our prefix's `bin`
raw"""
# Build testsuite
make -j${nproc} -sC /usr/share/testsuite/fortran/hello_world install
# Install fake license just to silence the warning
install_license /usr/share/licenses/libuv/LICENSE
""",
[p],
[ExecutableProduct("hello_world_fortran", :hello_world_fortran)],
Dependency[],
)
# Test build reproducibility
@test build_output_meta[p][3] == Base.SHA1("95e005d9b057b3a28af61189b9af5613127416a6")
end
end
end
@testset "HostBuildDependency" begin
# A platform for which we certainly can't run executables
p = Platform("x86_64", "freebsd")
mktempdir() do build_path
build_output_meta = autobuild(
build_path,
"host_dep",
v"1.0.0",
# No sources
DirectorySource[],
# Script: run hello_world from the HostBuildDependency
raw"""
hello_world
""",
# Platform
[p],
Product[],
# Install `HelloWorldC_jll` for both the target and the host.
[
HostBuildDependency("HelloWorldC_jll"),
Dependency("HelloWorldC_jll"),
];
# Don't do audit passes
skip_audit=true,
)
@test haskey(build_output_meta, p)
end
end
@testset "Invalid Arguments" begin
mktempdir() do build_path
# Test that invalid JLL names both @warn and error()
@test_logs (:warn, r"BadDependency_jll") (:warn, r"WorseDependency_jll") match_mode=:any begin
@test_throws ErrorException autobuild(
build_path,
"baddeps",
v"1.0.0",
# No sources
FileSource[],
"true",
[HostPlatform()],
Product[],
# Three dependencies; one good, two bad
[
Dependency("Zlib_jll"),
# We hope nobody will ever register something named this
Dependency("BadDependency_jll"),
Dependency("WorseDependency_jll"),
]
)
end
# Test that manually specifying prerelease or build number in our src_version is an error()
@test_throws ErrorException autobuild(
build_path,
"badopenssl",
v"1.1.1+c",
GitSource[],
"true",
[HostPlatform()],
Product[],
Dependency[],
)
@test_throws ErrorException autobuild(
build_path,
"test",
v"1.2.3-4",
GitSource[],
"true",
[HostPlatform()],
Product[],
Dependency[],
)
end
@test_throws ErrorException build_tarballs(String[], "", v"1.0", GitSource[], "", supported_platforms(; experimental=true), LibraryProduct[], Dependency[])
@test_throws ErrorException build_tarballs(String[], "1nvalid-name :(", v"4.20.69",
GitSource[], "", supported_platforms(),
LibraryProduct[], Dependency[])
end
@testset "AnyPlatform" begin
mktempdir() do build_path
p = AnyPlatform()
build_output_meta = autobuild(
build_path,
"header",
v"1.0.0",
# No sources
DirectorySource[],
raw"""
mkdir -p ${includedir}/
touch ${includedir}/libqux.h
install_license /usr/share/licenses/MIT
""",
[p],
[FileProduct("include/libqux.h", :libqux_h)],
# No dependencies
Dependency[]
)
@test haskey(build_output_meta, p)
@test build_output_meta[p][3] == Base.SHA1("45c55bfed92bd890d6487c58c4c03e07f5fb8829")
# Test that having a LibraryProduct for AnyPlatform raises an error
@test_throws ErrorException autobuild(
build_path,
"libfoo",
v"1.0.0",
[DirectorySource(build_tests_dir)],
libfoo_cmake_script,
[AnyPlatform()],
libfoo_products,
# No dependencies
Dependency[]
)
end
end
@testset "Building from remote file" begin
build_output_meta = nothing
mktempdir() do build_path
build_output_meta = autobuild(
build_path,
"libconfuse",
v"3.2.2",
# libconfuse source
[ArchiveSource("https://github.com/martinh/libconfuse/releases/download/v3.2.2/confuse-3.2.2.tar.gz",
"71316b55592f8d0c98924242c98dbfa6252153a8b6e7d89e57fe6923934d77d0")],
# Build script for libconfuse
raw"""
cd $WORKSPACE/srcdir/confuse-*/
./configure --prefix=${prefix} --build=${MACHTYPE} --host=${target}
make -j${nproc}
make install
""",
# Build for this platform
[platform],
# The products we expect to be build
[LibraryProduct("libconfuse", :libconfuse)],
# No dependencies
Dependency[];
# Don't do audit passes
skip_audit=true,
)
end
@test haskey(build_output_meta, platform)
end
@testset "Building framework" begin
mac_shards = filter(p -> Sys.isapple(p), shards_to_test)
if isempty(mac_shards)
mac_shards = [Platform("x86_64", "macos")] # Make sure to always also test this using MacOS
end
# The framework is only built as a framework on Mac and using CMake, and a regular lib elsewhere
# Do build within a separate temporary directory
mktempdir() do build_path
products = [FrameworkProduct("fooFramework", :libfooFramework)]
build_output_meta = autobuild(
build_path,
"libfoo",
v"1.0.0",
[DirectorySource(build_tests_dir)],
# Build the test suite, install the binaries into our prefix's `bin`
libfoo_cmake_script,
# Build for ALL the platforms
mac_shards,
products,
# No dependencies
Dependency[];
verbose=true,
)
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 4339 | using JSON, BinaryBuilder, Test
import BinaryBuilder.BinaryBuilderBase: sourcify
@testset "Meta JSON" begin
meta_json_buff = IOBuffer()
# Run autobuild() a few times to generate a moderately complex `meta.json`:
dict = get_meta_json(
"libfoo",
v"1.0.0",
[FileSource("https://julialang.org", "123123"), DirectorySource("./bundled")],
"exit 1",
[Platform("x86_64", "linux")],
Product[LibraryProduct("libfoo", :libfoo), FrameworkProduct("fooFramework", :libfooFramework)],
[Dependency("Zlib_jll")];
)
println(meta_json_buff, JSON.json(dict))
dict = get_meta_json(
"libfoo",
v"1.0.0",
[GitSource("https://github.com/JuliaLang/julia.git", "5d4eaca0c9fa3d555c79dbacdccb9169fdf64b65")],
"exit 0",
[Platform("x86_64", "linux"), Platform("x86_64", "windows")],
Product[ExecutableProduct("julia", :julia), LibraryProduct("libfoo2", :libfoo2; dlopen_flags=[:RTLD_GLOBAL])],
Dependency[];
)
println(meta_json_buff, JSON.json(dict))
# Now, deserialize the info:
seek(meta_json_buff, 0)
# Strip out ending newlines as that makes our while loop below sad
meta_json_buff = IOBuffer(strip(String(take!(meta_json_buff))))
objs = []
while !eof(meta_json_buff)
push!(objs, JSON.parse(meta_json_buff))
end
# Ensure that we get two JSON objects
@test length(objs) == 2
# Merge them, then test that the merged object contains everything we expect
meta = BinaryBuilder.cleanup_merged_object!(BinaryBuilder.merge_json_objects(objs))
@test all(haskey.(Ref(meta), ("name", "version", "script", "platforms", "products", "dependencies")))
@test meta["name"] == "libfoo"
@test meta["version"] == v"1.0.0"
@test length(meta["platforms"]) == 2
@test Platform("x86_64", "linux"; libc="glibc") ∈ meta["platforms"]
@test Platform("x86_64", "windows") ∈ meta["platforms"]
@test length(meta["sources"]) == 3
@test all(in.(
(
FileSource("https://julialang.org", "123123"),
GitSource("https://github.com/JuliaLang/julia.git", "5d4eaca0c9fa3d555c79dbacdccb9169fdf64b65"),
DirectorySource("./bundled"),
), Ref(meta["sources"])))
@test sourcify(Dict("type" => "directory", "path" => "foo")) == DirectorySource("foo")
@test sourcify(Dict("type" => "git", "url" => "https://github.com/JuliaLang/julia.git", "hash" => "12345")) == GitSource("https://github.com/JuliaLang/julia.git", "12345")
@test sourcify(Dict("type" => "file", "url" => "https://julialang.org", "hash" => "98765")) == FileSource("https://julialang.org", "98765")
@test_throws ErrorException sourcify(Dict("type" => "qux"))
@test length(meta["products"]) == 4
@test all(in.((LibraryProduct("libfoo", :libfoo), ExecutableProduct("julia", :julia), LibraryProduct("libfoo2", :libfoo2; dlopen_flags=[:RTLD_GLOBAL]), FrameworkProduct("fooFramework", :libfooFramework)), Ref(meta["products"])))
@test length(meta["script"]) == 2
@test all(in.(("exit 0", "exit 1"), Ref(meta["script"])))
@testset "AnyPlatform" begin
meta_json_buff = IOBuffer()
dict = get_meta_json(
"any_file",
v"1.0.0",
FileSource[],
"exit 1",
[AnyPlatform()],
Product[FileProduct("file", :file)],
BuildDependency[];
)
println(meta_json_buff, JSON.json(dict))
# Deserialize the info:
seekstart(meta_json_buff)
# Strip out ending newlines as that makes our while loop below sad
meta_json_buff = IOBuffer(strip(String(take!(meta_json_buff))))
objs = []
while !eof(meta_json_buff)
push!(objs, JSON.parse(meta_json_buff))
end
# Ensure that we get one JSON object
@test length(objs) == 1
# Platform-independent build: the JSON file doesn't have a "platforms" key
@test !haskey(objs[1], "platforms")
# Merge them, then test that the merged object contains everything we expect
meta = BinaryBuilder.cleanup_merged_object!(BinaryBuilder.merge_json_objects(objs))
# The "platforms" key comes back in the cleaned up object
@test meta["platforms"] == [AnyPlatform()]
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 9536 | using TOML
using JSON
using UUIDs
using GitHub
using BinaryBuilder: jll_uuid, build_project_dict, get_github_author_login, Wizard
module TestJLL end
@testset "JLLs - utils" begin
@test jll_uuid("Zlib_jll") == UUID("83775a58-1f1d-513f-b197-d71354ab007a")
@test jll_uuid("FFMPEG_jll") == UUID("b22a6f82-2f65-5046-a5b2-351ab43fb4e5")
project = build_project_dict("LibFoo", v"1.3.5",
[Dependency("Zlib_jll"),
Dependency(PackageSpec(name = "XZ_jll"), compat = "=2.4.6"),
Dependency(PackageSpec(name = "Preferences", uuid = parse(UUID, "21216c6a-2e73-6563-6e65-726566657250"))),
Dependency("Scratch"),])
@test project["deps"] == Dict("JLLWrappers" => "692b3bcd-3c85-4b1f-b108-f13ce0eb3210",
"Artifacts" => "56f22d72-fd6d-98f1-02f0-08ddc0907c33",
"Pkg" => "44cfe95a-1eb2-52ea-b672-e2afdf69b78f",
"Zlib_jll" => "83775a58-1f1d-513f-b197-d71354ab007a",
"Libdl" => "8f399da3-3557-5675-b5ff-fb832c97cbdb",
"XZ_jll" => "ffd25f8a-64ca-5728-b0f7-c24cf3aae800",
"Preferences" => "21216c6a-2e73-6563-6e65-726566657250",
"Scratch" => "6c6a2e73-6563-6170-7368-637461726353")
@test project["name"] == "LibFoo_jll"
@test project["uuid"] == "b250f842-3251-58d3-8ee4-9a24ab2bab3f"
@test project["compat"] == Dict(
"julia" => "1.0",
"XZ_jll" => "=2.4.6",
"JLLWrappers" => "1.2.0",
"Libdl" => "< 0.0.1, 1",
"Artifacts" => "< 0.0.1, 1",
"Pkg" => "< 0.0.1, 1",
)
@test project["version"] == "1.3.5"
# Make sure BuildDependency's don't find their way to the project
@test_throws AssertionError build_project_dict("LibFoo", v"1.3.5", [Dependency("Zlib_jll"), BuildDependency("Xorg_util_macros_jll")])
# `Pkg` should not be a dependency if we require Julia v1.6.
@test !haskey(BinaryBuilder.build_project_dict("foo", v"1.2", Dependency[], "1.6")["deps"], "Pkg")
gh_auth = Wizard.github_auth(;allow_anonymous=true)
@test get_github_author_login("JuliaPackaging/Yggdrasil", "invalid_hash"; gh_auth) === nothing
@test get_github_author_login("JuliaPackaging/Yggdrasil", "815de56a4440f4e05333c5295d74f1dc9b73ebe3"; gh_auth) === nothing
if gh_auth != GitHub.AnonymousAuth()
@test get_github_author_login("JuliaPackaging/Yggdrasil", "dea7c3fadad16281ead2427f7ab9b32f1c8cb664"; gh_auth) === "Pangoraw"
end
end
@testset "JLLs - building" begin
mktempdir() do build_path
name = "libfoo"
version = v"1.0.0"
sources = [DirectorySource(build_tests_dir)]
# Build for this platform and a platform that isn't this one for sure:
# FreeBSD.
freebsd = Platform("x86_64", "freebsd")
platforms = [platform, freebsd]
# We depend on Zlib_jll only on the host platform, but not on FreeBSD
dependencies = [
Dependency("Zlib_jll"; platforms=[platform]),
RuntimeDependency("Preferences"; top_level=true)
]
# Augment platform
augment_platform_block = """
using Base.BinaryPlatforms
function augment_platform!(platform::Platform)
platform["test"] = "enabled"
return platform
end
"""
# Julia compat. Include Julia v1.6 to exercise the code path which forces lazy
# artifacts when augmenting the platform
julia_compat = "1.6"
# The buffer where we'll write the JSON meta data
buff = IOBuffer()
# First: call `get_meta_json` twice, once for each platform, and write the
# JSON meta data. In this way we can test that merging multiple JSON
# objects work correctly.
for p in platforms
dict = get_meta_json(
name,
version,
sources,
# Use a build script depending on the target platform.
Sys.isfreebsd(p) ? libfoo_make_script : libfoo_meson_script,
[p],
# The products we expect to be build
libfoo_products,
dependencies;
julia_compat,
augment_platform_block,
)
# Generate the JSON file
println(buff, JSON.json(dict))
end
# Now build for real
autobuild(
build_path,
name,
version,
sources,
# Use the particular build script we're interested in
libfoo_make_script,
platforms,
# The products we expect to be build
libfoo_products,
dependencies;
# Run audit passes to make sure the library has the correct soname
skip_audit=false,
)
withenv(
"JULIA_PKG_DEVDIR" => joinpath(build_path, "devdir"),
# Let's pretend to be in Yggdrasil, set the relevant environment
# variables.
"YGGDRASIL" => "true",
"BUILD_SOURCEVERSION" => "0123456789abcdef0123456789abcdef01234567",
"PROJECT" => "L/$(name)",
) do
# What follows loosely mimics what we do to build JLL packages in
# Yggdrasil.
buff = IOBuffer(strip(String(take!(buff))))
objs = []
while !eof(buff)
push!(objs, BinaryBuilder.JSON.parse(buff))
end
# Merging modifies `obj`, so let's keep an unmerged version around
objs_unmerged = deepcopy(objs)
# Merge the multiple outputs into one
merged = BinaryBuilder.merge_json_objects(objs)
BinaryBuilder.cleanup_merged_object!(merged)
BinaryBuilder.cleanup_merged_object!.(objs_unmerged)
# Determine build version
name = merged["name"]
version = merged["version"]
# Filter out build-time dependencies that will not go into the dependencies of
# the JLL packages.
dependencies = Dependency[dep for dep in merged["dependencies"] if is_runtime_dependency(dep)]
lazy_artifacts = merged["lazy_artifacts"]
build_version = BinaryBuilder.get_next_wrapper_version(name, version)
repo = "JuliaBinaryWrappers/$(name)_jll.jl"
code_dir = joinpath(Pkg.devdir(), "$(name)_jll")
download_dir = joinpath(build_path, "products")
# Skip init of the remote repository
# Filter out build-time dependencies also here
for json_obj in [merged, objs_unmerged...]
json_obj["dependencies"] = Dependency[dep for dep in json_obj["dependencies"] if is_runtime_dependency(dep)]
end
tag = "$(name)-v$(build_version)"
upload_prefix = "https://github.com/$(repo)/releases/download/$(tag)"
# This loop over the unmerged objects is necessary in the event that multiple packages are built by a single build_tarballs.jl
for (i,json_obj) in enumerate(objs_unmerged)
from_scratch = (i == 1)
# A test to make sure merging objects and reading them back work
# as expected.
if json_obj["platforms"] == [Platform("x86_64", "freebsd")]
@test occursin("make install", json_obj["script"])
else
@test occursin("MESON_TARGET_TOOLCHAIN", json_obj["script"])
end
BinaryBuilder.rebuild_jll_package(json_obj; download_dir, upload_prefix, verbose=false, from_scratch)
end
env_dir = joinpath(build_path, "foo")
mkpath(env_dir)
Pkg.activate(env_dir)
Pkg.develop(PackageSpec(path=code_dir))
@test isfile(joinpath(code_dir, ".pkg", "select_artifacts.jl"))
@test read(joinpath(code_dir, ".pkg", "platform_augmentation.jl"), String) == augment_platform_block
# Make sure the artifacts are lazy because we are augmenting the platform and
# Julia compat includes versions before v1.7.
artifacts_toml = TOML.parsefile(joinpath(code_dir, "Artifacts.toml"))
for artifact in artifacts_toml["libfoo"]
@test artifact["lazy"]
end
# Make sure we use Zlib_jll only in the wrapper for the host
# platform and not the FreeBSD one.
platform_wrapper = joinpath(code_dir, "src", "wrappers", triplet(platform) * ".jl")
freebsd_wrapper = joinpath(code_dir, "src", "wrappers", triplet(freebsd) * ".jl")
main_src = joinpath(code_dir, "src", name * "_jll.jl")
@test contains(readchomp(platform_wrapper), "using Zlib_jll")
@test !contains(readchomp(freebsd_wrapper), "using Zlib_jll")
@test !contains(readchomp(platform_wrapper), "using Preferences")
@test !contains(readchomp(freebsd_wrapper), "using Preferences")
@test contains(readchomp(main_src), "using Preferences")
# Load JLL package and run some actual code from it.
@eval TestJLL using libfoo_jll
@test 6.08 ≈ @eval TestJLL ccall((:foo, libfoo), Cdouble, (Cdouble, Cdouble), 2.3, 4.5)
@test @eval TestJLL libfoo_jll.is_available()
end
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 2460 | using Pkg
using BinaryBuilder, BinaryBuilder.BinaryBuilderBase
using BinaryBuilder.BinaryBuilderBase: preferred_runner, platform_dlext, platform_exeext
using Base.BinaryPlatforms
using Random, LibGit2, Test, ObjectFile, SHA
import Libdl
# The platform we're running on
const platform = HostPlatform()
const build_tests_dir = joinpath(@__DIR__, "build_tests")
# Helper function to run fortran code with the path to libgfortran/libquadmath
# embedded in the appropriate environment variables (JLL packages we love you so)
csl_path = dirname(first(filter(x -> occursin("libgfortran", x), Libdl.dllist())))
LIBPATH_var, envsep = if Sys.iswindows()
("PATH", ";")
elseif Sys.isapple()
("DYLD_LIBRARY_PATH", ":")
else
("LD_LIBRARY_PATH", ":")
end
function with_libgfortran(f::Function)
libpath_list = [csl_path; split(get(ENV, LIBPATH_var, ""), envsep)]
libpath = join(filter(x -> !isempty(x), libpath_list), envsep)
withenv(f, LIBPATH_var => libpath)
end
## Tests involving building packages and whatnot
libfoo_products = [
LibraryProduct("libfoo", :libfoo),
ExecutableProduct("fooifier", :fooifier),
]
libfoo_make_script = raw"""
cd ${WORKSPACE}/srcdir/libfoo
make install
install_license ${WORKSPACE}/srcdir/libfoo/LICENSE.md
"""
libfoo_cmake_script = raw"""
mkdir ${WORKSPACE}/srcdir/libfoo/build && cd ${WORKSPACE}/srcdir/libfoo/build
cmake -DCMAKE_INSTALL_PREFIX=${prefix} -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TARGET_TOOLCHAIN} ..
make install
install_license ${WORKSPACE}/srcdir/libfoo/LICENSE.md
"""
libfoo_meson_script = raw"""
mkdir ${WORKSPACE}/srcdir/libfoo/build && cd ${WORKSPACE}/srcdir/libfoo/build
meson .. -Dprefix=${prefix} --cross-file="${MESON_TARGET_TOOLCHAIN}"
meson install
# grumble grumble meson! Why do you go to all the trouble to build it properly
# in `build`, then screw it up when you `install` it?! Silly willy.
if [[ ${target} == *apple* ]]; then
install_name_tool ${prefix}/bin/fooifier -change ${prefix}/lib/libfoo.0.dylib @rpath/libfoo.0.dylib
fi
install_license ${WORKSPACE}/srcdir/libfoo/LICENSE.md
"""
libfoo_autotools_script = raw"""
cd ${WORKSPACE}/srcdir/libfoo
autoreconf -fiv
./configure --prefix=${prefix} --build=${MACHTYPE} --host=${target} --disable-static
make install
install_license ${WORKSPACE}/srcdir/libfoo/LICENSE.md
"""
# Run all our tests
include("basic.jl")
include("building.jl")
include("auditing.jl")
include("jll.jl")
include("wizard.jl")
include("declarative.jl")
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | code | 20075 | using BinaryBuilder, BinaryBuilder.BinaryBuilderBase, BinaryBuilder.Wizard
using GitHub, Test, VT100, Sockets, HTTP, SHA, Tar
import Pkg: PackageSpec
import BinaryBuilder.BinaryBuilderBase: available_gcc_builds, available_llvm_builds, getversion
function with_wizard_output(f::Function, state, step_func::Function)
# Create fake terminal to communicate with BinaryBuilder over
pty = VT100.create_pty(false)
state.ins = Base.TTY(pty.slave)
state.outs = Base.TTY(pty.slave)
# Immediately start reading in off of `state.outs`
out_buff = PipeBuffer()
reader_task = @async begin
while isopen(pty.master)
z = String(readavailable(pty.master))
# Un-comment this to figure out what on earth is going wrong
# print(z)
write(out_buff, z)
end
end
# Start the wizard poppin' off
wizard_task = @async begin
try
step_func(state)
catch e
bt = catch_backtrace()
Base.display_error(stderr, e, bt)
# If this fails, panic
Test.@test false
end
end
f(pty.master, out_buff)
# Wait for the wizard to finish
wait(wizard_task)
# Once that's done, kill the reader task
close(pty.master)
wait(reader_task)
end
# Test the download stage
r = HTTP.Router()
io = IOBuffer()
Tar.create(joinpath(build_tests_dir, "libfoo"), pipeline(`gzip -9`, io))
libfoo_tarball_data = take!(io)
libfoo_tarball_hash = bytes2hex(sha256(libfoo_tarball_data))
function serve_tgz(req)
HTTP.Response(200, libfoo_tarball_data)
end
@static if isdefined(HTTP, Symbol("@register"))
HTTP.@register(r, "GET", "/*/source.tar.gz", serve_tgz)
else
HTTP.register!(r, "GET", "/*/source.tar.gz", serve_tgz)
end
port = -1
server = Sockets.TCPServer()
# Try to connect to different ports, in case one is busy. Important in case we
# have multiple parallel builds.
available_ports = 14444:14544
for i in available_ports
try
# Update the global server to shut it down when we are done with it.
global server = Sockets.listen(Sockets.InetAddr(Sockets.localhost, i))
catch e
if e isa Base.IOError
if i == last(available_ports)
# Oh no, this was our last attempt
error("No more ports available for the HTTP server")
end
# If the port is busy, try the next one
continue
else
rethrow(e)
end
end
# All looks good, update the global `port` and start the server
global port = i
@async HTTP.serve(r, Sockets.localhost, port; server=server, verbose=false)
break
end
function readuntil_sift(io::IO, needle)
# N.B.: This is a terrible way to do this and works around the fact that our `IOBuffer`
# does not block. It works fine here, but do not copy this to other places.
needle = codeunits(needle)
buffer = zeros(UInt8, length(needle))
all_buffer = UInt8[]
while isopen(io)
new_c = read(io, 1)
append!(all_buffer, new_c)
if isempty(new_c)
# We need to wait for more data, sleep for a bit
sleep(0.01)
continue
end
buffer = [buffer[2:end]; new_c]
if !any(buffer .!= needle)
return all_buffer
end
end
return nothing
end
function call_response(ins, outs, question, answer; newline=true)
@assert readuntil_sift(outs, question) !== nothing
# Because we occasionally are dealing with things that do strange
# stdin tricks like reading raw stdin buffers, we sleep here for safety.
sleep(0.1)
print(ins, answer)
if newline
println(ins)
end
end
@testset "Wizard - Obtain source" begin
state = Wizard.WizardState()
# Use a non existing name
with_wizard_output(state, Wizard.get_name_and_version) do ins, outs
# Append "_jll" to the name and make sure this is automatically removed
call_response(ins, outs, "Enter a name for this project", "libfoobarqux_jll")
call_response(ins, outs, "Enter a version number", "1.2.3")
end
@test state.name == "libfoobarqux"
@test state.version == v"1.2.3"
state.name = nothing
# Use an existing name, choose a new one afterwards
with_wizard_output(state, Wizard.get_name_and_version) do ins, outs
call_response(ins, outs, "Enter a name for this project", "cuba")
call_response(ins, outs, "Choose a new project name", "y")
call_response(ins, outs, "Enter a name for this project", "libfoobarqux")
end
@test state.name == "libfoobarqux"
@test state.version == v"1.2.3"
state.name = nothing
# Use an existing name, confirm the choice
with_wizard_output(state, Wizard.get_name_and_version) do ins, outs
call_response(ins, outs, "Enter a name for this project", "cuba")
call_response(ins, outs, "Choose a new project name", "N")
end
@test state.name == "cuba"
@test state.version == v"1.2.3"
end
# Set the state up
function step2_state()
state = Wizard.WizardState()
state.step = :step2
state.platforms = [Platform("x86_64", "linux")]
return state
end
@testset "Wizard - Downloading" begin
state = step2_state()
with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "http://127.0.0.1:$(port)/a/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "N")
call_response(ins, outs, "Enter a name for this project", "libfoo")
# Test bad version number detection
call_response(ins, outs, "Enter a version number", "parse me, I dare you")
call_response(ins, outs, "Enter a version number", "1.0.0")
# Compiler
call_response(ins, outs, "Do you want to customize the set of compilers?", "Y")
call_response(ins, outs, "Select compilers for the project", "ad")
call_response(ins, outs, "Select the preferred GCC version", "\r")
call_response(ins, outs, "Select the preferred LLVM version", "\e[B\e[B\e[B\r")
end
# Check that the state is modified appropriately
@test state.source_urls == ["http://127.0.0.1:$(port)/a/source.tar.gz"]
@test getfield.(state.source_files, :hash) == [libfoo_tarball_hash]
@test Set(state.compilers) == Set([:c, :rust, :go])
@test state.preferred_gcc_version == getversion(available_gcc_builds[1])
# The default LLVM shard is the latest one, and above we pressed the down
# arrow three times in the reverse-ordered list.
@test state.preferred_llvm_version == getversion(available_llvm_builds[end-3])
# Test two tar.gz download
state = step2_state()
with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "http://127.0.0.1:$(port)/a/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "Y")
call_response(ins, outs, "Please enter a URL", "http://127.0.0.1:$(port)/b/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "N")
call_response(ins, outs, "Enter a name for this project", "libfoo")
call_response(ins, outs, "Enter a version number", "1.0.0")
call_response(ins, outs, "Do you want to customize the set of compilers?", "N")
end
# Check that the state is modified appropriately
@test state.source_urls == [
"http://127.0.0.1:$(port)/a/source.tar.gz",
"http://127.0.0.1:$(port)/b/source.tar.gz",
]
@test getfield.(state.source_files, :hash) == [
libfoo_tarball_hash,
libfoo_tarball_hash,
]
# Test that two files downloaded with the same name are renamed appropriately
m = match.(r"^.+(?=(\.tar\.([\s\S]+)))", basename.(getfield.(state.source_files,:path)))
for cap in m
@test cap.captures[1] ∈ BinaryBuilderBase.tar_extensions
end
# Test download/install with a broken symlink that used to kill the wizard
# https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/183
state = step2_state()
with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "https://github.com/staticfloat/small_bin/raw/d846f4a966883e7cc032a84acf4fa36695d05482/broken_symlink/broken_symlink.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "N")
call_response(ins, outs, "Enter a name for this project", "broken_symlink")
call_response(ins, outs, "Enter a version number", "1.0.0")
call_response(ins, outs, "Do you want to customize the set of compilers?", "N")
end
# Test failure to resolve a dependency
state = step2_state()
@test_logs (:warn, r"Unable to resolve iso_codez_jll") match_mode=:any with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "http://127.0.0.1:$(port)/a/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "Y")
call_response(ins, outs, "Enter JLL package name:", "ghr_jll")
call_response(ins, outs, "Would you like to provide additional dependencies?", "Y")
# Test auto-JLL suffixing
call_response(ins, outs, "Enter JLL package name:", "Zlib")
call_response(ins, outs, "Would you like to provide additional dependencies?", "Y")
# Test typo detection
call_response(ins, outs, "Enter JLL package name:", "iso_codez_jll")
call_response(ins, outs, "Unable to resolve", "N")
call_response(ins, outs, "Enter a name for this project", "check_deps")
call_response(ins, outs, "Enter a version number", "1.0.0")
call_response(ins, outs, "Do you want to customize the set of compilers?", "N")
end
@test length(state.dependencies) == 2
@test any([BinaryBuilder.getname(d) == "ghr_jll" for d in state.dependencies])
@test any([BinaryBuilder.getname(d) == "Zlib_jll" for d in state.dependencies])
# Test for escaping the URL prompt with N
state = step2_state()
with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "http://127.0.0.1:$(port)/a/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "Y")
call_response(ins, outs, "Please enter a URL", "N")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "N")
call_response(ins, outs, "Enter a name for this project", "get_me_out")
call_response(ins, outs, "Enter a version number", "1.0.0")
call_response(ins, outs, "Do you want to customize the set of compilers?", "N")
end
@test state.source_urls == ["http://127.0.0.1:$(port)/a/source.tar.gz"]
state = step2_state()
with_wizard_output(state, Wizard.step2) do ins, outs
call_response(ins, outs, "Please enter a URL", "N")
call_response(ins, outs, "No URLs", "http://127.0.0.1:$(port)/a/source.tar.gz")
call_response(ins, outs, "Would you like to download additional sources", "N")
call_response(ins, outs, "Do you require any (binary) dependencies", "N")
call_response(ins, outs, "Enter a name for this project", "no_urls")
call_response(ins, outs, "Enter a version number", "1.0.0")
call_response(ins, outs, "Do you want to customize the set of compilers?", "N")
end
end
# Dump the tarball to disk so that we can use it directly in the future
tempspace = tempname()
mkdir(tempspace)
libfoo_tarball_path = joinpath(tempspace, "source.tar.gz")
open(f -> write(f, libfoo_tarball_data), libfoo_tarball_path, "w")
function step3_state()
state = Wizard.WizardState()
state.step = :step34
state.platforms = [Platform("x86_64", "linux")]
state.source_urls = ["http://127.0.0.1:$(port)/a/source.tar.gz"]
state.source_files = [BinaryBuilder.SetupSource{ArchiveSource}(libfoo_tarball_path, libfoo_tarball_hash, "")]
state.name = "libfoo"
state.version = v"1.0.0"
state.dependencies = Dependency[]
state.compilers = [:c]
state.preferred_gcc_version = getversion(available_gcc_builds[1])
state.preferred_llvm_version = getversion(available_llvm_builds[end])
return state
end
function step3_test(state)
@test length(state.files) == 2
@test "lib/libfoo.so" in state.files
@test "bin/fooifier" in state.files
libfoo_idx = findfirst(state.files .== "lib/libfoo.so")
fooifier_idx = findfirst(state.files .== "bin/fooifier")
@test state.file_kinds[libfoo_idx] == :library
@test state.file_kinds[fooifier_idx] == :executable
@test state.file_varnames[libfoo_idx] == :libfoo
@test state.file_varnames[fooifier_idx] == :fooifier
end
@testset "Wizard - Building" begin
function success_path_call_response(ins, outs)
output = readuntil_sift(outs, "Build complete")
if contains(String(output), "Warning:")
close(ins)
return false
end
call_response(ins, outs, "Would you like to edit this script now?", "N")
call_response(ins, outs, "d=done, a=all", "ad"; newline=false)
call_response(ins, outs, "lib/libfoo.so", "libfoo")
call_response(ins, outs, "bin/fooifier", "fooifier")
return true
end
# Test step3 success path
state = step3_state()
with_wizard_output(state, Wizard.step34) do ins, outs
call_response(ins, outs, "\${WORKSPACE}/srcdir", """
cd libfoo
make install
exit
""")
@test success_path_call_response(ins, outs)
end
@test state.history == """
cd \$WORKSPACE/srcdir
cd libfoo
make install
exit
"""
step3_test(state)
# Step 3 failure path (no binary in destdir -> return to build)
state = step3_state()
with_wizard_output(state, Wizard.step34) do ins, outs
# Don't build anything
call_response(ins, outs, "\${WORKSPACE}/srcdir", "exit")
call_response(ins, outs, "Would you like to edit this script now?", "N")
# Return to build environment
call_response(ins, outs, "Return to build environment", "\r", newline=false)
call_response(ins, outs, "\${WORKSPACE}/srcdir", """
cd libfoo
make install
exit
""")
@test success_path_call_response(ins, outs)
end
@test state.history == """
cd \$WORKSPACE/srcdir
exit
cd \$WORKSPACE/srcdir
cd libfoo
make install
exit
"""
step3_test(state)
# Step 3 failure path (no binary in destdir -> retry with a clean build environment)
state = step3_state()
with_wizard_output(state, Wizard.step34) do ins, outs
# Don't build anything
call_response(ins, outs, "\${WORKSPACE}/srcdir", "exit")
call_response(ins, outs, "Would you like to edit this script now?", "N")
# Clean environment
call_response(ins, outs, "Return to build environment", "\e[B\r")
end
@test state.step == :step3
# Step 3 with a failing script
state = step3_state()
with_wizard_output(state, Wizard.step34) do ins, outs
# Build ok, but then indicate a failure
call_response(ins, outs, "\${WORKSPACE}/srcdir", """
cd libfoo
make install
exit 1
""")
@test readuntil_sift(outs, "Warning:") !== nothing
@test success_path_call_response(ins, outs)
end
step3_test(state)
# Step 3 dependency download
state = step3_state()
state.dependencies = [Dependency(PackageSpec(name="Zlib_jll", uuid="83775a58-1f1d-513f-b197-d71354ab007a"))]
with_wizard_output(state, Wizard.step34) do ins, outs
call_response(ins, outs, "\${WORKSPACE}/srcdir", """
if [[ ! -f \${libdir}/libz.\${dlext} ]]; then
echo "ERROR: Could not find libz.\${dlext}" >&2
exit 1
fi
cd libfoo
make install
exit
""")
@test success_path_call_response(ins, outs)
end
# Step 3 - `bb add`
state = step3_state()
state.dependencies = [Dependency(PackageSpec(name="Zlib_jll", uuid="83775a58-1f1d-513f-b197-d71354ab007a"))]
with_wizard_output(state, Wizard.step34) do ins, outs
call_response(ins, outs, "\${WORKSPACE}/srcdir", """
if [[ ! -f \${libdir}/libz.\${dlext} ]]; then
echo "ERROR: Could not find libz.\${dlext}" >&2
exit 1
fi
bb add Xorg_xorgproto_jll
if [[ ! -d \${includedir}/X11 ]]; then
echo "ERROR: Could not find include/X11" >&2
exit 1
fi
bb add Zlib_jll
cd libfoo
make install
exit
""")
@test success_path_call_response(ins, outs)
end
end
function step5_state(script)
state = step3_state()
state.history = script
state.files = ["lib/libfoo.so","bin/fooifier"]
state.file_kinds = [:library, :executable]
state.file_varnames = [:libfoo, :fooifier]
state
end
@testset "Wizard - Generalizing" begin
# Check that with a failing script, step 5 rejects,
# even if all artifacts are present.
state = step5_state("""
cd libfoo
make install
exit 1
""")
with_wizard_output(state, state->Wizard.step5_internal(state, first(state.platforms))) do ins, outs
call_response(ins, outs, "Press Enter to continue...", "\n")
call_response(ins, outs, "How would you like to proceed?", "\e[B\e[B\r")
end
@test isempty(state.platforms)
end
function step7_state()
state = step5_state("""
cd libfoo
make install
exit 1
""")
state.patches = [PatchSource("foo.patch", "this is a patch")]
return state
end
@testset "Wizard - Deployment" begin
state = step7_state()
# First, test local deployment
mktempdir() do out_dir
with_wizard_output(state, state->Wizard._deploy(state)) do ins, outs
call_response(ins, outs, "How should we deploy this build recipe?", "\e[B\r")
call_response(ins, outs, "Enter directory to write build_tarballs.jl to:", "$(out_dir)\r")
end
@test isfile(joinpath(out_dir, "build_tarballs.jl"))
@test isfile(joinpath(out_dir, "bundled", "patches", "foo.patch"))
end
# Next, test writing out to stdout
state = step7_state()
with_wizard_output(state, state->Wizard._deploy(state)) do ins, outs
call_response(ins, outs, "How should we deploy this build recipe?", "\e[B\e[B\r")
@test readuntil_sift(outs, "Your generated build_tarballs.jl:") !== nothing
@test readuntil_sift(outs, "name = \"libfoo\"") !== nothing
@test readuntil_sift(outs, "make install") !== nothing
@test readuntil_sift(outs, "LibraryProduct(\"libfoo\", :libfoo)") !== nothing
@test readuntil_sift(outs, "ExecutableProduct(\"fooifier\", :fooifier)") !== nothing
@test readuntil_sift(outs, "dependencies = Dependency[") !== nothing
end
end
@testset "Wizard - state serialization" begin
for state_generator in (Wizard.WizardState, step2_state, step3_state, step7_state)
mktempdir() do dir
state = state_generator()
Wizard.save_wizard_state(state, dir)
@test Wizard.load_wizard_state(dir; as_is=true) == state
end
end
end
close(server)
@testset "GitHub - authentication" begin
withenv("GITHUB_TOKEN" => "") do
@test Wizard.github_auth(allow_anonymous=true) isa GitHub.AnonymousAuth
end
end
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | docs | 3527 | # BinaryBuilder
[](https://dev.azure.com/JuliaPackaging/BinaryBuilder.jl/_build/latest?definitionId=2&branchName=master) [](http://codecov.io/github/JuliaPackaging/BinaryBuilder.jl?branch=master)
[](https://docs.binarybuilder.org/stable)
[](https://docs.binarybuilder.org/dev)
> "Yea, though I walk through the valley of the shadow of death, I will fear no evil"
# Quickstart
1. Install `BinaryBuilder`
```julia
using Pkg; Pkg.add("BinaryBuilder")
```
2. Run the wizard.
```julia
using BinaryBuilder
BinaryBuilder.run_wizard()
```
3. The wizard will take you through a process of building your software package. Note that the wizard may need to download a new compiler shard for each platform targeted, and there are quite a few of these, so a fast internet connection can be helpful. The output of this stage is a `build_tarballs.jl` file (a minimal example is sketched below), which is most commonly deployed as a pull request to the community buildtree [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil). For experienced users, it is often more convenient to directly copy/modify an existing `build_tarballs.jl` file within Yggdrasil, then simply open a pull request where CI will test building the binary artifacts for all platforms again.
4. The output of a build is a JLL package (typically hosted within the [JuliaBinaryWrappers](https://github.com/JuliaBinaryWrappers/) GitHub organization) which can be added to packages just like any other Julia package. The JLL package will export bindings for all products defined within the build recipe.
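For reference, here is a minimal sketch of what a `build_tarballs.jl` recipe looks like. Note that the project name, source URL, and SHA256 checksum below are placeholders, not a real project:

```julia
using BinaryBuilder

name = "libfoo"     # placeholder project name
version = v"1.0.0"

# Placeholder source archive and its SHA256 checksum
sources = [
    ArchiveSource("https://example.com/libfoo-1.0.0.tar.gz",
                  "0000000000000000000000000000000000000000000000000000000000000000"),
]

# Bash script run inside the cross-compilation environment
script = raw"""
cd ${WORKSPACE}/srcdir/libfoo-*
./configure --prefix=${prefix} --build=${MACHTYPE} --host=${target}
make -j${nproc}
make install
"""

# Target every platform BinaryBuilder supports
platforms = supported_platforms()

# The binaries we expect the build to produce
products = [
    LibraryProduct("libfoo", :libfoo),
]

# Other JLL packages this build depends on at runtime
dependencies = Dependency[]

build_tarballs(ARGS, name, version, sources, script, platforms, products, dependencies)
```

Once the resulting JLL package has been built and registered, users simply `Pkg.add("libfoo_jll")` and `using libfoo_jll` to access the exported products.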
For more information, see the documentation for this package, viewable either directly in markdown within the [`docs/src`](docs/src) folder within this repository, or [online](https://docs.binarybuilder.org).
# Philosophy
Building binary packages is a pain. `BinaryBuilder` follows a philosophy that is similar to that of building [Julia](https://julialang.org) itself; when you want something done right, you do it yourself. To that end, `BinaryBuilder` is designed from the ground up to facilitate the building of packages within an easily reproducible and reliable Linux environment, ensuring that the built libraries and executables are deployable to every platform that Julia itself will run on. Packages are cross-compiled using a sequence of shell commands, packaged up inside tarballs, and hosted online for all to enjoy. Package installation is merely downloading, verifying package integrity and extracting that tarball on the user's computer. No more compiling on user's machines. No more struggling with system package managers. No more needing `sudo` access to install that little mathematical optimization library.
All packages are cross compiled. If a package does not support cross compilation, we patch the package or, in extreme cases, rebundle prebuilt executables.
The cross-compilation environment that we use is a homegrown Linux environment with many different compilers built for it, including various versions of `gcc`, `clang`, `gfortran`, `rustc` and `go`. You can read more about this in [the `RootFS.md` file](https://github.com/JuliaPackaging/Yggdrasil/blob/master/RootFS.md) within the Yggdrasil repository.
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | docs | 11079 | # Frequently Asked Questions
### I'm having trouble compiling `<project name here>`
First, make sure that you can compile that project natively on whatever platform you're attempting to compile it on. Once you are assured of that, search around the internet to see if anyone else has run into issues cross-compiling that project for that platform. In particular, most smaller projects should be just fine, but larger projects (and especially anything that does any kind of bootstrapping) may need some extra smarts smacked into their build system to support cross-compiling. Finally, if you're still stuck, try reaching out for help on the [`#binarybuilder` channel](https://julialang.slack.com/archives/C674ELDNX) in the JuliaLang slack.
### How do I use this to compile my Julia code?
This package does not compile Julia code; it compiles C/C++/Fortran dependencies. Think about that time you wanted to use `IJulia` and you needed to download/install `libnettle`. The purpose of this package is to make generated tarballs that can be downloaded/installed painlessly as possible.
### What is this I hear about the macOS SDK license agreement?
Apple restricts distribution and usage of the macOS SDK, a necessary component to build software for macOS targets. Please read the [Apple and Xcode SDK agreement](https://images.apple.com/legal/sla/docs/xcode.pdf) for more information on the restrictions and legal terms you agree to when using the SDK to build software for Apple operating systems. Copyright law is a complex area and you should not take legal advice from FAQs on the internet. This toolkit is designed to primarily run on Linux, though it can of course be used within a virtualized environment on a macOS machine or directly by running Linux on Apple hardware. The Docker runner implements the virtualization approach on macOS machines. `BinaryBuilder.jl`, by default, will not automatically download or use the macOS SDK on non-Apple host operating systems, unless the `BINARYBUILDER_AUTOMATIC_APPLE` environment variable is set to `true`.
### Are there other environment variables I can use?
Yes, [take a look](environment_variables.md).
### Hey, this is cool, can I use this for my non-Julia related project?
Absolutely! There's nothing Julia-specific about the binaries generated by the cross-compilers used by `BinaryBuilder.jl`. Although the best interface for interacting with this software will always be the Julia interface defined within this package, you are free to use these software tools for other projects as well. Note that the cross-compiler image is built through a multistage bootstrapping process, [see this repository for more information](https://github.com/JuliaPackaging/Yggdrasil). Further note the **macOS SDK license agreement** tidbit above.
### At line XXX, ABORTED (Operation not permitted)!
Some linux distributions have a bug in their `overlayfs` implementation that prevents us from mounting overlay filesystems within user namespaces. See [this Ubuntu kernel bug report](https://bugs.launchpad.net/ubuntu/+source/linux/+bug/1531747) for a description of the situation and how Ubuntu has patched it in their kernels. To work around this, you can launch `BinaryBuilder.jl` in "privileged container" mode. BinaryBuilder should auto-detect this situation, however if the autodetection is not working or you want to silence the warning, you can set the `BINARYBUILDER_RUNNER` environment variable to `privileged`. Unfortunately, this involves running `sudo` every time you launch into a BinaryBuilder session, but on the other hand, this successfully works around the issue on distributions such as Arch linux.
### I have to build a very small project without a Makefile, what do I have to do?
What BinaryBuilder needs is to find the relevant files (shared libraries, executables, etc...) organised under the `$prefix` directory: libraries should go to `${libdir}`, executables to `${bindir}`. You may need to create those directories. You are free to choose whether to create a simple Makefile to build the project or to do everything within the `build_tarballs.jl` script.
When the script completes, BinaryBuilder expects to find at least one artifact _built for the expected architecture_ in either `${libdir}` or `${bindir}`.
Remember also that you should use the standard environment variables like `CC`, `CXX`, `CFLAGS`, `LDFLAGS` as appropriate in order to cross compile. See the list of variables in the [Tips for Building Packages](build_tips.md) section.
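For example, here is a minimal sketch for a hypothetical project consisting of a single C file `foo.c` (all names here are placeholders):

```sh
cd ${WORKSPACE}/srcdir/foo
# Create the destination directory, then compile the shared library directly
# into it using the cross-compilation variables provided by the environment
mkdir -p "${libdir}"
${CC} ${CFLAGS} -shared -fPIC foo.c -o "${libdir}/libfoo.${dlext}" ${LDFLAGS}
```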
### Can I open a shell in a particular build environment for doing some quick tests?
Yes! You can use [`BinaryBuilder.runshell(platform)`](@ref BinaryBuilderBase.runshell) to quickly start a shell in the current directory, without having to set up a working `build_tarballs.jl` script. For example,
```
julia -e 'using BinaryBuilder; BinaryBuilder.runshell(Platform("i686", "windows"))'
```
will open a shell in a Windows 32-bit build environment, without any source loaded. The current working directory of your system will be mounted on `${WORKSPACE}` within this BinaryBuilder environment.
### Can I publish a JLL package locally without going through Yggdrasil?
You can always build a JLL package on your machine with the `--deploy` flag to the `build_tarballs.jl` script. Read the help (`--help`) for more information.
A common use case is that you want to build a JLL package for, say, `Libfoo`, that will be used as dependency to build `Quxlib`, and you want to make sure that building both `Libfoo` and `Quxlib` will work before submitting all the pull requests to [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil/). You can prepare the `build_tarballs.jl` script for `Libfoo` and then build and deploy it with
```
julia build_tarballs.jl --debug --verbose --deploy="MY_USERNAME/Libfoo_jll.jl"
```
replacing `MY_USERNAME` with your GitHub username: this will build the tarballs for all the platforms requested and upload them to a release of the `MY_USERNAME/Libfoo_jll.jl` repository, where the JLL package will also be created. As explained above, you can pass as arguments the list of triplets of the platforms for which you want to build the tarballs, in case you want to compile only some of them. In the Julia REPL, you can install this package as any unregistered package with
```julia
]add https://github.com/MY_USERNAME/Libfoo_jll.jl.git
```
or develop it with
```julia
]dev https://github.com/MY_USERNAME/Libfoo_jll.jl.git
```
Since this package is unregistered, you have to use the full [`PackageSpec`](https://julialang.github.io/Pkg.jl/v1/api/#Pkg.PackageSpec) specification to add it as dependency of the local builder for `Quxlib`:
```julia
Dependency(PackageSpec(; name = "Libfoo_jll", uuid = "...", url = "https://github.com/MY_USERNAME/Libfoo_jll.jl.git"))
```
You can of course in turn build and deploy this package with
```
julia build_tarballs.jl --debug --verbose --deploy="MY_USERNAME/Quxlib_jll.jl"
```
Note that `PackageSpec` can also point to a local path: e.g., `PackageSpec(; name="Libfoo_jll", uuid="...", path="/home/myname/.julia/dev/Libfoo_jll")`. This is particularly useful when [Building a custom JLL package locally](@ref), instead of deploying it to a remote Git repository.
### What are those numbers in the list of sources? How do I get them?
The list of sources is a vector of [`BinaryBuilder.AbstractSource`](@ref)s. What the hash is depends on what the source is:
* For a [`FileSource`](@ref) or an [`ArchiveSource`](@ref), the hash is a 64-character SHA256 checksum. If you have a copy of that file, you can compute the hash in Julia with
```julia
using SHA
open(path_to_the_file, "r") do f
    bytes2hex(sha256(f))
end
```
where `path_to_the_file` is a string with the path to the file. Alternatively, you can use the command line utilities `curl` and `shasum` to compute the hash of a remote file:
```
$ curl -L http://example.org/file.tar.gz | shasum -a 256
```
replacing `http://example.org/file.tar.gz` with the actual URL of the file you want to download.
* For a [`GitSource`](@ref), the hash is the 40-character SHA1 hash of the revision you want to checkout. For reproducibility you must indicate a specific revision, and not a branch or tag name, which are moving targets.
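  If you have a local clone of the repository, you can resolve the current checkout or a tag (here `v1.2.3`, a placeholder) to its full 40-character revision with `git rev-parse`:
  ```
  $ git rev-parse HEAD
  $ git rev-parse "v1.2.3^{commit}"
  ```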
### Now that I have a published and versioned `jll` package, what compat bounds do I put in its dependents? What if the upstream does not follow SemVer?
Imagine there is a package `CoolFortranLibrary_jll` that is a build of an upstream Fortran library `CoolFortranLibrary`. We will abbreviate these to `CFL_jll` and `CFL`.
Once you have `CFL_jll` you might want to have a Julia project that depends on it.
As usual you put a compat bound for `CFL_jll` (the version number of upstream `CFL` and the jll version of `CFL_jll` are typically set equal during the `jll` registration).
If you know for a fact that upstream `CFL` follows SemVer, then you just set compat bounds as if it was any other Julia project.
However, not all ecosystems follow SemVer. The following two cases are quite common:
1. `CFL` releases versions 1.1.1 and 1.1.2 that are incompatible. A real-world example is Boost (which breaks the ABI in every single release because it embeds the full version number in the soname of its libraries). If you have a typical permissive SemVer-style compat section in a package that depends on `CFL_jll`, then your package will break whenever `CFL_jll` gets a new release. To solve this issue you have to use "hyphen style" compat bounds like `"0.9.0 - 1.1.2"` (see the example after this list). This leads to a separate problem: you need to change the compat bound every time there is a new `CFL_jll` release. This is still the least bad option: it causes more annoyance for developers, but it ensures users never end up with broken installs, and bots like `CompatHelper` can mostly automate the process.
2. `CFL` releases versions 1.0.0 and 2.0.0 that are perfectly compatible. The Linux kernel, Chrome, Firefox, and curl are such examples. This causes annoying churn, as the developer still needs to update compat bounds in packages that depend on `CFL_jll`. Or, if you have a very strong belief in `CFL`'s commitment to backward compatibility, you can put an extremely generous compat bound like `">= 1.0.0"`.
While the SemVer (and Julia's) "conservative" approach to compatibility ensures there will never be runtime crashes due to installed incompatible libraries, you might still end up with systems that refuse to install in the first place (which the Julia ecosystem considers the lesser evil). E.g., package `A.jl` that depends on newer versions of `CFL` and package `B.jl` that depends on older versions cannot be installed at the same time. This happens less often in ecosystems that follow SemVer, but might happen relatively often in an ecosystem that does not. Thus developers that rely on `jll` packages that do not follow SemVer should be proactive in updating their compat bounds (and are strongly encouraged to heavily use the `CompatHelper` bot).
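As a concrete illustration of the hyphen-style bounds from case 1 above, here is a minimal sketch of the `[compat]` section of a hypothetical package depending on `CFL_jll`:

```toml
[compat]
# Hyphen-style bound: allows 0.9.0 up to and including 1.1.2; it must be
# bumped (manually or by CompatHelper) for every new CFL_jll release
CFL_jll = "0.9.0 - 1.1.2"
```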
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | docs | 20745 | # Tips for Building Packages
BinaryBuilder provides a convenient environment to enable cross-platform building. But, many libraries have complicated build scripts that may need to be adapted to support all of the BinaryBuilder targets.
If your build fails with some errors, look at the [Build Troubleshooting](@ref) page.
*If you have additional tips, please submit a PR with suggestions.*
## Build strategy
What BinaryBuilder does is to create a tarball containing all files that are found inside the `${prefix}` directory at the end of the build and which don't come from the dependencies listed in the build recipe.
Thus, what you want to do in a build script is to install the relevant files under the appropriate directories in `${prefix}` (see the [Automatic environment variables](@ref) section): the libraries in `${libdir}`, the binary executables in `${bindir}`, etc...
Most packages come with a build system to automate this process (GNU Autoconf, CMake, Meson, a plain Makefile, etc...), but sometimes you may need to manually move the files as appropriate.
## Initiating different shell commands based on target
Sometimes, you need to adapt build scripts based on the target platform. This can be done within the shell script. Here is an example from [`OpenBLAS`](https://github.com/JuliaPackaging/Yggdrasil/blob/685cdcec9f0f0a16f7b90a1671af88326dcf5ab1/O/OpenBLAS/build_tarballs.jl):
```sh
# Set BINARY=32 on 32-bit platforms (e.g. i686, armv7l)
if [[ ${nbits} == 32 ]]; then
    flags="${flags} BINARY=32"
fi
```
Here are other examples of scripts with target-specific checks:
* [Kaleido](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/K/Kaleido/build_tarballs.jl#L20-L25) - Different steps for Windows and macOS
* [Libical](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/L/Libical/build_tarballs.jl#L21-L25) - 32-bit check
It is also possible to run quite different scripts for each target by running different build scripts for different sets of targets. Here is an example where windows builds are separated from other targets:
* [Git](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/G/Git/build_tarballs.jl#L22-L26)
## Autoconfigure builds
Autoconfigure builds are generally quite straightforward. Here is a typical approach:
```sh
./configure --prefix=$prefix --build=${MACHTYPE} --host=${target}
make -j${nproc}
make install
```
Here are examples of autoconfigure build scripts:
* [Patchelf](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/P/Patchelf/build_tarballs.jl#L18-L20)
* [LibCURL](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/L/LibCURL/build_tarballs.jl#L55-L57)
## CMake builds
For CMake, the wizard will suggest a template for running CMake. Typically, this will look like:
```sh
cmake -B build -DCMAKE_INSTALL_PREFIX=${prefix} -DCMAKE_TOOLCHAIN_FILE=${CMAKE_TARGET_TOOLCHAIN} -DCMAKE_BUILD_TYPE=Release
cmake --build build --parallel ${nproc}
cmake --install build
```
CMake makes it hard to clean up a partially failed build and start over, so we always recommend configuring and building a CMake project in a dedicated new directory, `build` in the example above.
The toolchain file sets up several CMake environment variables for better cross-platform support, such as `CMAKE_SYSROOT`, `CMAKE_C_COMPILER`, etc... Examples of builds that include CMake parts include:
* [JpegTurbo](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/J/JpegTurbo/build_tarballs.jl#L19-L21)
* [Sundials](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/S/Sundials/Sundials%405/build_tarballs.jl#L42-L55)
- Needs to copy *.dll files from `${prefix}/lib` to `${libdir}` for Windows
- Needs `KLU_LIBRARY_DIR="$libdir"`, so CMake's `find_library` can find libraries from KLU
## Meson builds
BinaryBuilder supports also building with Meson. Since this is going to be a cross-compilation, you have to specify a Meson cross file:
```sh
meson --cross-file="${MESON_TARGET_TOOLCHAIN}" --buildtype=release
```
After configuring the project with `meson`, you can then build and install it with
```
ninja -j${nproc}
ninja install
```
The wizard automatically suggests using Meson if the `meson.build` file is present.
Examples of builds performed with Meson include:
* [gdk-pixbuf](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/G/gdk_pixbuf/build_tarballs.jl#L22-L35):
here meson uses platform-dependent options;
* [libepoxy](https://github.com/JuliaPackaging/Yggdrasil/blob/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/L/Libepoxy/build_tarballs.jl#L19-L25):
this script modifies `c_args` in the Meson cross file in order to add an include directory;
* [xkbcommon](https://github.com/JuliaPackaging/Yggdrasil/blob/2f3638292c99fa6032634517f8a1aa8360d6fe8d/X/xkbcommon/build_tarballs.jl#L26-L30).
## Go builds
The Go toolchain provided by BinaryBuilder can be requested by adding `:go` to the `compilers` keyword argument to [`build_tarballs`](@ref): `compilers=[:c, :go]`. Go-based packages can usually be built and installed with `go`:
```sh
go build -o ${bindir}
```
The Go toolchain provided by BinaryBuilder automatically selects the appropriate target.
Example of packages using Go:
* [pprof](https://github.com/JuliaPackaging/Yggdrasil/blob/ea43d07d264046e8c94a460907bba209a015c10f/P/pprof/build_tarballs.jl#L21-L22): it uses `go build` to compile the program and manually moves the executable to `${bindir}`.
## Rust builds
The Rust toolchain provided by BinaryBuilder can be requested by adding `:rust` to the `compilers` keyword argument to [`build_tarballs`](@ref): `compilers=[:c, :rust]`. Rust-based packages can usually be built with `cargo`:
```sh
cargo build --release
```
The Rust toolchain provided by BinaryBuilder automatically selects the appropriate target and number of parallel jobs to be used. Note, however, that you may have to manually install the product in the `${prefix}`. Read the installation instructions of the package in case they recommend a different build procedure.
Example of packages using Rust:
* [Tokei](https://github.com/JuliaPackaging/Yggdrasil/blob/ea43d07d264046e8c94a460907bba209a015c10f/T/Tokei/build_tarballs.jl#L14-L15): it uses `cargo build` to compile the program and manually moves the executable to `${bindir}`;
* [Librsvg](https://github.com/JuliaPackaging/Yggdrasil/blob/ea43d07d264046e8c94a460907bba209a015c10f/L/Librsvg/build_tarballs.jl#L35-L45): it uses a build system based on Autoconf which would internally call `cargo build`, but the user has to follow the `./configure` + `make` + `make install` sequence.
!!! warning
    The Rust toolchain currently used does not work with the `i686-w64-mingw32` (32-bit Windows) platform.
## Editing files in the wizard
In the wizard, the `vim` editor is available for editing files. But, it doesn't leave any record in the build script. One generally needs to provide patch files or use something like `sed`. If a file needs patching, we suggest using `git` to add the entire worktree to a new repo, make the changes you need, then use `git diff -p` to output a patch that can be included alongside your build recipe.
You can include local files like patches very easily by placing them within a `bundled/patches` nested directory, and then providing `"./bundled"` as one of the `sources` for your build. See, for example, [`OpenBLAS`](https://github.com/JuliaPackaging/Yggdrasil/tree/8d5a27e24016c0ff2eae379f15dca17e79fd4be4/O/OpenBLAS/OpenBLAS%400.3.13).
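As a sketch of that patch-producing workflow, starting from the unpacked sources (the file and project names are placeholders):

```sh
cd ${WORKSPACE}/srcdir/libfoo-*/
git init && git add -A && git commit -m "pristine sources" # snapshot the worktree
vim src/some_file.c                                        # make the required changes
git diff -p > libfoo-fix.patch                             # record the changes as a patch
```

The resulting patch can then be placed in the `bundled/patches` directory next to your `build_tarballs.jl` and applied in the build script with `atomic_patch -p1`.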
## Automatic environment variables
The following environment variables are automatically set in the build environment and should be used to build the project. Occasionally, you may need to tweak them (e.g., when [Using GCC on macOS and FreeBSD](@ref)).
* `CC`: the C cross compiler
* `CXX`: the C++ cross compiler
* `FC`: the Fortran cross compiler
The above variables point to utilities for the target environment. To reference the utilities for the host environment either prepend `HOST` or append `_HOST`. For example, `HOSTCC` and `CC_HOST` point to the native C compiler.
These are other environment variables that you may occasionally need to set during a build
* `CFLAGS`: options for the C compiler
* `CXXFLAGS`: options for the C++ compiler
* `CPPFLAGS`: options for the C pre-processor
* `LDFLAGS`: options for the linker
* `PKG_CONFIG_PATH`: a colon-separated list of directories to search for `.pc` files
* `PKG_CONFIG_SYSROOT_DIR`: modifies `-I` and `-L` to use the directories located in target sysroot
The following variables are useful to control the build script over different target systems, but are not intended to be modified by the users:
* `prefix`: the path to the top-directory of where all the products should be installed. This will be the top-directory of the generated tarball
* `libdir`: the path to the directory where the shared libraries should be installed. This is `${prefix}/bin` when building for Windows, `${prefix}/lib` for all other platforms
* `bindir`: the path to the directory where the executables should be installed. This is equivalent to `${prefix}/bin`
* `includedir`: the path to the directory where the header files should be installed. This is equivalent to `${prefix}/include`
* similar variables, with analogous meaning, exist for the host prefix (where [`HostBuildDependency`](@ref) are installed): `${host_prefix}`, `${host_bindir}`, `${host_libdir}`, `${host_includedir}`
* `target`: the target platform
* `bb_full_target`: the full target platform, containing things like libstdc++ string ABI platform tags, and libgfortran version
* `MACHTYPE`: the triplet of the host platform
* `nproc`: the number of processors of the host machine, useful for parallel building (e.g., `make -j${nproc}`)
* `nbits`: number of bits of the target architecture (usually it is either 32 or 64)
* `proc_family`: target processor family (e.g., "intel", "power", or "arm")
* `dlext`: extension of the shared library on the target system. It is "dll" for Windows, "dylib" for macOS, and "so" for the other Unix systems
* `exeext`: extension of the executable on the target system, including the dot if present. It is ".exe" for Windows and the empty string "" for all the other target platforms
* `SRC_NAME`: name of the project being built
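As a brief sketch of how several of these variables combine in practice (`libfoo` and `fooifier` are placeholder names):

```sh
# A shared library goes into ${libdir}, with the target's library extension
${CC} ${CFLAGS} -shared -fPIC foo.c -o "${libdir}/libfoo.${dlext}"
# An executable goes into ${bindir}; ${exeext} is ".exe" on Windows and
# empty elsewhere, so it can always be appended
${CC} ${CFLAGS} fooifier.c -o "${bindir}/fooifier${exeext}" ${LDFLAGS}
```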
## Using GCC on macOS and FreeBSD
For these target systems Clang is the default compiler, however some programs may not be compatible with Clang.
For programs built with CMake (see the [CMake build](#CMake-builds-1) section) you can use the GCC toolchain file that is in `${CMAKE_TARGET_TOOLCHAIN%.*}_gcc.cmake`.
For programs built with Meson (see the [Meson build](#Meson-builds-1) section) you can use the GCC toolchain file that is in `${MESON_TARGET_TOOLCHAIN%.*}_gcc.meson`.
If the project that you want to build uses the GNU Build System (also known as the Autotools), there isn't an automatic switch to use GCC, but you have to set the appropriate variables. For example, this setting can be used to build most C/C++ programs with GCC for FreeBSD and macOS:
```sh
if [[ "${target}" == *-freebsd* ]] || [[ "${target}" == *-apple-* ]]; then
    CC=gcc
    CXX=g++
fi
```
## [Linking to BLAS/LAPACK libraries](@id link-blas)
Many numerical libraries link to [BLAS](https://en.wikipedia.org/wiki/Basic_Linear_Algebra_Subprograms)/[LAPACK](https://en.wikipedia.org/wiki/LAPACK) libraries to execute optimised linear algebra routines.
It is important to understand that the elements of the arrays manipulated by these libraries can be indexed by either 32-bit integer numbers ([LP64](https://en.wikipedia.org/wiki/64-bit_computing#64-bit_data_models)), or 64-bit integers (ILP64).
For example, Julia itself employs BLAS libraries for linear algebra, and it expects the ILP64 model on 64-bit platforms (e.g. the `x86_64` and `aarch64` architectures) and the LP64 model on 32-bit platforms (e.g. the `i686` and `armv7l` architectures).
Furthermore, Julia comes by default with [`libblastrampoline`](https://github.com/JuliaLinearAlgebra/libblastrampoline), a library which doesn't implement itself any BLAS/LAPACK routine but it forwards all BLAS/LAPACK function calls to another library (by default OpenBLAS) which can be designated at runtime, allowing you to easily switch between different backends if needed.
`libblastrampoline` provides both ILP64 and LP64 interfaces on 64-bit platforms; in the former case BLAS function calls are expected to carry a `_64` suffix appended to the standard BLAS names.
If in your build you need to link a package against a BLAS/LAPACK library, you have the following options:
* use ILP64 interface on 64-bit systems and LP64 interface on 32-bit ones, just like Julia itself.
In this case, when targeting 64-bit systems you will need to make sure all BLAS/LAPACK function calls in the package you want to build will follow the expected naming convention of using the `_64` suffix, something which most packages would not do automatically.
The build systems of some packages (e.g. [`OpenBLAS`](https://github.com/JuliaPackaging/Yggdrasil/blob/b7f5e3c48f292078bbed4c9fdad071da7875c0bc/O/OpenBLAS/common.jl#L125) and [`SuiteSparse`](https://github.com/JuliaPackaging/Yggdrasil/blob/master/S/SuiteSparse/SuiteSparse%407/build_tarballs.jl#L30-L39)) provide this option out-of-the-box, but in most cases you will need to rename the symbols manually using the preprocessor, see for example the [`armadillo`](https://github.com/JuliaPackaging/Yggdrasil/blob/b7f5e3c48f292078bbed4c9fdad071da7875c0bc/A/armadillo/build_tarballs.jl#L29-L41) recipe.
If you are ready to use ILP64 interface on 64-bit systems, you can choose different libraries to link to:
- `libblastrampoline`, using the `libblastrampoline_jll` dependency.
This is the recommended solution, as it is also what is used by Julia itself, it does not introduce new dependencies, a default backing BLAS/LAPACK is always provided, and also the package you are building can take advantage of `libblastrampoline`'s mechanism to switch between different BLAS/LAPACK backend for optimal performance.
A couple of caveats to be aware of:
* for compatibility reasons it's recommended to use
```julia
Dependency("libblastrampoline_jll"; compat="5.4.0")
```
as dependency and also pass `julia_compat="1.9"` as keyword argument to the [`build_tarballs`](@ref) function
* to link to `libblastrampoline` you should use `-lblastrampoline` when targeting Unix systems, and `-lblastrampoline-5` (`5` being the major version of the library) when targeting Windows.
  - link directly to other libraries which provide the ILP64 interface on 64-bit systems and the LP64 interface on 32-bit systems, like `OpenBLAS_jll` (which is what is used by default by Julia to back `libblastrampoline`); but once you have made the effort to respect the ILP64 interface, linking to `libblastrampoline` may be more convenient
* always use LP64 interface, also on 64-bit systems.
  This may be a simpler option if renaming the BLAS/LAPACK symbols is too cumbersome in your case.
In terms of libraries to link to:
  - also in this case you can link to `libblastrampoline`, however you _must_ make sure an LP64 BLAS/LAPACK library is backing `libblastrampoline`, otherwise all BLAS/LAPACK calls from the library will result in hard-to-debug segmentation faults, because in this case Julia does not provide a default backing LP64 BLAS/LAPACK library on 64-bit systems
- alternatively, you can use builds of BLAS/LAPACK libraries which always use LP64 interface also on 64-bit platforms, like the package `OpenBLAS32_jll`.
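For example, here is a hedged sketch of building a Fortran shared library that links against `libblastrampoline`, handling the different library name on Windows noted above (the source and library names are placeholders):

```sh
# The library name carries the major version when targeting Windows
if [[ "${target}" == *mingw* ]]; then
    LBT=blastrampoline-5
else
    LBT=blastrampoline
fi
${FC} -shared -fPIC mysolver.f90 -o "${libdir}/libmysolver.${dlext}" -L"${libdir}" -l${LBT}
```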
## Dependencies for the target system vs host system
BinaryBuilder provides a cross-compilation environment, which means that in general there is a distinction between the target platform (where the build binaries will eventually run) and the host platform (where compilation is currently happening). In particular, inside the build environment in general you cannot run binary executables built for the target platform.
For a build to work there may be different kinds of dependencies, for example:
* binary libraries that the final product of the current build (binary executables or other libraries) will need to link to. These libraries must have been built for the target platform. You can install this type of dependency as [`Dependency`](@ref), which will also be a dependency of the generated JLL package. This is the most common class of dependencies;
* binary libraries or non-binary executables (usually shell scripts that can actually be run inside the build environment) for the target platform that are exclusively needed during the build process, but not for the final product of the build to run on the target system. You can install this type of dependency as [`BuildDependency`](@ref). Remember they will _not_ be added as dependency of the generated JLL package;
* binary executables that are exclusively needed to be run during the build process. They cannot generally have been built for the target platform, so they cannot be installed as `Dependency` or `BuildDependency`. However you have two options:
* if they are available in a JLL package for the `x86_64-linux-musl` platform, you can install them as [`HostBuildDependency`](@ref). In order to keep binaries for the target platform separated from those for the host system, these dependencies will be installed under `${host_prefix}`, in particular executables will be present under `${host_bindir}` which is automatically added to the `${PATH}` environment variable;
* if they are present in Alpine Linux repositories, you can install them with the system package manager [`apk`](https://wiki.alpinelinux.org/wiki/Alpine_Linux_package_management).
Remember that this class of dependencies is built for the host platform: if the library you want to build for the target platform requires another binary library to link to, installing it as `HostBuildDependency` or with `apk` will not help.
You need to understand the build process of package you want to compile in order to know what of these classes a dependency belongs to.
## Installing the license file
Generated tarballs should come with the license of the library that you want to install. If at the end of a successful build there is only one directory inside `${WORKSPACE}/srcdir`, BinaryBuilder will look into it for files with typical names for license (like `LICENSE`, `COPYRIGHT`, etc... with some combinations of extensions) and automatically install them to `${prefix}/share/licenses/${SRC_NAME}/`. If in the final tarball there are no files in this directory a warning will be issued, to remind you to provide a license file.
If the license file is not automatically installed (for example because there is more than one directory in `${WORKSPACE}/srcdir` or because the file name doesn't match the expected pattern) you have to manually install the file. In the build script you can use the `install_license` command. See the [Utilities in the build environment](@ref utils_build_env) section below.
## [Utilities in the build environment](@id utils_build_env)
In addition to the standard Unix tools, in the build environment there are some extra commands provided by BinaryBuilder. Here is a list of some of these commands:
* `atomic_patch`: utility to apply patches. It is similar to the standard `patch`, but it fails gracefully when a patch cannot be applied:
```sh
atomic_patch -p1 /path/to/file.patch
```
* `flagon`: utility to translate some compiler-flags to the one required on the current platform. For example, to build a shared library from a static archive:
```sh
cc -o "${libdir}/libfoo.${dlext}" -Wl,$(flagon --whole-archive) libfoo.a -Wl,$(flagon --no-whole-archive) -lm
```
The currently supported flags are:
* `--whole-archive`;
* `--no-whole-archive`;
* `--relative-rpath-link`.
* `install_license`: utility to install a file to `${prefix}/share/licenses/${SRC_NAME}`:
```sh
install_license ${WORKSPACE}/srcdir/THIS_IS_THE_LICENSE.md
```
* `update_configure_scripts`: utility to update autoconfigure scripts. Sometimes libraries come with out-of-date autoconfigure scripts (e.g., old `configure.sub` can't recognise `aarch64` platforms or systems using Musl C library). Just run
```sh
update_configure_scripts
```
to get a newer version. With the `--reconf` flag, it also runs `autoreconf -i -f` afterwards:
```sh
update_configure_scripts --reconf
```
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | docs | 22794 | # Building Packages
A `BinaryBuilder.jl` build script (what is often referred to as a `build_tarballs.jl` file) looks something like this:
```julia
using BinaryBuilder
name = "libfoo"
version = v"1.0.1"
sources = [
ArchiveSource("<url to source tarball>", "sha256 hash"),
]
script = raw"""
cd ${WORKSPACE}/srcdir/libfoo-*
make -j${nproc}
make install
"""
platforms = supported_platforms()
products = [
LibraryProduct("libfoo", :libfoo),
ExecutableProduct("fooifier", :fooifier),
]
dependencies = [
Dependency("Zlib_jll"),
]
build_tarballs(ARGS, name, version, sources, script, platforms, products, dependencies)
```
The [`build_tarballs`](@ref) function takes in the variables defined above and
runs the builds, placing output tarballs into the `./products` directory, and
optionally generating and publishing the [JLL package](./jll.md). Let's see in
more details what are the ingredients of the builder.
## Name
This is the name that will be used in the tarballs and for the JLL package. It
should be the name of the upstream package, not for example that of a specific
library or executable provided by it, even though they may coincide. The case
of the name should match that of the upstream package. Note that the name
should be a valid Julia identifier, so it has meet some requirements, among
which:
* it cannot start with a number,
* it cannot have spaces, dashes, or dots in the name. You can use underscores
to replace them.
If you are unsure, you can use `Base.isidentifier` to check whether the name is
acceptable:
```julia
julia> Base.isidentifier("valid_package_name")
true
julia> Base.isidentifier("100-invalid package.name")
false
```
Note that `_jll` will be automatically appended to the name of the generated JLL
package.
## Version number
This is the version number used in tarballs and should coincide with the version
of the upstream package. However, note that this should only contain major,
minor and patch numbers, so
```julia
julia> v"1.2.3"
v"1.2.3"
```
is acceptable, but
```julia
julia> v"1.2.3-alpha"
v"1.2.3-alpha"
julia> v"1.2.3+3"
v"1.2.3+3"
```
or a version including more than three levels (e.g., `1.2.3.4`) are not.
Truncate the version to the patch number if necessary.
The generated JLL package will automatically add a build number, increasing it
for each rebuild of the same package version.
## Sources
The sources are what will be compiled with the build script. They will be
placed under `${WORKSPACE}/srcdir` inside the build environment. Sources can be
of the following types:
* [`ArchiveSource`](@ref): a compressed archive (e.g., `tar.gz`, `tar.bz2`,
`tar.xz`, `zip`) that will be downloaded and automatically uncompressed;
* [`GitSource`](@ref): a git repository that will be automatically cloned. The
specified revision will be checked out;
* [`FileSource`](@ref): a generic file that will be downloaded from the
Internet, without special treatment;
* [`DirectorySource`](@ref): a local directory whose content will be copied in
`${WORKSPACE}/srcdir`. This usually contains local patches used to
non-interactively edit files in the source code of the package you want to
build.
Example of packages with multiple sources of different types:
* [`libftd2xx`](https://github.com/JuliaPackaging/Yggdrasil/blob/62d44097a26fe338763da8263b36ce6a63e7fa9c/L/libftd2xx/build_tarballs.jl#L9-L29).
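For reference, here is a sketch of a `sources` vector mixing the types listed above (URLs, hashes, and revision are placeholders):

```julia
sources = [
    # A compressed archive, automatically unpacked into ${WORKSPACE}/srcdir
    ArchiveSource("<url to source tarball>", "<sha256 hash>"),
    # A git repository, checked out at the given revision
    GitSource("<url to git repository>", "<40-character SHA1 revision>"),
    # Local files, e.g. patches, copied into ${WORKSPACE}/srcdir
    DirectorySource("./bundled"),
]
```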
Sources are not to be confused with the [binary
dependencies](#Binary-dependencies-1).
!!! note
    Each builder should build a single package: don't use multiple sources to
    bundle multiple packages into a single recipe. Instead, build each package
    separately, and use them as binary dependencies as appropriate. This will
    increase reusability of packages.
## Build script
The script is a bash script executed within the build environment, which is a
`x86_64` Linux environment using the Musl C library, based on Alpine Linux
(triplet: `x86_64-linux-musl`). The section [Build Tips](./build_tips.md)
provides more details about what you can usually do inside the build script.
## Platforms
The builder should also specify the list of platforms for which you want to
build the package. At the time of writing, we support Linux (`x86_64`, `i686`,
`armv6l`, `armv7l`, `aarch64`, `ppc64le`), Windows (`x86_64`, `i686`), macOS
(`x86_64`, `aarch64`), and FreeBSD (`x86_64`, `aarch64`). When possible, we try
to build for all supported platforms, in which case you can set
```julia
platforms = supported_platforms()
```
You can get the list of the supported platforms and their associated _triplets_
by using the functions `supported_platforms` and `triplet`:
```@repl
using BinaryBuilder
supported_platforms()
triplet.(supported_platforms())
```
The triplet of the platform is used in the name of the tarball generated.
For some packages, (cross-)compilation may not be possible for all those
platforms, or you may be interested in building the package only for a subset of
them. Examples of packages built only for some platforms are
* [`libevent`](https://github.com/JuliaPackaging/Yggdrasil/blob/eb3728a2303c98519338fe0be370ef299b807e19/L/libevent/build_tarballs.jl#L24-L36);
* [`Xorg_libX11`](https://github.com/JuliaPackaging/Yggdrasil/blob/eb3728a2303c98519338fe0be370ef299b807e19/X/Xorg_libX11/build_tarballs.jl#L29):
this is built only for Linux and FreeBSD systems, automatically filtered from
`supported_platforms`, instead of listing the platforms explicitly.
### Expanding C++ string ABIs or libgfortran versions
Building libraries is not a trivial task and entails a lot of compatibility
issues, some of which are detailed in [Tricksy Gotchas](./tricksy_gotchas.md).
You should be aware of two incompatibilities in particular:
* The standard C++ library that comes with GCC can have one of [two incompatible
ABIs](https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html) for
`std::string`, an old one usually referred to as C++03 string ABI, and a newer
one conforming to the 2011 C++ standard.
  !!! note
      This ABI does *not* have to do with the C++ standard used by the source
      code, in fact you can build a C++03 library with the C++11 `std::string`
      ABI and a C++11 library with the C++03 `std::string` ABI. This is
      achieved by appropriately setting the `_GLIBCXX_USE_CXX11_ABI` macro.
This means that when building with GCC a C++ library or program which exposes
the `std::string` ABI, you must make sure that the user will run a binary
matching their `std::string` ABI. You can manually specify the `std::string`
ABI in the `compiler_abi` part of the platform, but `BinaryBuilder` lets you
automatically expand the list of platform to include an entry for the C++03
`std::string` ABI and another one for the C++11 `std::string` ABI, by using
the [`expand_cxxstring_abis`](@ref) function:
```jldoctest
julia> using BinaryBuilder
julia> platforms = [Platform("x86_64", "linux")]
1-element Vector{Platform}:
Linux x86_64 {libc=glibc}
julia> expand_cxxstring_abis(platforms)
2-element Vector{Platform}:
Linux x86_64 {cxxstring_abi=cxx03, libc=glibc}
Linux x86_64 {cxxstring_abi=cxx11, libc=glibc}
```
Example of packages dealing with the C++ `std::string` ABIs are:
* [`GEOS`](https://github.com/JuliaPackaging/Yggdrasil/blob/1ba8f726810ba5315f686ef0137469a9bf6cca2c/G/GEOS/build_tarballs.jl#L33):
  expands the C++ `std::string` ABIs for all supported platforms;
* [`Bloaty`](https://github.com/JuliaPackaging/Yggdrasil/blob/14ee948c38385fc4dfd7b6167885fa4005b5da35/B/Bloaty/build_tarballs.jl#L37):
builds the package only for some platforms and expands the C++ `std::string`
ABIs;
* [`libcgal_julia`](https://github.com/JuliaPackaging/Yggdrasil/blob/b73815bb1e3894c9ed18801fc7d62ad98fd9f8ba/L/libcgal_julia/build_tarballs.jl#L52-L57):
builds only for platforms with C++11 `std::string` ABI.
* The `libgfortran` that comes with GCC changed the ABI in a
backward-incompatible way in the 6.X -> 7.X and the 7.X -> 8.X transitions.
This means that when you build a package that will link to `libgfortran`, you
must be sure that the user will use a package linking to a `libgfortran`
version compatible with their own. Also in this case you can either manually
specify the `libgfortran` version in the `compiler_abi` part of the platform
or use a function, [`expand_gfortran_versions`](@ref), to automatically expand
the list of platform to include all possible `libgfortran` versions:
```jldoctest
julia> using BinaryBuilder
julia> platforms = [Platform("x86_64", "linux")]
1-element Vector{Platform}:
Linux x86_64 {libc=glibc}
julia> expand_gfortran_versions(platforms)
3-element Vector{Platform}:
Linux x86_64 {libc=glibc, libgfortran_version=3.0.0}
Linux x86_64 {libc=glibc, libgfortran_version=4.0.0}
Linux x86_64 {libc=glibc, libgfortran_version=5.0.0}
```
Example of packages expanding the `libgfortran` versions are:
* [`OpenSpecFun`](https://github.com/JuliaPackaging/Yggdrasil/blob/4f20fd7c58f6ad58911345adec74deaa8aed1f65/O/OpenSpecFun/build_tarballs.jl#L34): expands the `libgfortran`
versions for all supported platforms;
* [`LibAMVW`](https://github.com/JuliaPackaging/Yggdrasil/blob/dbc6aa9dded5ae2fe967f262473f77f7e75f6973/L/LibAMVW/build_tarballs.jl#L65-L73):
builds the package only for some platforms and expands the `libgfortran`
versions.
Note that whether you need to build for different C++ string ABIs or libgfortran
versions depends exclusively on whether the products of the current build expose
the `std::string` ABI or directly link to `libgfortran`. The fact that some of
the dependencies need to expand the C++ string ABIs or libgfortran versions is
not relevant for the current build recipe and BinaryBuilder will take care of
installing libraries with matching ABI.
Don't worry if you don't know whether you need to expand the list of platforms
for the C++ `std::string` ABIs or the libgfortran versions: this is often not
possible to know in advance without thoroughly reading the source code or
actually building the package. In any case the audit will inform you if you
have to use these `expand-*` functions.
### Platform-independent packages
`BinaryBuilder.jl` is particularly useful to build packages involving shared
libraries and binary executables. There is little benefit in using this package
to build a package that would be platform-independent, for example to install a
dataset to be used in a Julia package on the user's machine. For this purpose a
simple
[`Artifacts.toml`](https://julialang.github.io/Pkg.jl/v1/artifacts/#Artifacts.toml-files-1)
file generated with
[`create_artifact`](https://julialang.github.io/Pkg.jl/v1/artifacts/#Using-Artifacts-1)
would do exactly the same job. Nevertheless, there are cases where a
platform-independent JLL package would still be useful, for example to build a
package containing only header files that will be used as dependency of other
packages. To build a platform-independent package you can use the special
platform [`AnyPlatform`](@ref):
```julia
platforms = [AnyPlatform()]
```
Within the build environment, an `AnyPlatform` looks like `x86_64-linux-musl`,
but this shouldn't affect your build in any way. Note that when building a
package for `AnyPlatform` you can only have products of type `FileProduct`, as
all other types are platform-dependent. The JLL package generated for an
`AnyPlatform` is
[platform-independent](https://julialang.github.io/Pkg.jl/v1/artifacts/#Artifact-types-and-properties-1)
and can thus be installed on any machine.
Example of builders using `AnyPlatform`:
* [`OpenCL_Headers`](https://github.com/JuliaPackaging/Yggdrasil/blob/1e069da9a4f9649b5f42547ced7273c27bd2db30/O/OpenCL_Headers/build_tarballs.jl);
* [`SPIRV_Headers`](https://github.com/JuliaPackaging/Yggdrasil/blob/1e069da9a4f9649b5f42547ced7273c27bd2db30/S/SPIRV_Headers/build_tarballs.jl).
## Products
The products are the files expected to be present in the generated tarballs. If
a product is not found in the tarball, the build will fail. Products can be of
the following types:
* [`LibraryProduct`](@ref): this represent a shared library;
* [`ExecutableProduct`](@ref): this represent a binary executable program.
Note: this cannot be used for interpreted scripts;
* [`FrameworkProduct`](@ref) (only when building for `MacOS`): this represents a
[macOS
framework](https://en.wikipedia.org/wiki/Bundle_(macOS)#macOS_framework_bundles);
* [`FileProduct`](@ref): a file of any type, with no special treatment.
The audit will perform a series of sanity checks on the products of the builder,
with the exclusion of `FileProduct`s, also trying to automatically fix some common
issues.
You don't need to list as products _all_ files that will end up in the tarball,
but only those you want to make sure are there and on which you want the audit
to perform its checks. This usually includes the shared libraries and the
binary executables. If you are also generating a JLL package, the products will
have some variables that make it easy to reference them. See the documentation
of [JLL packages](./jll.md) for more information about this.
Packages listing products of different types:
* [`Fontconfig`](https://github.com/JuliaPackaging/Yggdrasil/blob/eb3728a2303c98519338fe0be370ef299b807e19/F/Fontconfig/build_tarballs.jl#L57-L69).
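For instance, here is a sketch of a `products` vector covering the main types (all names are placeholders):

```julia
products = [
    # A shared library: "libfoo" matches libfoo.so/.dylib/.dll as appropriate
    LibraryProduct("libfoo", :libfoo),
    # A binary executable; the ".exe" extension on Windows is handled automatically
    ExecutableProduct("fooifier", :fooifier),
    # A generic file, installed relative to ${prefix}, exempt from audit checks
    FileProduct("share/foo.conf", :foo_conf),
]
```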
## Binary dependencies
A build script can depend on binaries generated by another builder. A builder
specifies `dependencies` in the form of previously-built JLL packages:
```julia
# Dependencies of Xorg_xkbcomp
dependencies = [
Dependency("Xorg_libxkbfile_jll"),
BuildDependency("Xorg_util_macros_jll"),
]
```
* [`Dependency`](@ref) specify a JLL package that is necessary to build and load
the current builder. Binaries for the target platform will be installed;
* [`RuntimeDependency`](@ref): a JLL package that is necessary only at runtime. Its
artifact will not be installed in the prefix during the build.
* [`BuildDependency`](@ref) is a JLL package necessary only to build the current
package, but not to load it. This dependency will install binaries for the
target platforms and will not be added to the list of the dependencies of the
generated JLL package;
* [`HostBuildDependency`](@ref): similar to `BuildDependency`, but it will
install binaries for the host system. This kind of dependency is usually
added to provide some binary utilities to run during the build process.
The argument of `Dependency`, `RuntimeDependency`, `BuildDependency`, and
`HostBuildDependency` can also be a `Pkg.PackageSpec`, with which you can
specify more details about the dependency, like a version number, or also a
non-registered package. Note that in Yggdrasil only JLL packages in the
[General registry](https://github.com/JuliaRegistries/General) can be accepted.
The dependencies for the target system (`Dependency` and `BuildDependency`) will
be installed under `${prefix}` within the build environment, while the
dependencies for the host system (`HostBuildDependency`) will be installed under
`${host_prefix}`.
In the wizard, dependencies can be specified with the prompt: *Do you require
any (binary) dependencies? [y/N]*.
Examples of builders that depend on other binaries include:
* [`Xorg_libX11`](https://github.com/JuliaPackaging/Yggdrasil/blob/eb3728a2303c98519338fe0be370ef299b807e19/X/Xorg_libX11/build_tarballs.jl#L36-L42)
depends on `Xorg_libxcb_jll`, and `Xorg_xtrans_jll` at build- and run-time,
and on `Xorg_xorgproto_jll` and `Xorg_util_macros_jll` only at build-time.
### Platform-specific dependencies
By default, all dependencies are used for all platforms, but there are some
cases where a package requires some dependencies only on some platforms. You
can specify the platforms where a dependency is needed by passing the
`platforms` keyword argument to the dependency constructor, which is the vector
of `AbstractPlatforms` where the dependency should be used.
For example, assuming that the variable `platforms` holds the vector of the
platforms for which to build your package, you can specify that `Package_jll` is
required on all platforms excluding Windows with
```julia
Dependency("Package_jll"; platforms=filter(!Sys.iswindows, platforms))
```
The information that a dependency is only needed on some platforms is
transferred to the JLL package as well: the wrappers will load the
platform-dependent JLL dependencies only when needed.
!!! warning
    Julia's package manager doesn't have the concept of optional (or
    platform-dependent) dependencies: this means that when installing a JLL
    package in your environment, all of its dependencies will always be
    installed as well in any case. It's only at runtime that platform-specific
    dependencies will be loaded where necessary.

    For the same reason, even if you specify that a dependency is not needed
    for a platform, the build recipe may still pull it in if that's also an
    indirect dependency required by some other dependencies. At the moment
    `BinaryBuilder.jl` isn't able to propagate the information that a dependency
    is platform-dependent when installing the artifacts of the dependencies.
Examples:
* [`ADIOS2`](https://github.com/JuliaPackaging/Yggdrasil/blob/0528e0f31b55355df632c79a2784621583443d9c/A/ADIOS2/build_tarballs.jl#L122-L123)
uses `MPICH_jll` to provide an MPI implementations on all platforms excluding
Windows, and `MicrosoftMPI_jll` for Windows.
* [`GTK3`](https://github.com/JuliaPackaging/Yggdrasil/blob/0528e0f31b55355df632c79a2784621583443d9c/G/GTK3/build_tarballs.jl#L70-L104)
uses the X11 software stack only on Linux and FreeBSD platforms, and Wayland
only on Linux.
* [`NativeFileDialog`](https://github.com/JuliaPackaging/Yggdrasil/blob/0528e0f31b55355df632c79a2784621583443d9c/N/NativeFileDialog/build_tarballs.jl#L40-L44)
uses GTK3 only on Linux and FreeBSD, on all other platforms it uses system
libraries, so no other packages are needed in those cases.
### Version number of dependencies
There are two different ways to specify the version of a dependency, with two
different meanings:
* `Dependency("Foo_jll", v"1.2.3")`: the second argument of `Dependency`
specifies the version of the package to be used for building: this version *is
not* reflected in a compatibility bound in the project of the generated JLL
package. This is useful when the package you want to build is compatible with
all the versions of the dependency starting from the given one (and then you
don't want to restrict compatibility bounds of the JLL package), but to
maximize compatibility you want to build against the oldest compatible
version.
* `Dependency(PackageSpec(; name="Foo_jll", version=v"1.2.3"))`: if the package
is given as a `Pkg.PackageSpec` and the `version` keyword argument is given,
this version of the package is used for the build *and* the generated JLL
package will be compatible with the provided version of the package. This
should be used when your package is compatible only with a single version of
the dependency, a condition that you want to reflect also in the project of the
JLL package (see the side-by-side example below).
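Side by side, the two forms might look like this (names and versions are placeholders):

```julia
dependencies = [
    # Build against Foo_jll v1.2.3, without constraining the JLL's compat
    Dependency("Foo_jll", v"1.2.3"),
    # Build against Bar_jll v2.0.0 *and* record it as a compatibility bound
    Dependency(PackageSpec(; name="Bar_jll", version=v"2.0.0")),
]
```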
# Building and testing JLL packages locally
As a package developer, you may want to test JLL packages locally, or as a binary dependency
developer you may want to easily use custom binaries. Through a combination of `dev`'ing out
the JLL package and creating an `override` directory, it is easy to get complete control over
the local JLL package state.
## Overriding a prebuilt JLL package's binaries
After running `pkg> dev LibFoo_jll`, a local JLL package will be checked out to your depot's
`dev` directory (on most installations this is `~/.julia/dev`) and by default the JLL package
will make use of binaries within your depot's `artifacts` directory. If an `override`
directory is present within the JLL package directory, the JLL package will look within that
`override` directory for binaries, rather than in any artifact directory. Note that there is
no mixing and matching of binaries within a single JLL package; if an `override` directory is
present, all products defined within that JLL package must be found within the `override`
directory, none will be sourced from an artifact. Dependencies (e.g. found within another
JLL package) may still be loaded from their respective artifacts, so dependency JLLs must
themselves be `dev`'ed and have `override` directories created with files or symlinks
created within them.
### Auto-populating the `override` directory
To ease creation of an `override` directory, JLL packages contain a `dev_jll()` function,
that will ensure that a `~/.julia/dev/<jll name>` package is `dev`'ed out, and will copy the
normal artifact contents into the appropriate `override` directory. This will result in no
functional difference from simply using the artifact directory, but provides a template of
files that can be replaced by custom-built binaries.
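For example, assuming a JLL package named `LibFoo_jll` whose current version already provides the function:

```julia
julia> using LibFoo_jll

julia> LibFoo_jll.dev_jll()
```

Afterwards the `dev`'ed copy lives at `~/.julia/dev/LibFoo_jll`, and its `override` directory contains a copy of the artifact's files, ready to be replaced with custom builds.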
Note that this feature is rolling out to new JLL packages as they are rebuilt; if a JLL
package does not have a `dev_jll()` function, [open an issue on Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil/issues/new)
and a new JLL version will be generated to provide the function.
## Building a custom JLL package locally
When building a new version of a JLL package, if `--deploy` is passed to
`build_tarballs.jl` then a newly-built JLL package will be deployed to a GitHub
repository. (Read the documentation in the [Command Line](@ref) section or
given by passing `--help` to a `build_tarballs.jl` script for more on `--deploy`
options). If `--deploy=local` is passed, the JLL package will still be built in
the `~/.julia/dev/` directory, but it will not be uploaded anywhere. This is
useful for local testing and validation that the built artifacts are working
with your package.
If you want to build a JLL package only for your current platform,
you can use `platforms = [HostPlatform()]` in the `build_tarballs.jl` script.
You can also provide the target triplet `Base.BinaryPlatforms.host_triplet()`
if you run the script on the command line.
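Putting this together, a sketch of a local-only build for just the current machine could look like the following (using `triplet(HostPlatform())` from `Base.BinaryPlatforms` to compute the host triplet):

```
julia build_tarballs.jl --verbose --deploy=local $(julia -e 'using Base.BinaryPlatforms; print(triplet(HostPlatform()))')
```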
## Deploying local builds without recreating the tarballs
Sometimes all tarballs have already been created successfully locally but not
deployed to GitHub. This can happen, e.g., if it is tricky to figure out the
correct build script for all platforms, or if each platform build takes a long
time. In this case, it is possible to skip the build process and just deploy
the JLL package by providing the `--skip-build` flag to the `build_tarballs.jl`
script. Read the help (`--help`) for more information.
| BinaryBuilder | https://github.com/JuliaPackaging/BinaryBuilder.jl.git |
|
[
"MIT"
] | 0.6.0 | 41afbecf4115126df89a3bed12ec51a0bcacd4ac | docs | 1937 | # Environment Variables
`BinaryBuilder.jl` supports multiple environment variables to modify its behavior globally:
* `BINARYBUILDER_AUTOMATIC_APPLE`: when set to `true`, this automatically agrees to the Apple macOS SDK license agreement, enabling the building of binary objects for macOS systems.
* `BINARYBUILDER_USE_SQUASHFS`: when set to `true`, this uses `.squashfs` images instead of tarballs to download cross-compiler shards. This consumes significantly less space on-disk and boasts a modest reduction in download size as well, but requires `sudo` on the local machine to mount the `.squashfs` images. This is the default when using the "privileged" runner.
* `BINARYBUILDER_RUNNER`: When set to a runner string, alters the execution engine that `BinaryBuilder.jl` will use to wrap the build process in a sandbox. Valid values are one of `"userns"`, `"privileged"` and `"docker"`. If not given, `BinaryBuilder.jl` will do its best to guess.
* `BINARYBUILDER_ALLOW_ECRYPTFS`: When set to `true`, this allows the mounting of rootfs/shard/workspace directories from within encrypted mounts. This is disabled by default, as at the time of writing, this triggers kernel bugs. To avoid these kernel bugs on a system where e.g. the home directory has been encrypted, set the `BINARYBUILDER_ROOTFS_DIR` and `BINARYBUILDER_SHARDS_DIR` environment variables to a path outside of the encrypted home directory.
* `BINARYBUILDER_USE_CCACHE`: When set to `true`, this causes a `/root/.ccache` volume to be mounted within the build environment, and for the `CC`, `CXX` and `FC` environment variables to have `ccache` prepended to them. This can significantly accelerate rebuilds of the same package on the same host. Note that `ccache` will, by default, store 5G of cached data.
* `BINARYBUILDER_NPROC`: Overrides the value of the environment variable `${nproc}` set during a build, see [Automatic environment variables](@ref).
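These can be set in the shell environment before launching a build. For example, a hypothetical invocation that selects the Docker runner and enables `ccache` for a single build:

```
BINARYBUILDER_RUNNER=docker BINARYBUILDER_USE_CCACHE=true julia build_tarballs.jl --verbose
```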
# BinaryBuilder.jl
The purpose of the [`BinaryBuilder.jl`](https://github.com/JuliaPackaging/BinaryBuilder.jl) Julia package is to provide a system for compiling 3rd-party binary dependencies that should work anywhere the [official Julia distribution](https://julialang.org/downloads) does. In particular, using this package you will be able to compile your large pre-existing codebases written in C, C++, Fortran, Rust, Go, etc. into binaries that can be downloaded and loaded/run on a very wide range of machines. As it is difficult (and often expensive) to natively compile software packages across the growing number of platforms that this package will need to support, we focus on providing a set of Linux-hosted cross-compilers. This package will therefore set up an environment to perform cross-compilation for all of the major platforms, and will do its best to make the compilation process as painless as possible.
Note that at this time, BinaryBuilder itself runs on Linux `x86_64` and macOS `x86_64` systems only, with Windows support under active development. On macOS and Windows, you must have `docker` installed as the backing virtualization engine. Note that Docker Desktop is the recommended version; if you have Docker Machine installed it may not work correctly or may need additional configuration.
!!! warning
    This package currently requires Julia v1.7. Contribute to [JuliaPackaging/JLLPrefixes.jl#6](https://github.com/JuliaPackaging/JLLPrefixes.jl/issues/6) if you care about supporting newer versions of Julia.
## Project flow
Suppose that you have a Julia package `Foo.jl` which wants to use a compiled `libfoo` shared library. As your first step in writing `Foo.jl`, you may compile `libfoo` locally on your own machine with your system compiler, then use `Libdl.dlopen()` to open the library and `ccall()` to call into the exported functions. Once you have written your C bindings in Julia, you will naturally desire to share the fruits of your labor with the rest of the world, and this is where `BinaryBuilder` can help you. Not only will `BinaryBuilder` aid you in constructing compiled versions of all your dependencies, but it will also build a wrapper Julia package (referred to as a [JLL package](jll.md)) to aid in installation, versioning, and build product localization.
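For illustration, such hand-written bindings might look something like this (a sketch assuming a locally-compiled `libfoo` that exports `int foo_add(int, int)`; the library path and symbol name are hypothetical):

```julia
using Libdl
# Open the locally-built library (path is hypothetical):
const libfoo = Libdl.dlopen("/usr/local/lib/libfoo.so")
# Wrap the exported C function `int foo_add(int, int)`:
foo_add(a, b) = ccall(Libdl.dlsym(libfoo, :foo_add), Cint, (Cint, Cint), a, b)
foo_add(1, 2)
```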
The first step in the `BinaryBuilder` journey is to create a build recipe, usually named `build_tarballs.jl`. The Julia community curates a tree of build recipes, [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil), that already contains many examples of how to write a `build_tarballs.jl` file. These files contain information such as the name, version and source locations for a particular build, as well as the actual steps (in the form of a `bash` script) and the products that should be generated by the build.
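In outline, a recipe looks roughly like the following sketch (the name, version, URL, and checksum are placeholders; see Yggdrasil for complete, working recipes):

```julia
using BinaryBuilder

name = "libfoo"
version = v"1.0.0"

# Placeholder source URL and sha256 checksum:
sources = [
    ArchiveSource("https://example.com/libfoo-1.0.0.tar.gz",
                  "0000000000000000000000000000000000000000000000000000000000000000"),
]

# The bash script executed inside the build sandbox:
script = raw"""
cd ${WORKSPACE}/srcdir/libfoo-*
./configure --prefix=${prefix} --build=${MACHTYPE} --host=${target}
make -j${nproc}
make install
"""

platforms = supported_platforms()
products = [
    LibraryProduct("libfoo", :libfoo),
]
dependencies = Dependency[]

build_tarballs(ARGS, name, version, sources, script, platforms, products, dependencies)
```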
The result of a successful build is an autogenerated JLL package, typically uploaded to the [JuliaBinaryWrappers](https://github.com/JuliaBinaryWrappers/) github organization. Binaries for each version of every build are uploaded to the GitHub releases page of the relevant JLL package. Finally, a registration request is opened against the `General` Julia registry, so that packages such as the aforementioned `Foo.jl` can simply `pkg> add libfoo_jll` to download the binary artifacts as well as the autogenerated Julia wrapper code. See also the [FAQ](FAQ.md), [build tips](build_tips.md), [build troubleshooting](troubleshooting.md) and [tricksy gotchas](tricksy_gotchas.md) for help with common problems.
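Once that registration is merged, downstream usage is as simple as the following sketch (names hypothetical):

```julia
using libfoo_jll    # installed via `pkg> add libfoo_jll`
# Each LibraryProduct is exported as a handle usable directly in ccall:
ccall((:foo_add, libfoo), Cint, (Cint, Cint), 1, 2)
```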
### Wizard interface
`BinaryBuilder.jl` supports an interactive method for building the binary dependencies and capturing the commands used to build it into a `build_tarballs.jl` file: the Wizard interface. To launch it, run
```julia
using BinaryBuilder
state = BinaryBuilder.run_wizard()
```
and follow the instructions on-screen. You can watch an [asciinema demo](https://asciinema.org/a/304105) of the use of the wizard.
### Manually create or edit `build_tarballs.jl`
The wizard is a great tool, especially to get started with BinaryBuilder and create your first simple recipes for new packages. However, it lacks the full control of all options you can use in a `build_tarballs.jl` script. To generate this file (explained in greater detail in [Building Packages](building.md)), one can clone `Yggdrasil`, copy an existing build recipe, modify it, and submit a new pull request. Manually editing the `build_tarballs.jl` script is also the recommended way when you want to update an existing recipe, rather than starting from scratch with the wizard.
The `build_tarballs.jl` script can be used as a command-line utility: it takes a few options and, as its argument, the list of target triplets. You can find more information about the syntax of the script in the [Command Line](@ref) section or by running
```
julia build_tarballs.jl --help
```
You can build the tarballs with
```
julia build_tarballs.jl --debug --verbose
```
The `--debug` option will drop you into the BinaryBuilder interactive shell if an error occurs. If the build fails, once you have found out the steps needed to fix it, you have to manually update the script in `build_tarballs.jl` and then run the above command again to make sure that everything actually works.
Since `build_tarballs.jl` takes as argument the comma-separated list of [triplets](@ref Platforms) for which to build the tarballs, you can select only a few of them. For example, with
```
julia build_tarballs.jl --debug --verbose aarch64-linux-musl,arm-linux-musleabihf
```
you'll run the build script only for the `aarch64-linux-musl` and `arm-linux-musleabihf` target platforms.
If you decide to use this workflow, however, you will need to manually open pull requests for [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil/).
### GitHub Codespaces
If you already have access to the [GitHub Codespaces](https://github.com/features/codespaces) service, you can use BinaryBuilder and all the workflows described above in your browser or with Visual Studio Code, on any operating system, including those not natively supported by the package! Head to [Yggdrasil](https://github.com/JuliaPackaging/Yggdrasil/) and create a new Codespace.
## How does this all work?
`BinaryBuilder.jl` wraps a [root filesystem](rootfs.md) that has been carefully constructed so as to provide the set of cross-compilers needed to support the wide array of platforms that Julia runs on. This _RootFS_ is then used as the chroot jail for a sandboxed process which runs within the RootFS as if that were the whole world. The workspace containing input source code and (eventually) output binaries is mounted within the RootFS and environment variables are setup such that the appropriate compilers for a particular target platform are used by build tools.
## Reproducibility
> [Reproducible builds](https://reproducible-builds.org/) are a set of software development practices that create an independently-verifiable path from source to binary code.
`BinaryBuilder.jl` puts into place many of the practices needed to achieve reproducible builds.
For example, the building environment is sandboxed and uses a fixed tree structure, thus having a reproducible [build path](https://reproducible-builds.org/docs/build-path/).
The toolchain used by `BinaryBuilder.jl` also sets some [environment variables](https://reproducible-builds.org/docs/source-date-epoch/) and enforces [certain compiler flags](https://reproducible-builds.org/docs/randomness/) which help reproducibility.
While `BinaryBuilder.jl` does not guarantee to always have reproducible builds, it achieves this goal in most cases.
Reproducibility in `BinaryBuilder.jl` includes also the generated tarballs: they are created with [`Tar.jl`](https://github.com/JuliaIO/Tar.jl), which takes [a few measures](https://github.com/JuliaIO/Tar.jl/blob/1de4f92dc1ba4de4b54ac5279ec1d84fb15948f6/README.md#reproducibility) to ensure reproducibility of tarballs with the same git tree hash.
If you rebuild the same package multiple times with the same version of BinaryBuilder, the generated tarball which contains the main products (i.e. not the log files, which are known not to be reproducible) should always have the same git tree hash and SHA256 sum, information which is printed to screen at the end of the build process and stored in the `Artifacts.toml` file of the [JLL package](@ref JLL-packages).
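For example, one way to check a rebuilt tarball against the recorded hash is a sketch like the following (the file name is a placeholder; the gzip-compressed tarball must be decompressed before hashing):

```julia
using Tar, CodecZlib
tree = open(GzipDecompressorStream, "libfoo.v1.0.0.x86_64-linux-gnu.tar.gz") do io
    Tar.tree_hash(io)
end
println(tree)   # compare against the git-tree-sha1 recorded in Artifacts.toml
```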
There are however some caveats:
* reproducibility can only be expected when using the toolchain offered by `BinaryBuilder.jl`;
* there are [very specific cases](https://github.com/JuliaPackaging/BinaryBuilder.jl/issues/1230) where the macOS C/C++ toolchain does not produce reproducible binaries.
This happens when doing debug builds (`-g` flag) _and_ not building object files with deterministic names separately (e.g. if directly building and linking a program or a shared library from the source file, letting the compiler create the intermediate object files automatically with random names).
We have decided not to take action for this case because in practice most packages use build systems which compile intermediate object files with deterministic names (which is also the only way to take advantage of `ccache`, which `BinaryBuilder.jl` uses extensively) and typically do not do debug builds, thus sidestepping the issue entirely.
## Videos and tutorials
BinaryBuilder has been covered in some videos, you may want to check them out if you want to know more about the framework (the date is specified in parentheses, to make it clear how old/new the videos are):
* [10 tips on how to build better binaries](https://www.youtube.com/watch?v=2e0PBGSaQaI): JuliaCon 2018 talk by Elliot Saba introducing an early version of BinaryBuilder (2018-08-09)
* [Introduction to BinaryBuilder.jl](https://www.youtube.com/watch?v=d_h8C4iCzno): live building session by Mosè Giordano (2020-04-10)
* [BinaryBuilder.jl - The Subtle Art of Binaries That Just Work](https://www.youtube.com/watch?v=3IyXsBwqll8): JuliaCon 2020 workshop by Elliot Saba and Mosè Giordano to guide users through the use of BinaryBuilder (2020-07-25)
* [Your first BinaryBuilder.jl recipe with Julia](https://www.youtube.com/watch?v=7fkNcdbt4dg): live building by Miguel Raz Guzmán Macedo (2021-04-07)
* [BinaryBuilder.jl — The Subtle Art of Binaries That "Just Work"](https://bbb.dereferenced.org/playback/presentation/2.3/75a49eebcb63d6fee8c55417ea7cc51768d86f3d-1621065511930): AlpineConf 2021 talk by Elliot Saba and Mosè Giordano, starts at 4:19:00 (2021-05-15)
* [BinaryBuilder.jl — Using Julia's Pkg to deliver binary libraries](https://www.youtube.com/watch?v=S__x3K31qnE): PackagingCon 2021 talk by Mosè Giordano & Elliot Saba (2021-11-10)
* [BinaryBuilder.jl: distributing binary libraries for Julia packages](https://www.youtube.com/watch?v=N9H7KNftFJA): talk at [JuliaHEP 2023 Workshop](https://indico.cern.ch/event/1292759/contributions/5613062/) by Mosè Giordano (2023-11-07)