licenses sequencelengths 1-3 | version stringclasses 677 values | tree_hash stringlengths 40-40 | path stringclasses 1 value | type stringclasses 2 values | size stringlengths 2-8 | text stringlengths 25-67.1M | package_name stringlengths 2-41 | repo stringlengths 33-86 |
---|---|---|---|---|---|---|---|---|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 253 | import Base: ==, -, *, +, append!, convert, copy, delete!, deleteat!,
firstindex, getindex, hash, in, isempty, isequal, lastindex, length,
push!, merge, merge!, read, setindex!, show, size, sizeof, sort!,
sort, summary, write
import DSP: filtfilt
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1064 | # adapted from https://github.com/JuliaMath/KahanSummation.jl
function cumsum_kbn!(v::AbstractVector{T}) where T<:AbstractFloat
s = v[1]
c = zero(T)
@inbounds for i = 2:length(v)
vi = v[i]
t = s + vi
if abs(s) >= abs(vi)
c += ((s-t) + vi)
else
c += ((vi-t) + s)
end
s = t
v[i] = s+c
end
return nothing
end
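# Usage sketch for cumsum_kbn! (illustrative values): an in-place, compensated
# (Kahan-Babuska-Neumaier) cumulative sum that limits roundoff drift on long arrays.
#   v = Float64[0.1, 0.2, 0.3]
#   cumsum_kbn!(v)      # v ≈ [0.1, 0.3, 0.6]; same shape as cumsum, but compensated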
function diff_x!(x::Array{T,1}, gaps::Array{Int64,1}, fs::T) where T<:AbstractFloat
L = length(x)
j = 1
@inbounds while j < length(gaps)
j += 1
si = gaps[j-1]+1
ei = gaps[j]-1
if j == length(gaps)
ei += 1
end
for i = ei:-1:si
x[i] -= x[i-1]
end
end
rmul!(x, fs)
return nothing
end
function int_x!(x::Array{T,1}, gaps::Array{Int64,1}, δ::T) where T<:AbstractFloat
L = length(x)
j = 1
@inbounds while j < length(gaps)
j += 1
si = gaps[j-1]
ei = gaps[j]-1
if j == length(gaps)
ei += 1
end
xv = view(x, si:ei)
cumsum_kbn!(xv)
end
rmul!(x, δ)
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3328 | export ls, regex_find, safe_isdir, safe_isfile
# safe_isfile, safe_isdir adapted from https://github.com/JuliaPackaging/BinaryProvider.jl/commit/08a314a225206a68665c6f730d7c3feeda1ba615
# Temporary hack around https://github.com/JuliaLang/julia/issues/26685
function safe_isfile(path::String)
try
return isfile(path)
catch err
return false
end
end
function safe_isdir(path::String)
try
return isdir(path)
catch err
return false
end
end
"""
regex_find(path::String, r::Regex)
OS-agnostic equivalent to Linux `find`. First argument is a path string, second is a Regex. File strings are postprocessed using Julia's native PCRE Regex engine.
"""
function regex_find(path::String, r::Regex)
path = realpath(path)
if Sys.iswindows()
s = filter(x -> !(isempty(x) || x == path),
String.(split(read(
`powershell -Command "(Get-ChildItem -Path $path -File -Force -Recurse).FullName"`,
String), "\r\n"))
)
s = [replace(i, Base.Filesystem.pathsep() => "/") for i in s]
s2 = s[findall([occursin(r, f) for f in s])]
else
s = filter(x -> !(isempty(x) || x == path),
String.(split(read(
`sh -c "find $path -type f"`,
String), "\n"))
)
s2 = String[]
m = length(path) + 2
for (i,f) in enumerate(s)
s1 = f[m:end]
if occursin(r, s1)
push!(s2, f)
end
end
end
# Julia doesn't seem to handle regex searches in shell
return sort(s2)
end
@doc """
ls(str::String)
Similar functionality to Bash ls -1 with OS-agnostic output. Accepts wildcards.
Always returns full path and file name.
ls()
Return full path and file name of files in current working directory.
""" ls
function ls(s::String)
safe_isfile(s) && return [realpath(s)]
safe_isdir(s) && return [joinpath(realpath(s), i) for i in readdir(s)]
(p,f) = splitdir(s)
if any([occursin(i, s) for i in regex_chars]) || occursin("*", p) || f == "*"
# We're actually going to start at the highest-level directory that is
# uniquely specified, so rather than starting at p from splitdir...
fpat = String.(split(s, "*"))
path, ff = splitdir(fpat[1])
if isempty(ff)
popfirst!(fpat)
#= ...but this can leave us with an empty fpat and regex for that
isn't standardized, so... =#
if isempty(fpat) || fpat == [""]
fpat = [".*"]
end
else
fpat[1] = ff
end
# In case of empty path ... ?
if isempty(path)
path = "."
end
# So we're going to check for matches on all but the first m of each string:
if Sys.iswindows()
for i = 1:length(fpat)
fpat[i] = replace(fpat[i], "." => "\\.")
end
end
ff = join(fpat, ".*")
mpat = Regex(ff * "\$")
s1 = regex_find(path, mpat)
if s1 === nothing
s1 = String[]
end
else
s1 = try
glob(f,p)
catch
String[]
end
# DND DND DND DND DND
# Control for odd behavior of glob in Linux
if !(isempty(s1))
if isempty(p); p = "." ; end
for (i,s) in enumerate(s1)
f = splitdir(s)[2]
s1[i] = joinpath(realpath(p), f)
end
end
# DND DND DND DND DND
end
return s1
end
ls() = [joinpath(realpath("."), i) for i in readdir(pwd())]
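# Usage sketch (hypothetical paths):
#   ls()                    # full paths of all files in the current directory
#   ls("/data/SAC")         # full paths of the directory's contents
#   ls("/data/SAC/*.sac")   # wildcard match, resolved to full paths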
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1758 | export namestrip, namestrip!
#= Strips chars below 0x20 plus these:
# "Filenames" => ['<', '>', ':', '\"', '/', '\\', '|', '?', '*', '^', '$', '@',
'~', '\x7f']
# "SEED" => ['.', '\x7f'] # the period is the SEED field separator
# "HTML" => ['"', '', '&', ';', '<', '>' , '©', '\x7f']
# Markdown => ['!', '#', '(', ')', '*', '+', '-', '.', '[', '\\', ']', '_', '`', '{', '}']
# "Safe" => ['!', '"', '#', '\$', '%', '&', '\'', '*', '.', '/', ':', ';', '<',
'>', '?', '@', '\\', '^', '{', '|', '}', '~', '©', '\x7f']
# "Julia" => ['\$', '\\', '\x7f']
# "Strict" => [' ', '!', '"', '#', '$', '%', '&', '\'', '(', ')', '*', '+', ',',
'-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '\\', '^', '{',
'|', '}', '~', '\x7f']
=#
@doc """
namestrip(s::String, convention::String="File")
Remove bad characters from `s`. Specify one of the following conventions:
* "File" => ['<', '>', ':', '\"', '/', '\\', '|', '?', '*', '^', '\$', '@', '~', '\x7f']
* "HTML" => ['"', '', '&', ';', '<', '>' , '©', '\x7f']
* "Julia" => ['\$', '\\', '\x7f']
* "Markdown" => ['!', '#', '(', ')', '*', '+', '-', '.', '[', '\\', ']', '_', '`', '{', '}']
* "SEED" => ['.', '\x7f']
* "Strict" => [' ', '!', '"', '#', '\$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '?', '@', '\\', '^', '{', '|', '}', '~', '\x7f']
""" namestrip
function namestrip(str::String, convention::String="File")
chars = UInt8.(codeunits(str))
deleteat!(chars, chars.<0x20) # strip non-printing ASCII
if haskey(bad_chars, convention)
deleteat!(chars, [c in bad_chars[convention] for c in chars])
else
deleteat!(chars, [c in bad_chars["File"] for c in chars])
end
return String(chars)
end
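# Usage sketch (illustrative strings; assumes the bad_chars sets listed above):
#   namestrip("UW.SEP..EHZ <test>", "File")   # strips '<', '>' => "UW.SEP..EHZ test"
#   namestrip("UW.SEP..EHZ", "SEED")          # strips '.' => "UWSEPEHZ"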
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2196 | # Faster than Polynomials.jl with less memory allocation + single-point type
# stability; adapted from Octave and
# https://github.com/JuliaMath/Polynomials.jl/blob/master/src/Polynomials.jl
function poly(x::Array{T,1}) where T <: Number
n = length(x)
y = zeros(T, n+1)
y[1] = one(T)
for j = 1:n
y[2:j+1] .-= x[j].*y[1:j]
end
return y
end
function polyval(p::Array{T1,1}, x::T2) where {T1 <: Number, T2 <: Number}
y = T2(p[1])
for i = 2:lastindex(p)
y = p[i] .+ x*y
end
return y
end
function polyval(p::Array{T1,1}, x::Array{T2,1}) where {T1 <: Number, T2 <: Number}
y = ones(T2, length(x)) .* p[1]
for i = 2:length(p)
broadcast!(*, y, y, x)
broadcast!(+, y, y, p[i])
end
return y
end
function polyfit(x::Array{T1,1}, y::Array{T2,1}, n::Integer=1) where {T1 <: Real, T2 <: Real}
nx = length(x)
nx == length(y) || error("SeisIO.polyfit requires length(x) == length(y)")
-1 < n < nx || throw(DomainError)
A = Array{T2, 2}(undef, length(x), n+1)
A[:,n+1] .= one(T2)
for i = n:-1:1
A[:,i] .= A[:,i+1] .* x
end
return A \ y
end
# Convert to Float64 or use improved sum
function linreg(t::Array{Float64,1}, x::AbstractArray{T,1}) where T
n = length(t)
st = sum(t)
sx = sum(x)
stt = dot(t,t)
stx = dot(t,x)
sxx = dot(x,x)
d = n*stt - st*st
b = (stt*sx - st*stx)/d
a = (n*stx - st*sx)/d
return T[a,b]
end
# p = linreg(x, dt)
function linreg(x::AbstractArray{T,1}, dt::Float64) where T
n = length(x)
t = (1:n)*dt
st = sum(t)
sx = sum(x)
stt = dot(t,t)
stx = dot(t,x)
sxx = dot(x,x)
d = n*stt - st*st
b = (stt*sx - st*stx)/d
a = (n*stx - st*sx)/d
return T[a,b]
end
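# Worked sketch of the conventions above (coefficients in descending powers;
# linreg returns [slope, intercept]); illustrative values:
#   p = polyfit([1.0, 2.0, 3.0, 4.0], [3.0, 5.0, 7.0, 9.0], 1)   # ≈ [2.0, 1.0], i.e. 2x + 1
#   polyval(p, 5.0)                                              # ≈ 11.0
#   linreg(collect(1.0:4.0), [3.0, 5.0, 7.0, 9.0])               # ≈ [2.0, 1.0]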
#= CHANGELOG for this file
2019-08-08
* poly, polyval, and polyfit should now always output powers in descending
order, i.e., p^n ... p^0
* BUG fixed: polyval(p::Array{T,1}, x::T) used power ordering of Polynomials.jl;
corrected to be consistent with other routines
* polyfit now allows order n=0 and takes any Integer for n; n=0 returns the mean
2019-08-19
* added linreg for low-memory linear regression; identical to SAC detrend
2019-09-03
* bug fix for rare situation where float precision led to length(t) != length(x)
=#
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 875 | function get_svn(url::String, dest::String)
isdir(dest) && (println(dest * " exists; not downloading."); return)
println("dowloading via SVN...")
status = 1
if Sys.iswindows()
status = (try
p = run(`cmd /c svn export $url $dest`)
0
catch err
@warn(string("error thrown: ", err))
1
end)
else
status = (try
p = run(`svn export $url $dest`)
p.exitcode
catch err
@warn(string("error thrown: ", err))
1
end)
end
if status != 0
err_string = "download failed. Is a command-line SVN client installed?
(type \"run(`svn --version`)\"; if an error occurs, SVN isn't installed.)
Subversion for Ubuntu: sudo apt install subversion
Subversion for OS X: brew install subversion
SlikSVN Windows client: https://sliksvn.com/download/
"
error(err_string)
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 13453 | export d2u, j2md, md2j, parsetimewin, timestamp, u2d, timespec
# =====================================================================
# Time stamping
function tstr(t::DateTime)
Y, M, D, h, m, s, μ = year(t), month(t), day(t), hour(t), minute(t), second(t), millisecond(t)
Y = lpad(Y, 4, "0")
M = lpad(M, 2, "0")
D = lpad(D, 2, "0")
h = lpad(h, 2, "0")
m = lpad(m, 2, "0")
s = lpad(s, 2, "0")
return string(Y, "-", M, "-", D, "T", h, ":", m, ":", s)
end
"Alias to Dates.unix2datetime"
u2d(k::Real) = Dates.unix2datetime(k)
"Alias to Dates.datetime2unix"
d2u(k::DateTime) = Dates.datetime2unix(k)
@doc """
timestamp()
Return current time formatted YYYY-mm-ddTHH:MM:SS.
""" timestamp
timestamp() = tstr(Dates.unix2datetime(time()))
timestamp(t::DateTime) = tstr(t)
timestamp(t::Real) = tstr(u2d(t))
timestamp(t::String) = tstr(Dates.DateTime(t))
tnote(s::String) = string(timestamp(), " ¦ ", s)
# =====================================================================
# Time and date converters
"""
m,d = j2md(y,j)
Convert Julian day j of year y to month m, day d
"""
function j2md(y::T, j::T) where T<:Integer
if T != Int32
y = Int32(y)
j = Int32(j)
end
z = zero(Int32)
o = one(Int32)
m = z
d = o
if j > Int32(31)
leapyear = ((j > 59) && ((y % Int32(400) == z) || (y % Int32(4) == z && y % Int32(100) != z)))
while j > z
d = j
m += o
j -= days_per_month[m]
if leapyear && m == 2
j -= o
end
end
else
m = o
d = j
end
return m,d
end
"""
j = md2j(y, m, d)
Convert month `m`, day `d` of year `y` to Julian day (day of year)
"""
function md2j(y::T, m::T, d::T) where T<:Integer
j = zero(Int32)
if T != Int32
y = Int32(y)
m = Int32(m)
d = Int32(d)
end
z = zero(Int32)
i = one(Int8)
while i < m
j = j+getindex(days_per_month, i)
i = i+1
end
j = j+d
if m > 2 && ((y % Int32(400) == z) ||
(y % Int32(4) == z &&
y % Int32(100) != z))
j = j+1
end
return T(j)
end
md2j(y::AbstractString, m::AbstractString, d::AbstractString) = md2j(parse(Int, y), parse(Int, m), parse(Int, d))
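# Sanity-check sketch (leap-year handling):
#   md2j(2020, 2, 29)   # 60
#   j2md(2020, 60)      # (2, 29)
#   md2j(2019, 3, 1)    # 60
#   j2md(2019, 60)      # (3, 1)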
@doc """
t_arr!(B::Array{Int32,1}, t::Int64)
Convert `t` to [year, day of year, hour, minute, second, millisecond], overwriting the first 6 values in `B` with the result.
""" t_arr!
function t_arr!(tbuf::Array{Int32, 1}, t::Int64)
dt = u2d(t*μs)
tbuf[1] = Int32(year(dt))
tbuf[2] = md2j(tbuf[1], Int32(month(dt)), Int32(day(dt)))
tbuf[3] = Int32(hour(dt))
tbuf[4] = Int32(minute(dt))
tbuf[5] = Int32(second(dt))
tbuf[6] = Int32(millisecond(dt))
return nothing
end
function unpack_u8(v::UInt8)
a = signed(div(v,0x10))*Int8(10)
b = signed(rem(v,0x10))
return Int64(a+b)
end
function datehex2μs!(a::Array{Int64,1}, datehex::Array{UInt8,1})
a[1] = 100*unpack_u8(getindex(datehex, 1)) + unpack_u8(getindex(datehex, 2))
a[2] = md2j(getindex(a,1), unpack_u8(getindex(datehex, 3)), unpack_u8(getindex(datehex, 4))) - 1
setindex!(a, y2μs(getindex(a,1)), 1)
a[3] = unpack_u8(getindex(datehex, 5))
a[4] = unpack_u8(getindex(datehex, 6))
a[5] = unpack_u8(getindex(datehex, 7))
a[6] = unpack_u8(getindex(datehex, 8))
return a[1] + a[2]*86400000000 + a[3]*3600000000 + a[4]*60000000 + a[5]*1000000 + a[6]*10000
end
function y2μs(y::T) where T<:Integer
y = Int64(y)-1
return 86400000000 * (y*365 + div(y,4) - div(y,100) + div(y,400)) - 62135596800000000
end
# ts = round(Int64, d2u(DateTime(iv[1], m, d, iv[3], iv[4], iv[5], iv[6]))*sμ
mktime(y::T, j::T, h::T, m::T, s::T, μ::T) where T<:Integer = (y2μs(y) +
Int64(j-one(T))*86400000000 +
Int64(h)*3600000000 +
Int64(m)*60000000 +
Int64(s)*1000000 +
Int64(μ))
mktime(t::Array{T,1}) where T<:Integer =(y2μs(t[1]) +
Int64(t[2]-one(T))*86400000000 +
Int64(t[3])*3600000000 +
Int64(t[4])*60000000 +
Int64(t[5])*1000000 +
Int64(t[6]))
# =====================================================================
# TimeSpec parsing
"""
(str0, str1) = parsetimewin(ts1::TimeSpec, ts2::TimeSpec)
Convert times `ts1` and `ts2` to strings, sorted so that the earlier time is returned first.
See also: `TimeSpec`
"""
function parsetimewin(s::DateTime, t::DateTime)
if s < t
return (string(s), string(t))
else
return (string(t), string(s))
end
end
parsetimewin(s::DateTime, t::String) = parsetimewin(s, DateTime(t))
parsetimewin(s::DateTime, t::Real) = parsetimewin(s, u2d(d2u(s)+t))
parsetimewin(s::Real, t::DateTime) = parsetimewin(t, u2d(d2u(t)+s))
parsetimewin(s::String, t::Union{Real,DateTime}) = parsetimewin(DateTime(s), t)
parsetimewin(s::Union{Real,DateTime}, t::String) = parsetimewin(s, DateTime(t))
parsetimewin(s::String, t::String) = parsetimewin(DateTime(s), DateTime(t))
parsetimewin(s::Real, t::Real) = parsetimewin(u2d(60*floor(Int, time()/60) + s), u2d(60*floor(Int, time()/60) + t))
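# Example (illustrative): a start string plus a 600-s offset; the earlier time
# is always returned first.
#   parsetimewin("2020-03-01T00:00:00", 600.0)
#   # => ("2020-03-01T00:00:00", "2020-03-01T00:10:00")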
# convert a formatted time string to integer μs from the Unix epoch
function tstr2int(s::String)
str = split(s, ".", limit=2)
if length(str) < 2
μ = 0
else
μ = parse(Int64, rpad(str[2], 6, '0'))
end
return DateTime(str[1]).instant.periods.value*1000 - dtconst + μ
end
# convert a time in integer μs (measured from the Unix epoch) to a string
function int2tstr(t::Int64)
dt = unix2datetime(div(t, 1000000))
v = 1000*getfield(getfield(getfield(dt, :instant), :periods), :value)
r = string(t - v + dtconst)
s = string(dt) * "." * lpad(r, 6, '0')
return s
end
# =====================================================================
# Functions for SeisIO time matrices (:t field)
# Check whether a time matrix has gaps
function is_gapless(t::Array{Int64, 2})
(length(t) == 4) || return false
# Definition of a well-formed time matrix with no gaps
return ((t[1,1] == 1) && (t[2,1] > 1) && (t[2,2] == 0))
end
function mk_t(nx::Integer, ts::Int64)
t = Array{Int64, 2}(undef, 2, 2)
setindex!(t, one(Int64), 1)
setindex!(t, nx, 2)
setindex!(t, ts, 3)
setindex!(t, zero(Int64), 4)
return t
end
"""
tx = t_expand(t::Array{Int64,2}, fs::Float64)
Expand SeisIO time matrix `t` for data sampled at `fs` Hz. If `x` is a data
vector whose sample times are represented by `t`, then `tx` is a vector of
sample times where `tx[i]` is the sample time of `x[i]`.
"""
function t_expand(t::Array{Int64,2}, fs::Float64)
fs == 0.0 && return t[:,2]
t[end,1] == 1 && return [t[1,2]]
dt = round(Int64, 1.0/(fs*μs))
tt = dt.*ones(Int64, t[end,1])
tt[1] -= dt
for i = 1:size(t,1)
tt[t[i,1]] += t[i,2]
end
cumsum!(tt, tt)
return tt
end
"""
t = t_collapse(tx::Array{Int64, 1}, fs::Float64)
Collapse vector of sample times `tx` sampled at `fs` Hz to compact SeisIO time
matrix representation `t`.
"""
function t_collapse(tt::Array{Int64,1}, fs::Float64)
if fs == 0.0
t = hcat(collect(1:1:length(tt)), tt)
else
dt = round(Int64, 1.0/(fs*μs))
ts = Array{Int64,1}([dt; diff(tt)::Array{Int64,1}])
L = length(tt)
i = findall(ts .!= dt)
t = Array{Int64,2}([[1 tt[1]];[i ts[i].-dt]])
if isempty(i) || i[end] != L
t = vcat(t, hcat(L,0))
end
end
return t
end
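# Round-trip sketch (illustrative matrix): 10 samples at 100 Hz with a 0.05-s gap
# before sample 6; columns are [sample index, time offset in μs].
#   t  = [1 0; 6 50000; 10 0]
#   tx = t_expand(t, 100.0)       # sample times in μs: 0, 10000, ..., 40000, 100000, ..., 140000
#   t_collapse(tx, 100.0) == t    # true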
function t_bounds(T::Array{Int64,2}, Δ::Int64)
tmin = typemax(Int64)
tmax = typemin(Int64)
isempty(T) && return(tmin, tmax)
t0 = 0
t1 = 0
n = size(T,1)-1
if T[n+1,2] != 0
T = vcat(T, [T[n+1,1] 0])
n += 1
end
w0 = -(Δ)
for i = 1:n
t0 = T[i,2] + w0 + Δ
t1 = t0 + Δ*(T[i+1,1]-T[i,1]-1)
w0 = t1
if t0 < tmin
tmin = t0
end
if t1 > tmax
tmax = t1
end
end
t1 += Δ
if t1 > tmax
tmax = t1
end
return (tmin, tmax)
end
function t_win(T::Array{Int64,2}, Δ::Int64)
isempty(T) && return(T)
n = size(T,1)-1
if T[n+1,2] != 0
T = vcat(T, [T[n+1,1] 0])
n += 1
end
w0 = -(Δ)
W = Array{Int64,2}(undef,n,2)
for i = 1:n
W[i,1] = T[i,2] + w0 + Δ
W[i,2] = W[i,1] + Δ*(T[i+1,1]-T[i,1]-1)
w0 = W[i,2]
end
W[n,2] += Δ
return W
end
t_win(T::Array{Int64,2}, fs::Float64) = t_win(T, round(Int64, sμ/fs))
function w_time(W::Array{Int64,2}, Δ::Int64)
n = size(W,1)+1
T = Array{Int64,2}(undef,n,2)
T[1,1] = Int64(1)
T[1,2] = W[1,1]
for i = 2:n-1
T[i,1] = T[i-1,1] + div(W[i-1,2]-W[i-1,1], Δ) + 1
T[i,2] = W[i,1] - W[i-1,2] - Δ
end
T[n,1] = T[n-1,1] + div(W[n-1,2]-W[n-1,1], Δ)
T[n,2] = 0
if T[n,1] == T[n-1,1]
T = T[1:n-1,:]
end
return T
end
w_time(W::Array{Int64,2}, fs::Float64) = w_time(W, round(Int64, sμ/fs))
# Sort based on start of each time window
function sort_segs!(W::Array{Int64, 2})
(size(W, 1) < 2) && return
j = sortperm(W[:,1])
d = diff(j)
if (maximum(d) > 1) || (minimum(d) < 1)
W .= W[j,:]
end
return nothing
end
function sort_segs(t::Array{Int64, 2}, Δ::Int64)
is_gapless(t) && return
W = t_win(t, Δ)
sort_segs!(W)
return w_time(W, Δ)
end
"""
te = endtime(t::Array{Int64,2}, Δ::Int64)
Compute the time of the last sample in `t`, a SeisIO time matrix sampled at
interval `Δ` [μs] or frequency `fs` [Hz]. Output is integer μs measured from
the Unix epoch.
"""
function endtime(t::Array{Int64,2}, Δ::Int64)
if isempty(t)
t_end = 0
elseif is_gapless(t)
t_end = t[1,2] + (t[2,1]-1)*Δ
elseif minimum(t) < 0
t_end = t_bounds(t, Δ)[2]
else
L = size(t,1)
t_end = (t[L,1]-1)*Δ
if L > 2
t_end += getindex(sum(t, dims=1),2)
else
t_end += t[1,2] + t[2,2]
end
end
return t_end
end
function endtime(t::Array{Int64,2}, fs::Float64)
if fs == 0.0
return isempty(t) ? 0 : t[size(t,1), 2]
else
return endtime(t, round(Int64, 1.0/(fs*μs)))
end
end
"""
ts = starttime(t::Array{Int64,2}, Δ::Int64)
Compute the time of the first sample in SeisIO time matrix `t`, sampled at
interval `Δ` [μs] or frequency `fs` [Hz]. Output is integer μs measured from
the Unix epoch.
"""
function starttime(t::Array{Int64,2}, Δ::Int64)
return (if isempty(t)
0
elseif minimum(t) < 0
t_bounds(t, Δ)[1]
else
t[1,2]
end)
end
function starttime(t::Array{Int64,2}, fs::Float64)
if fs == 0.0
return isempty(t) ? 0 : minimum(t, dims=1)[2]
else
return starttime(t, round(Int64, sμ/fs))
end
end
#=
Change to t_extend 2020-03-06
Bad behavior in old function for nx == 0 && nt > 0:
if nx == 0
check for gap
if gap
increment T[end,2] by δt
Extending a channel with nx=0 would change gap at end but not add samples.
This should never happen.
=#
"""
t_extend(T::Array{Int64,2}, t_new::Int64, n_new::Int64, Δ::Int64)
Extend SeisIO time matrix *T* sampled at interval *Δ* μs or frequency *fs* Hz. For matrix *Tᵢ*:
* *t_new* is the start time of the next segment in data vector *Xᵢ*
* *n_new* is the expected number of samples in the next segment of *Xᵢ*
`check_for_gap!` acts as a more specific case of `t_extend` that operates on a
GphysData structure where `n_new` is known and no time gaps are possible
in the new segment.
This function has a mini-API in the time API (/docs/DevGuides/time.md).
See also: `check_for_gap!`
"""
function t_extend(T::Array{Int64,2}, ts::Integer, nx::Integer, Δ::Int64)
nt = div(length(T), 2)
n0 = 0
# channel has some data already
if nt > 0
(nx == 0) && (return nothing)
n = T[nt, 1]
t0 = endtime(T, Δ)
δt = ts - t0 - Δ
if abs(δt) > div(Δ, 2)
if nx == 1
if T[nt, 2] == 0
# Case 3: normal, no gap from x[n] to x[n+1], nx=1
T1 = copy(T)
T1[nt, 1] = n+nx
T1[nt, 2] = δt
return T1
else
# Case 4: abnormal (gap before x[n]), gap from x[n] to x[n+1], nx=1
return vcat(T, [n+1 δt])
end
elseif T[nt, 2] == 0
# Case 5: normal, gap from x[n] to x[n+1], nx>1
T1 = vcat(T, [n+nx 0])
T1[nt, 1] = n+1
T1[nt, 2] = δt
return T1
else
# Case 6: abnormal (gap before x[n]), gap from x[n] to x[n+1], nx>1
return vcat(T, [n+1 δt; n+nx 0])
end
elseif T[nt, 2] > 0
# Case 2: abnormal (gap before x[n]), no gap from x[n] to x[n+1]
return vcat(T, [n+nx 0])
else
# Case 1: normal (no gap before x[n]), no gap from x[n] to x[n+1]
setindex!(T, n+nx, nt)
return nothing
end
# extend t to end at ts (counterintuitive syntax)
elseif nx == 0
return mk_t(nx, ts)[1:1,:]
# I really don't like how this behaves...is it even used anymore?
# behavior for a new channel
else
return mk_t(nx, ts)
end
end
# Change 2020-03-07: Account for fs = 0.0
function t_extend(T::Array{Int64,2}, ts::Integer, nx::Integer, fs::Float64)
T1 = (if fs == 0.0
nt = div(length(T), 2)
T2 = zeros(Int64, nx, 2)
T2[:,1].=nt.+(1:nx)
T2[1,2] = ts
vcat(T, T2)
else
t_extend(T, ts, nx, round(Int64, sμ/fs))
end)
return T1
end
function tx_float(t::Array{Int64,2}, fs::Float64)
fs == 0.0 && return map(Float64, t[:,2])
t[end,1] == 1 && return Float64[t[1,2]]
Nt = t[end,1]
dt = 1.0/fs
tt = dt.*ones(Float64, Nt)
tt[1] -= dt
for i = 2:size(t,1)
tt[t[i,1]] += t[i,2]
end
cumsum!(tt, tt)
return tt
end
function x_inds(t::Array{Int64,2})
nt = size(t, 1)-1
inds = zeros(Int64, nt, 2)
for i in 1:nt
inds[i,1] = t[i,1]
inds[i,2] = t[i+1,1] - (i == nt ? 0 : 1)
end
if t[nt+1,2] != 0
inds[nt,2] -= 1
inds = vcat(inds, [t[nt+1,1] t[nt+1,1]])
end
return inds
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5037 | # allowed values in misc: char, string, numbers, and arrays of same.
tos(t::Type) = round(UInt8, log2(sizeof(t)))
function typ2code(t::Type)
n = 0xff
if t == Char
n = 0x00
elseif t == String
n = 0x01
elseif t <: Unsigned
n = 0x10 + tos(t)
elseif t <: Signed
n = 0x20 + tos(t)
elseif t <: AbstractFloat
n = 0x30 + tos(t) - 0x01
elseif t <: Complex
n = 0x40 + typ2code(real(t))
elseif t <: Array
n = 0x80 + typ2code(eltype(t))
end
return n
end
# Who needs "switch"...
function code2typ(c::UInt8)
t::Type = Any
if c >= 0x80
t = Array{code2typ(c-0x80)}
elseif c >= 0x40
t = Complex{code2typ(c-0x40)}
elseif c >= 0x30
t = getindex((Float16, Float32, Float64), c-0x2f)
elseif c >= 0x20
t = getindex((Int8, Int16, Int32, Int64, Int128), c-0x1f)
elseif c >= 0x10
t = getindex((UInt8, UInt16, UInt32, UInt64, UInt128), c-0x0f)
elseif c == 0x01
t = String
elseif c == 0x00
t = Char
else
error("unknown type!")
end
return t
end
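# Round-trip sketch:
#   typ2code(Float32)        # 0x31
#   typ2code(Array{Int64})   # 0xa3
#   code2typ(0xa3)           # Array{Int64,N} where N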
# SUPPORTED TYPES IN :MISC
#
#= HOW TO CHECK:
(1) copy the two lines below to the command line to generate a table like the one below
(2) if anything in column 4 of your table is false, these functions are broken
using SeisIO, SeisIO.RandSeis, BenchmarkTools, LightXML; import SeisIO:code2typ, typ2code;
for c = 0x00:0xff; try; println(stdout, rpad(string(code2typ(c)), 36), "| ", repr(typ2code(code2typ(c)))," | ", repr(c), " | ", isequal(c, typ2code(code2typ(c)))); catch; end; end
GUIDE TO THE TABLE:
Column 1 is a list of types allowed in :misc
Column 2 is the corresponding UInt8 type codes
Column 3 is the value returned by typ2code(code2typ(c))
Column 4 is the result of c == typ2code(code2typ(c))
Type | Code | Ret | ==?
:--------------- |:-----|:-----|-----
Char | 0x00 | 0x00 | true
String | 0x01 | 0x01 | true
UInt8 | 0x10 | 0x10 | true
UInt16 | 0x11 | 0x11 | true
UInt32 | 0x12 | 0x12 | true
UInt64 | 0x13 | 0x13 | true
UInt128 | 0x14 | 0x14 | true
Int8 | 0x20 | 0x20 | true
Int16 | 0x21 | 0x21 | true
Int32 | 0x22 | 0x22 | true
Int64 | 0x23 | 0x23 | true
Int128 | 0x24 | 0x24 | true
Float16 | 0x30 | 0x30 | true
Float32 | 0x31 | 0x31 | true
Float64 | 0x32 | 0x32 | true
Complex{UInt8} | 0x50 | 0x50 | true
Complex{UInt16} | 0x51 | 0x51 | true
Complex{UInt32} | 0x52 | 0x52 | true
Complex{UInt64} | 0x53 | 0x53 | true
Complex{UInt128} | 0x54 | 0x54 | true
Complex{Int8} | 0x60 | 0x60 | true
Complex{Int16} | 0x61 | 0x61 | true
Complex{Int32} | 0x62 | 0x62 | true
Complex{Int64} | 0x63 | 0x63 | true
Complex{Int128} | 0x64 | 0x64 | true
Complex{Float16} | 0x70 | 0x70 | true
Complex{Float32} | 0x71 | 0x71 | true
Complex{Float64} | 0x72 | 0x72 | true
Array{Char,N} where N | 0x80 | 0x80 | true
Array{String,N} where N | 0x81 | 0x81 | true
Array{UInt8,N} where N | 0x90 | 0x90 | true
Array{UInt16,N} where N | 0x91 | 0x91 | true
Array{UInt32,N} where N | 0x92 | 0x92 | true
Array{UInt64,N} where N | 0x93 | 0x93 | true
Array{UInt128,N} where N | 0x94 | 0x94 | true
Array{Int8,N} where N | 0xa0 | 0xa0 | true
Array{Int16,N} where N | 0xa1 | 0xa1 | true
Array{Int32,N} where N | 0xa2 | 0xa2 | true
Array{Int64,N} where N | 0xa3 | 0xa3 | true
Array{Int128,N} where N | 0xa4 | 0xa4 | true
Array{Float16,N} where N | 0xb0 | 0xb0 | true
Array{Float32,N} where N | 0xb1 | 0xb1 | true
Array{Float64,N} where N | 0xb2 | 0xb2 | true
Array{Complex{UInt8},N} where N | 0xd0 | 0xd0 | true
Array{Complex{UInt16},N} where N | 0xd1 | 0xd1 | true
Array{Complex{UInt32},N} where N | 0xd2 | 0xd2 | true
Array{Complex{UInt64},N} where N | 0xd3 | 0xd3 | true
Array{Complex{UInt128},N} where N | 0xd4 | 0xd4 | true
Array{Complex{Int8},N} where N | 0xe0 | 0xe0 | true
Array{Complex{Int16},N} where N | 0xe1 | 0xe1 | true
Array{Complex{Int32},N} where N | 0xe2 | 0xe2 | true
Array{Complex{Int64},N} where N | 0xe3 | 0xe3 | true
Array{Complex{Int128},N} where N | 0xe4 | 0xe4 | true
Array{Complex{Float16},N} where N | 0xf0 | 0xf0 | true
Array{Complex{Float32},N} where N | 0xf1 | 0xf1 | true
Array{Complex{Float64},N} where N | 0xf2 | 0xf2 | true
=#
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5098 | module FastIO
const FastReadInt = Union{Type{Int16}, Type{UInt16}, Type{Int32}, Type{UInt32}, Type{Int64}, Type{UInt64}}
const FastReads = Union{FastReadInt, Type{Float16}, Type{Float32}, Type{Float64}}
#=TO DO:
fastread Char
IBM-Float
=#
# =====================================================================
#=
This section: file position commands
No rewrite needed in 1.3: mark, reset, seekstart
=#
# fastpos
fastpos(io::IO) = io.ptr-1
fastpos(io::IOStream) = ccall(:ios_pos, Int64, (Ptr{Cvoid},), io.ios)
# fasteof
fasteof(io::IO) = (io.ptr-1 == io.size)
fasteof(io::IOStream) = Bool(ccall(:ios_eof_blocking, Cint, (Ptr{Cvoid},), io.ios))
# fastseek
fastseek(io::IO, p::Integer) = seek(io, p)
function fastseek(io::IOStream, n::Integer)
ccall(:ios_seek, Int64, (Ptr{Cvoid}, Int64), io.ios, n)
return nothing
end
# fastskip
fastskip(io::IO, n::Integer) = skip(io, n)
function fastskip(io::IOStream, n::Integer)
ccall(:ios_skip, Int64, (Ptr{Cvoid}, Int64), io.ios, n)
return nothing
end
# fastseekend
fastseekend(io::IO) = (io.ptr = io.size+1)
fastseekend(io::IOStream) = ccall(:ios_seek_end, Int64, (Ptr{Cvoid},), io.ios)
# =====================================================================
#= This section: file read commands
fastread(io) returns a UInt8
fastread(io, T) returns a T
=#
fastread!(io::IO, buf::Array{T}) where T = read!(io, buf)
fastread!(io::IOStream, buf::Array{T}) where T = @GC.preserve buf ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io, pointer(buf), sizeof(buf))
# fastread!(io::IOStream, buf::Array{UInt8,1}) = ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io.ios, pointer(buf, 1), sizeof(buf))
# fastread
fastread(io::IO) = read(io, UInt8)
fastread(io::IO, T::FastReadInt) = read(io, T)
fastread(io::IO, n::Integer) = read(io, n)
# IOStream methods avoid locking in 1.3
fastread(io::IOStream) = ccall(:ios_getc, Cint, (Ptr{Cvoid},), io.ios) % UInt8
function fastread(io::IOStream, n::Integer)
buf = Vector{UInt8}(undef, n)
ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io.ios, pointer(buf, 1), n)
return buf
end
# Working solution 2020-02-10T16:00:00
function fastread(io::IOStream, T::FastReadInt)
if VERSION > v"1.2.0"
# ccall(:ios_flush, Cint, (Ptr{Cvoid},), io.ios)
ccall(:ios_readprep, UInt64, (Ptr{Cvoid}, Csize_t), io.ios, sizeof(T))
end
return (ccall(:jl_ios_get_nbyte_int, UInt64, (Ptr{Cvoid}, Csize_t), io.ios, sizeof(T)) % T)
end
#= I *don't* like this. jl_ios_get_nbyte_int seems more likely to change than
ios_getc, and version-dependent "if" loop is undesirable =#
# function fastread1(io::IOStream, ::Type{Int16})
# ccall(:ios_flush, Cint, (Ptr{Cvoid},), io.ios)
# x = ccall(:ios_getc, Cint, (Ptr{Cvoid},), io.ios) | (ccall(:ios_getc, Cint, (Ptr{Cvoid},), io.ios) << 0x08)
# return signed(x % UInt16)
# end
#
fastread(io::IO, ::Type{Bool}) = (fastread(io) != 0x00)
fastread(io::IO, ::Type{UInt8}) = fastread(io)
fastread(io::IO, ::Type{Int8}) = signed(fastread(io))
fastread(io::IO, ::Type{Float16}) = Base.reinterpret(Float16, fastread(io, Int16))
fastread(io::IO, ::Type{Float32}) = Base.reinterpret(Float32, fastread(io, Int32))
fastread(io::IO, ::Type{Float64}) = Base.reinterpret(Float64, fastread(io, Int64))
# use if creating a new array during the call; I have yet to read anything but a 1d array in any format
function fastread(io::IO, T::Type, n::Integer)
a = Array{T, 1}(undef, n)
fastread!(io, a)
return a
end
# TO DO
fastread(io::IO, ::Type{Char}) = read(io, Char)
# =====================================================================
# fast_readbytes!
fast_readbytes!(io::IO, buf::Array{UInt8,1}, nb::Integer) = readbytes!(io, buf, nb)
function fast_readbytes!(io::IOStream, buf::Array{UInt8,1}, nb::Integer)
olb = lb = length(buf)
nr = 0
GC.@preserve buf while nr < nb
if lb < nr+1
lb = max(65536, (nr+1) * 2)
resize!(buf, lb)
end
nr += Int(ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t),
io.ios, pointer(buf, nr+1), min(lb-nr, nb-nr)))
fasteof(io) && break
end
if lb > olb && lb > nr
resize!(buf, nr) # shrink to just contain input data if was resized
end
return nr
end
# was (and crashed with)
# = @GC.preserve buf ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io.ios, pointer(buf, 1), nb)
# fast_readline
fast_readline(io::IO) = readline(io)
fast_readline(io::IOStream) = ccall(:jl_readuntil, Ref{String}, (Ptr{Cvoid}, UInt8, UInt8, UInt8), io.ios, '\n', 1, 2)
# fast_unsafe_read
fast_unsafe_read(io::IO, p::Ptr{UInt8}, nb::Integer) = unsafe_read(io, p, nb)
fast_unsafe_read(io::IOStream, p::Ptr{UInt8}, nb::Integer) = ccall(:ios_readall, Csize_t, (Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io, p, nb)
# =====================================================================
export
FastReadInt,
FastReads,
fast_readbytes!,
fast_readline,
fast_unsafe_read,
fasteof,
fastpos,
fastread!,
fastread,
fastseek,
fastseekend,
fastskip
end
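# Usage sketch (hypothetical file name): IOStream methods use the ios_* C calls;
# generic IO falls back to Base.
#   using .FastIO
#   open("data.bin", "r") do io
#     n   = fastread(io, Int32)   # one Int32
#     raw = fastread(io, 16)      # 16 raw bytes as a Vector{UInt8}
#     fastskip(io, 4)             # skip 4 bytes
#   end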
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3918 | is_u8_digit(u::UInt8) = u > 0x2f && u < 0x3a
# id = targ vector
# cv = char vector
# i = starting index in cv
# imax = max index in cv
# j = starting index in id
# jmax = max index in id
function fill_id!(id::Array{UInt8,1}, cv::Array{UInt8,1}, i::T, i_max::T, j::T, j_max::T) where T<:Integer
o = one(T)
while true
c = getindex(cv, i)
i = i+o
i > i_max && break
c < 0x2f && continue
setindex!(id, c, j)
j = j+o
j > j_max && break
end
if j_max < T(15)
id[j_max+1] = 0x2e
end
return i
end
function checkbuf!(buf::Array{UInt8,1}, nx::T1, T::Type) where T1<:Integer
nb = Int64(nx)*sizeof(T)
if lastindex(buf) > nb
resize!(buf, nb)
end
return nb
end
function checkbuf!(buf::AbstractArray, nx::T) where {T<:Integer}
if nx > lastindex(buf)
resize!(buf, nx)
end
end
function checkbuf_strict!(buf::AbstractArray, nx::T) where {T<:Integer}
if nx != lastindex(buf)
resize!(buf, nx)
end
end
# ensures length(buf) is divisible by 8...needed to reinterpret as 64-bit type
function checkbuf_8!(buf::Array{UInt8,1}, n::Integer)
if rem(n, 8) == 0
nx = n
else
nx = n + 8 - rem(n,8)
end
L = length(buf)
if nx > L
resize!(buf, nx)
else
r = rem(L, 8)
if r > 0
resize!(buf, L + 8 - r)
end
end
end
function fillx_i4!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
j = os; i = 0
while i < nx
i += 1
j += 1
y = getindex(buf, i)
x[j] = Int32(y >> 4)
if i < nx
j += 1
x[j] = Int32((y << 4) >> 4)
end
end
return nothing
end
function fillx_i8!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
j = os; i = 0
while i < nx
i += 1
j += 1
x[j] = signed(buf[i])
end
return nothing
end
function fillx_i16_le!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt16); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt16(buf[2*i-1])
y |= UInt16(buf[2*i]) << 8
x[j] = signed(y)
end
return nothing
end
function fillx_i16_be!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt16); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt16(buf[2*i-1]) << 8
y |= UInt16(buf[2*i])
x[j] = signed(y)
end
return nothing
end
function fillx_i24_be!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt32); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt32(buf[3*i-2]) << 24
y |= UInt32(buf[3*i-1]) << 16
y |= UInt32(buf[3*i]) << 8
x[j] = signed(y) >> 8
end
end
function fillx_i32_le!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt32); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt32(buf[4*i-3])
y |= UInt32(buf[4*i-2]) << 8
y |= UInt32(buf[4*i-1]) << 16
y |= UInt32(buf[4*i]) << 24
x[j] = signed(y)
end
return nothing
end
function fillx_i32_be!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt32); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt32(buf[4*i-3]) << 24
y |= UInt32(buf[4*i-2]) << 16
y |= UInt32(buf[4*i-1]) << 8
y |= UInt32(buf[4*i])
x[j] = signed(y)
end
return nothing
end
function fillx_u32_be!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt32); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt32(buf[4*i-3]) << 24
y |= UInt32(buf[4*i-2]) << 16
y |= UInt32(buf[4*i-1]) << 8
y |= UInt32(buf[4*i])
x[j] = y
end
return nothing
end
function fillx_u32_le!(x::AbstractArray, buf::Array{UInt8,1}, nx::Integer, os::Int64)
y = zero(UInt32); j = os; i = 0
while i < nx
i += 1
j += 1
y = UInt32(buf[4*i-3])
y |= UInt32(buf[4*i-2]) << 8
y |= UInt32(buf[4*i-1]) << 16
y |= UInt32(buf[4*i]) << 24
x[j] = y
end
return nothing
end
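# Decoding sketch (illustrative bytes): big-endian Int16 pairs unpacked in place.
#   x   = zeros(Int32, 2)
#   buf = UInt8[0x01, 0x02, 0xff, 0xfe]
#   fillx_i16_be!(x, buf, 2, 0)   # x == [258, -2]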
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3125 | function read_string_vec(io::IO, u::Array{UInt8,1})
b = fastread(io)
S = String[]
if b == 0x00
N = fastread(io, UInt64)
p = pointer(u)
L = fastread(io, UInt16, N)
i = 0
while i < N
i = i + 1
l = getindex(L, i)
fast_unsafe_read(io, p, l)
push!(S, unsafe_string(p, l))
end
end
return S
end
function read_misc(io::IO, u::Array{UInt8,1})
D = Dict{String,Any}()
L = fastread(io, UInt64)
if L != zero(Int64)
n = zero(Int64)
dims = Array{Int64, 1}(undef, 3)
p = pointer(u)
K = read_string_vec(io, u)
for k in K
t = fastread(io)
T = code2typ(t)
# String array
if t == 0x81
nd = fastread(io, Int64)
checkbuf_strict!(dims, nd)
fastread!(io, dims)
if dims == [0]
D[k] = String[]
else
S = String[]
i = 0
N = prod(dims)
L = fastread(io, UInt16, N)
while i < N
i = i + 1
l = getindex(L, i)
fast_unsafe_read(io, p, l)
push!(S, unsafe_string(p, l))
end
D[k] = reshape(S, dims...)
end
# Numeric or Char array
elseif T <: AbstractArray
nd = fastread(io, Int64)
if nd == 1
D[k] = read!(io, T(undef, fastread(io, Int64)))
else
checkbuf_strict!(dims, nd)
fastread!(io, dims)
D[k] = read!(io, T(undef, dims...))
end
# String
elseif T == String
n = fastread(io, UInt16)
fast_unsafe_read(io, p, n)
D[k] = unsafe_string(p, n)
# Bits type
elseif Type{T} <: FastReads
D[k] = fastread(io, T)
else
D[k] = read(io, T)
end
end
end
return D
end
# ============================================================================
# Write functions
function write_string_vec(io::IO, v::Array{String,1})
b = isempty(v)
write(io, b)
if b == false
write(io, Int64(length(v)))
for s in v
write(io, UInt16(sizeof(s)))
end
for s in v
write(io, s)
end
end
return nothing
end
function write_misc(io::IO, D::Dict{String,Any})
L = Int64(length(D))
write(io, L)
if L != zero(Int64)
K = keys(D)
write_string_vec(io, collect(K))
for v in values(D)
T = typeof(v)
t = typ2code(T)
write(io, t)
# String array
if t == 0x81
dims = Int64.(size(v))
write(io, Int64(length(dims)))
for i in dims
write(io, i)
end
if dims != (0,)
for s in v
write(io, UInt16(sizeof(s)))
end
for s in v
write(io, s)
end
end
elseif T <: AbstractArray
dims = Int64.(size(v))
write(io, Int64(length(dims)))
for i in dims
write(io, i)
end
write(io, v)
elseif T == String
write(io, UInt16(sizeof(v)))
write(io, v)
elseif T <: Union{Char, AbstractFloat, Complex, Integer}
write(io, v)
end
end
end
return nothing
end
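# Round-trip sketch (illustrative dictionary): write_misc and read_misc should be
# inverses for the supported :misc value types.
#   io = IOBuffer()
#   write_misc(io, Dict{String,Any}("dt" => 0.01, "n" => Int64(100), "tag" => "test"))
#   seekstart(io)
#   D = read_misc(io, zeros(UInt8, 65535))   # D["dt"] == 0.01, D["n"] == 100, D["tag"] == "test"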
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 11516 | function ah_time(t::Array{Int32,1}, ts::Float32)
broadcast!(bswap, t, t)
y = t[1]
ts = Float64(bswap(ts))
return y2μs(y) + Int64(md2j(y, t[2], t[3])-one(Int32))*86400000000 +
Int64(t[4])*3600000000 + Int64(t[5])*60000000 + round(Int64, ts*sμ)
end
function read_ah_str(io::IO, j::Int64)
n = bswap(fastread(io, Int32))
r = rem(n, 4)
# these are all very short; a "for" loop is fastest
for i = 1:n
j += 1
BUF.buf[j] = fastread(io)
end
r > 0 && fastskip(io, 4-r)
return j
end
function read_comm!(io::IO, buf::Array{UInt8,1}, full::Bool)
n = bswap(fastread(io, Int32))
r = rem(n, 4)
j = 0
k = 0
if full
checkbuf!(buf, n)
fast_readbytes!(io, buf, n)
while k < n
k += 1
c = getindex(buf, k)
if c != 0x00
j += 1
BUF.buf[j] = c
end
end
(r > 0) && fastskip(io, 4-r)
else
(r > 0) && (n += 4-r)
fastskip(io, n)
end
return j
end
# obnoxiously, our sample AH files use both 0x00 and 0x20 as string spacers
function mk_ah_id(js::Int64, jc::Int64, jn::Int64)
fill!(BUF.id, 0x00)
j = 0
k = jc+1
while j < 2 && k < jn
c = getindex(BUF.buf, k)
if c != 0x00 && c != 0x20
j += 1
BUF.id[j] = c
end
k += 1
end
j += 1
J = j+5
k = 1
BUF.id[j] = 0x2e
while j < J && k < js
c = getindex(BUF.buf, k)
if c != 0x00 && c != 0x20
j += 1
BUF.id[j] = c
end
k += 1
end
BUF.id[j] = 0x2e
BUF.id[j+1] = 0x2e
j += 1
J = j+3
k = js+1
while j < J && k < jc
c = getindex(BUF.buf, k)
if c != 0x00 && c != 0x20
j += 1
BUF.id[j] = c
end
k += 1
end
return j
end
mk_ah_chan(id::String, loc::GeoLoc, fs::Float64, resp::PZResp, ahfile::String, misc::Dict{String, Any}, notes::Array{String, 1}, nx::Integer, t0::Integer, x::FloatArray) =
SeisChannel(id,
"",
loc,
fs,
Float64(BUF.x[4]),
resp,
"",
ahfile,
misc,
notes,
mk_t(nx, t0),
x)
# AH-2
function read_ah2!(S::GphysData, ahfile::String, full::Bool, memmap::Bool, strict::Bool, v::Integer)
io = memmap ? IOBuffer(Mmap.mmap(ahfile)) : open(ahfile, "r")
str = getfield(BUF, :sac_cv)
ti = BUF.date_buf
resize!(ti, 5)
if full
stamp = timestamp() * " ¦ "
end
while !eof(io)
misc = Dict{String,Any}()
notes = Array{String,1}(undef, 0)
loc = GeoLoc()
ver = bswap(fastread(io, Int32))
ver == 1100 || error("Not a valid AH-2 file!")
len = bswap(fastread(io, UInt32))
# Station header =========================================================
fastskip(io, 4)
js = read_ah_str(io, 0)
fastskip(io, 4)
jc = read_ah_str(io, js)
fastskip(io, 4)
jn = read_ah_str(io, jc)
j = mk_ah_id(js, jc, jn)
id = unsafe_string(pointer(BUF.id), j)
(v > 1) && println("id = ", id)
jrec = read_ah_str(io, jn)
jsen = read_ah_str(io, jrec)
if full
misc["recorder"] = unsafe_string(pointer(BUF.buf, jn+1), jrec-jn)
misc["sensor"] = unsafe_string(pointer(BUF.buf, jrec+1), jsen-jrec)
end
# location
setfield!(loc, :az, Float64(bswap(fastread(io, Float32))))
setfield!(loc, :inc, 90.0-bswap(fastread(io, Float32)))
setfield!(loc, :lat, bswap(fastread(io, Float64)))
setfield!(loc, :lon, bswap(fastread(io, Float64)))
setfield!(loc, :el, Float64(bswap(fastread(io, Float32))))
BUF.x[4] = bswap(fastread(io, Float32)) # gain
BUF.x[5] = bswap(fastread(io, Float32)) # A0
# poles
NP = bswap(fastread(io, Int32))
P = zeros(Complex{Float32}, NP)
fastread!(io, P)
# zeros
NZ = bswap(fastread(io, Int32))
Z = zeros(Complex{Float32}, NZ)
fastread!(io, Z)
# station comment
j = read_comm!(io, str, full)
if full
misc["sta_comment"] = unsafe_string(pointer(BUF.buf), j)
end
# Event header ==========================================================
if full
misc["ev_lat"] = bswap(fastread(io, Float64))
misc["ev_lon"] = bswap(fastread(io, Float64))
misc["ev_dep"] = bswap(fastread(io, Float32))
fastread!(io, ti)
t_s = fastread(io, Float32)
fastskip(io, 4)
j = read_comm!(io, str, full)
misc["event_comment"] = unsafe_string(pointer(BUF.buf), j)
misc["ot"] = ah_time(ti, t_s)
(v > 1) && println("ev_lat = ", misc["ev_lat"], ", ev_lon = ", misc["ev_lon"])
else
fastskip(io, 48)
read_comm!(io, str, full)
end
# Data header ===========================================================
fmt = bswap(fastread(io, Int32))
nx = bswap(fastread(io, UInt32))
dt = bswap(fastread(io, Float32))
Amax = bswap(fastread(io, Float32))
fastread!(io, ti)
t_s = fastread(io, Float32)
n = bswap(fastread(io, Int32))
units = n > 0 ? unsafe_string(pointer(fastread(io, n))) : ""
n = bswap(fastread(io, Int32))
u_i = fastread(io, n)
n = bswap(fastread(io, Int32))
u_o = fastread(io, n)
fastskip(io, 4)
j = read_comm!(io, str, full)
if full
misc["data_comment"] = unsafe_string(pointer(BUF.buf), j)
misc["units_in"] = unsafe_string(pointer(u_i))
misc["units_out"] = unsafe_string(pointer(u_o))
misc["Amax"] = Amax
misc["ti"] = bswap.(copy(ti))
end
fastskip(io, 4)
k = read_comm!(io, str, full)
(v > 1) && println("nx = ", nx, ", dt = ", dt, ", Amax = ", Amax, ", k = ", k)
if full
# Log all processing to C.notes
i = 0
j = 0
while j < k
j += 1
c = BUF.buf[j]
if c == 0x3b && j-i > 1
push!(notes, stamp * unsafe_string(pointer(BUF.buf, i+1), j-i-1) * ", recorded in .ah file log")
i = j
end
end
end
nattr = bswap(fastread(io, Int32))
if nattr > 0
UA = Dict{String, String}()
for i = 1:nattr
j1 = read_ah_str(io, 0)
j2 = read_ah_str(io, j1)
k = unsafe_string(pointer(BUF.buf), j1)
V = unsafe_string(pointer(BUF.buf, j1+1), j2-j1)
UA[k] = V
end
v > 1 && println("UA = ", UA)
merge!(misc, UA)
end
# Determine if we have data from this channel already
fs = 1.0/dt
resp = PZResp(BUF.x[5], 0.0f0, P, Z)
units = units2ucum(units)
j = findid(S, id)
if strict
j = channel_match(S, j, fs, BUF.x[4], loc, resp, units)
end
t0 = ah_time(ti, t_s)
x = Array{fmt == one(Int32) ? Float32 : Float64, 1}(undef, nx)
fastread!(io, x)
broadcast!(bswap, x, x)
if j == 0
C = mk_ah_chan(id, loc, fs, resp, ahfile, misc, notes, nx, t0, x)
C.units = units
push!(S, C)
j = S.n
else
check_for_gap!(S, j, t0, nx, v)
append!(S.x[j], x)
end
(v > 1) && println("id = ", id, " channel ", j)
(v > 2) && println("x = [", x[1], ", ", x[2], ", ", x[3], ", ", x[4], ", ... ", x[nx-3], ", ", x[nx-2], ", ", x[nx-1], ", ", x[nx], "]")
end
close(io)
resize!(str, 192)
resize!(ti, 7)
resize!(BUF.buf, 65535)
return S
end
function read_ah1!(S::GphysData, ahfile::String, full::Bool, memmap::Bool, strict::Bool, v::Integer)
io = memmap ? IOBuffer(Mmap.mmap(ahfile)) : open(ahfile, "r")
str = getfield(BUF, :sac_cv)
ti = getfield(BUF, :date_buf)
pz_buf = getfield(BUF, :x)
if full
stamp = timestamp() * " ¦ "
end
resize!(ti, 5)
resize!(pz_buf, 125)
while !eof(io)
# Create SeisChannel, location container
misc = Dict{String,Any}()
notes = Array{String,1}(undef,0)
# Station header =========================================================
js = read_ah_str(io, 0)
jc = read_ah_str(io, js)
jn = read_ah_str(io, jc)
j = mk_ah_id(js, jc, jn)
id = unsafe_string(pointer(BUF.id), j)
fastread!(io, pz_buf)
# Event header ==========================================================
if full
misc["ev_lat"] = bswap(fastread(io, Float32))
misc["ev_lon"] = bswap(fastread(io, Float32))
misc["ev_dep"] = bswap(fastread(io, Float32))
fastread!(io, ti)
t_s = fastread(io, Float32)
j = read_comm!(io, str, full)
misc["event_comment"] = unsafe_string(pointer(BUF.buf), j)
misc["ot"] = ah_time(ti, t_s)
(v > 1) && printstyled("ev_lat = ", misc["ev_lat"], ", ev_lon = ", misc["ev_lon"], ", ev_dep = ", misc["ev_dep"], ", ")
else
fastskip(io, 36)
read_comm!(io, str, full)
end
# Data header ===========================================================
fmt = bswap(fastread(io, Int32))
nx = bswap(fastread(io, UInt32))
dt = bswap(fastread(io, Float32))
(v > 1) && println("nx = ", nx, ", dt = ", dt)
if full
misc["Amax"] = bswap(fastread(io, Float32))
fastread!(io, ti)
t_s = fastread(io, Float32)
misc["xmin"] = bswap(fastread(io, Float32))
j = read_comm!(io, str, full)
# Log all processing to :notes
misc["data_comment"] = unsafe_string(pointer(BUF.buf), j)
k = read_comm!(io, str, full)
# Log all processing to C.notes
i = 0
j = 1
while j ≤ length(str)
c = BUF.buf[j]
if c == 0x00
i = j
elseif c == 0x3b && j-i > 1
push!(notes, stamp * unsafe_string(pointer(BUF.buf, i+1), j-i-1) * ", recorded in .ah file log")
i = j
end
j += 1
end
# Set all "extras" in :misc
ne = bswap(fastread(io, UInt32))
if ne > 0
misc["extras"] = zeros(Float32, ne)
fastread!(io, misc["extras"])
broadcast!(bswap, misc["extras"], misc["extras"])
end
else
fastskip(io, 4)
fastread!(io, ti)
t_s = fastread(io, Float32)
fastskip(io, 4)
read_comm!(io, str, full)
read_comm!(io, str, full)
ne = bswap(fastread(io, UInt32))
if ne > 0
fastskip(io, 0x00000004*ne)
end
end
# Set fields in C =======================================================
broadcast!(bswap, pz_buf, pz_buf)
# :resp
NP = round(Int32, pz_buf[6])
NZ = round(Int32, pz_buf[8])
P = zeros(Complex{Float32}, NP)
Z = zeros(Complex{Float32}, NZ)
NC = max(NP, NZ)
k = 10
for i = 1:NC
(i > NP) || (P[i] = complex(pz_buf[k], pz_buf[k+1]))
(i > NZ) || (Z[i] = complex(pz_buf[k+2], pz_buf[k+3]))
k += 4
end
# Determine if we have data from this channel already
fs = 1.0/dt
loc = GeoLoc("", Float64(BUF.x[1]), Float64(BUF.x[2]), Float64(BUF.x[3]), 0.0, 0.0, 0.0)
resp = PZResp(BUF.x[5], 0.0f0, P, Z)
j = findid(S, id)
if strict
j = channel_match(S, j, fs, BUF.x[4], loc, resp, "")
end
# Assign to SeisChannel, or append S[j]
t0 = ah_time(ti, t_s)
x = Array{fmt == one(Int32) ? Float32 : Float64, 1}(undef, nx)
fastread!(io, x)
broadcast!(bswap, x, x)
if j == 0
C = mk_ah_chan(id, loc, fs, resp, ahfile, misc, notes, nx, t0, x)
push!(S, C)
j = S.n
else
S.t[j] = t_extend(S.t[j], t0, nx, S.fs[j])
append!(S.x[j], x)
end
(v > 0) && println("id = ", id, " channel ", j, ", L = ", length(S.x[j]))
(v > 2) && println("x[1:5] = ", x[1:5])
end
close(io)
resize!(BUF.sac_cv, 192)
resize!(BUF.date_buf, 7)
resize!(BUF.x, 65535)
if length(BUF.buf) != 65535
resize!(BUF.buf, 65535)
end
return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 18924 | export sachdr, writesac, writesacpz
# =====================================================================
# Bytes 305:308 as a little-endian Int32 should read 0x06 0x00 0x00 0x00; compare each end to 0x0a to allow older SAC versions (if version in same place?)
function should_bswap(io::IO)
fastskip(io, 304)
u = fastread(io)
fastskip(io, 2)
v = fastread(io)
q::Bool = (
# Least significant byte in u
if 0x00 < u < 0x0a && v == 0x00
false
# Most significant byte in u
elseif u == 0x00 && 0x00 < v < 0x0a
true
else
error("Invalid SAC file.")
end
)
return q
end
function reset_sacbuf()
checkbuf_strict!(BUF.sac_fv, 70)
checkbuf_strict!(BUF.sac_iv, 40)
checkbuf_strict!(BUF.sac_cv, 192)
checkbuf_strict!(BUF.sac_dv, 22)
fill!(BUF.sac_fv, sac_nul_f)
fill!(BUF.sac_dv, sac_nul_d)
fill!(BUF.sac_iv, sac_nul_i)
for i in 1:24
BUF.sac_cv[1+8*(i-1):8*i] = sac_nul_c
end
BUF.sac_cv[17:24] .= 0x20
BUF.sac_iv[7] = Int32(7)
end
# =====================================================================
# write internals
function fill_sac_id(id_str::String)
id = split_id(id_str)
# Chars, all segments
ci = [169, 1, 25, 161]
for j = 1:4
(j == 3) && continue
sj = ci[j]
if isempty(id[j])
BUF.sac_cv[sj:sj+7] .= sac_nul_c
else
s = codeunits(id[j])
L = min(length(s), 8)
copyto!(BUF.sac_cv, sj, s, 1, L)
if L < 8
BUF.sac_cv[sj+L:sj+7] .= 0x20
end
end
end
return id
end
function fill_sac(si::Int64, nx::Int32, ts::Int64, id::Array{String,1})
@assert nx ≤ typemax(Int32)
t_arr!(BUF.sac_iv, ts)
# Ints
BUF.sac_iv[10] = Int32(nx)
# Floats
b = ts - round(Int64, ts/1000)*1000
BUF.sac_fv[6] = (b == 0) ? 0.0f0 : b*1.0f-6
BUF.sac_fv[7] = BUF.sac_fv[6] + BUF.sac_fv[1]*nx
# Filename
y_s = lpad(BUF.sac_iv[1], 4, '0')
j_s = lpad(BUF.sac_iv[2], 3, '0')
h_s = lpad(BUF.sac_iv[3], 2, '0')
m_s = lpad(BUF.sac_iv[4], 2, '0')
s_s = lpad(BUF.sac_iv[5], 2, '0')
ms_s = lpad(BUF.sac_iv[6], 3, '0')
fname = join([y_s, j_s, h_s, m_s, s_s, ms_s, id[1], id[2], id[3], id[4], "R.SAC"], '.')
return fname
end
function write_sac_file(fname::String, x::AbstractArray{T,1}) where T
open(fname, "w") do io
write(io, BUF.sac_fv)
write(io, BUF.sac_iv)
write(io, BUF.sac_cv)
if eltype(x) == Float32
write(io, x)
else
write(io, Float32.(x))
end
# Switching this to a user option
if BUF.sac_iv[7] == Int32(7)
write(io, BUF.sac_dv)
end
end
return nothing
end
function write_sac_channel(S::GphysData, i::Integer, nvhdr::Integer, fn::String, v::Integer)
id = fill_sac_id(S.id[i])
fs = S.fs[i]
t = S.t[i]
# Floats, all segments
dt = (fs == 0.0 ? 0.0 : 1.0/fs)
if nvhdr == 7
BUF.sac_dv[1] = dt
end
BUF.sac_fv[1] = Float32(dt)
BUF.sac_fv[4] = Float32(S.gain[i])
for i in (32, 33, 34, 58, 59)
BUF.sac_fv[i] = 0.0f0
end
if !isempty(S.loc[i])
loc = S.loc[i]
if typeof(loc) == GeoLoc
lat = getfield(loc, :lat)
lon = getfield(loc, :lon)
BUF.sac_fv[32] = Float32(lat)
BUF.sac_fv[33] = Float32(lon)
BUF.sac_fv[34] = Float32(getfield(loc, :el))
BUF.sac_fv[58] = Float32(getfield(loc, :az))
BUF.sac_fv[59] = Float32(getfield(loc, :inc))
# IRIS docs claim STLA, STLO order is reversed w.r.t. single floats
if nvhdr == 7
BUF.sac_dv[19] = lon
BUF.sac_dv[20] = lat
end
end
end
fname = ""
if fs == 0.0
v > 0 && @warn(string("Can't write irregular channels (fs = 0.0) to file; skipped channel ", i))
else
W = t_win(S.t[i], fs)
inds = x_inds(S.t[i])
BUF.sac_iv[7] = Int32(nvhdr)
BUF.sac_iv[16] = one(Int32)
BUF.sac_iv[36] = one(Int32)
for j in 1:size(inds,1)
si = inds[j,1]
ei = inds[j,2]
ts = W[j,1]
nx = Int32(ei-si+1)
if fn == ""
fname = fill_sac(si, nx, ts, id)
else
fill_sac(si, nx, ts, id)
fname = fn
end
vx = view(S.x[i], si:ei)
write_sac_file(fname, vx)
v > 0 && println(stdout, now(), ": Wrote SAC ts file ", fname, " from channel ", i, " segment ", j)
fwrite_note!(S, i, "writesac", fname, string(", fname=\"", fn, "\", v=", v))
end
end
return nothing
end
# =====================================================================
# read internals
function read_sac_stream(io::IO, full::Bool, swap::Bool)
fv = BUF.sac_fv
iv = BUF.sac_iv
cv = BUF.sac_cv
fastread!(io, fv)
fastread!(io, iv)
fastread!(io, cv)
if swap == true
fv .= bswap.(fv)
iv .= bswap.(iv)
end
nx = getindex(iv, 10)
x = Array{Float32,1}(undef, nx)
fastread!(io, x)
if swap == true
broadcast!(bswap, x, x)
end
# floats
loc = GeoLoc()
j = 0
lf = (:lat, :lon, :el, :az, :inc)
for k in (32,33,34,58,59)
j += 1
val = getindex(fv, k)
if val != sac_nul_f
setfield!(loc, lf[j], Float64(val))
end
end
# ints
ts = mktime(getindex(iv, 1),
getindex(iv, 2),
getindex(iv, 3),
getindex(iv, 4),
getindex(iv, 5),
getindex(iv, 6)*Int32(1000))
b = getindex(fv, 6)
if b != sac_nul_f
ts += round(Int64, b*sμ)
end
# chars
id = zeros(UInt8, 15)
i = 0x01
while i < 0xc1
c = getindex(cv, i)
if (c == sac_nul_start) && (i < 0xbc)
val = getindex(cv, i+0x01:i+0x05)
if val == sac_nul_Int8
cv[i:i+0x05] .= 0x20
end
elseif c == 0x00
setindex!(cv, i, 0x20)
end
# fill ID
if i == 0x01
i = fill_id!(id, cv, i, 0x08, 0x04, 0x08)
elseif i == 0xa1
id[0x0c] = 0x2e
i = fill_id!(id, cv, i, 0xa8, 0x0d, 0x0f)
elseif i == 0xa9
i = fill_id!(id, cv, i, 0xb0, 0x01, 0x02)
else
i = i+0x01
end
end
for i in 15:-1:1
if id[i] == 0x00
deleteat!(id, i)
end
end
# Create a seischannel
D = Dict{String,Any}()
C = SeisChannel(unsafe_string(pointer(id)),
"",
loc,
Float64(1.0f0/getindex(fv, 1)),
fv[4] == sac_nul_f ? 1.0 : Float64(fv[4]),
PZResp(),
"",
"",
D,
Array{String,1}(undef, 0),
mk_t(nx, ts),
x)
# Create dictionary if full headers are desired
if full == true
# Parse floats
m = 0x00
while m < 0x46
m += 0x01
if fv[m] != sac_nul_f
D[sac_float_k[m]] = getindex(fv, m)
end
end
# Parse ints
m = 0x00
while m < 0x28
m += 0x01
if iv[m] != sac_nul_i
D[sac_int_k[m]] = getindex(iv, m)
end
end
m = 0x00
j = 0x01
while j < 0x18
n = j == 0x02 ? 0x10 : 0x08
p = m + n
ii = m
while ii < p
ii += 0x01
if getindex(cv,ii) != 0x20
D[sac_string_k[j]] = unsafe_string(pointer(cv, m+0x01), n)
break
end
end
m += n
j += 0x01
end
end
# SAC v102.0 (NVHDR == 7) adds a footer
if iv[7] > 6
dv = BUF.sac_dv
fastread!(io, dv)
swap && (dv .= bswap.(dv))
# DELTA
if dv[1] != sac_nul_d
C.fs = 1.0/dv[1]
end
# B
if (b != sac_nul_f) && (dv[2] != sac_nul_d)
C.t[1,2] += (round(Int64, dv[2]*sμ) - round(Int64, b*sμ))
end
# STLO, STLA? (IRIS docs claim order is reversed w.r.t. single floats)
if dv[19] != sac_nul_d
C.loc.lon = dv[19]
end
if dv[20] != sac_nul_d
C.loc.lat = dv[20]
end
if full == true
m = 0x00
while m < 0x16
m += 0x01
if dv[m] != sac_nul_d
C.misc[sac_double_k[m]] = getindex(dv, m)
end
end
end
end
return C
end
function read_sac_file!(S::SeisData, fname::String, full::Bool, memmap::Bool, strict::Bool)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
q = should_bswap(io)
seekstart(io)
C = read_sac_stream(io, full, q)
close(io)
add_chan!(S, C, strict)
return nothing
end
# ============================================================================
# read_sacpz internals
function add_pzchan!(S::GphysData, D::Dict{String, Any}, file::String)
id = D["NETWORK (KNETWK)"] * "." *
D["STATION (KSTNM)"] * "." *
D["LOCATION (KHOLE)"] * "." *
D["CHANNEL (KCMPNM)"]
i = findid(id, S)
loc = GeoLoc( lat = parse(Float64, D["LATITUDE"]),
lon = parse(Float64, D["LONGITUDE"]),
el = parse(Float64, D["ELEVATION"]),
dep = parse(Float64, D["DEPTH"]),
az = parse(Float64, D["AZIMUTH"]),
inc = parse(Float64, D["DIP"])-90.0
)
fs = parse(Float64, D["SAMPLE RATE"])
# gain, units; note, not "INSTGAIN", that's just a scalar multiplier
gu = split(D["SENSITIVITY"], limit=2, keepempty=false)
gain = parse(Float64, gu[1])
units = lowercase(String(gu[2]))
if startswith(units, "(")
units = units[2:end]
end
if endswith(units, ")")
units = units[1:end-1]
end
units = fix_units(units2ucum(units))
#= fix for poorly-documented fundamental shortcoming:
"INPUT UNIT" => "M"
I have no idea why SACPZ uses displacement PZ =#
u_in = fix_units(units2ucum(D["INPUT UNIT"]))
Z = get(D, "Z", ComplexF32[])
if u_in != units
if u_in == "m" && units == "m/s"
deleteat!(Z, 1)
elseif u_in == "m" && units == "m/s2"
deleteat!(Z, 1:2)
end
end
resp = PZResp(parse(Float32, D["A0"]),
0.0f0,
get(D, "P", ComplexF32[]),
Z
)
if i == 0
# resp
C = SeisChannel()
setfield!(C, :id, id)
setfield!(C, :name, D["DESCRIPTION"])
setfield!(C, :loc, loc)
setfield!(C, :fs, fs)
setfield!(C, :gain, gain)
setfield!(C, :resp, resp)
setfield!(C, :units, units)
setfield!(C, :misc, D)
push!(S, C)
else
ts = Dates.DateTime(get(D, "START", "1970-01-01T00:00:00")).instant.periods.value*1000 - dtconst
te = Dates.DateTime(get(D, "END", "2599-12-31T23:59:59")).instant.periods.value*1000 - dtconst
t0 = isempty(S.t[i]) ? ts : starttime(S.t[i], S.fs[i])
if ts ≤ t0 ≤ te
(S.fs[i] == 0.0) && (S.fs[i] = fs)
(isempty(S.units[i])) && (S.units[i] = units)
(S.gain[i] == 1.0) && (S.gain[i] = gain)
(typeof(S.resp[i]) == GenResp || isempty(S.resp[i])) && (S.resp[i] = resp)
(isempty(S.name[i])) && (S.name[i] = D["DESCRIPTION"])
isempty(S.loc[i]) && (S.loc[i] = loc)
S.misc[i] = merge(D, S.misc[i])
end
end
return nothing
end
# ============================================================================
# NOTE: Leave keyword arguments, even if some aren't type-stable!
# Use of "optional" variables instead is a 5x **slowdown**
"""
sachdr(f)
Print formatted SAC headers from file `f` to stdout. Does not accept wildcard
file strings.
"""
function sachdr(fname::String)
S = read_data("sac", fname, full=true)
for i = 1:S.n
D = getindex(getfield(S, :misc), i)
src = getindex(getfield(S, :src), i)
printstyled(string(src, "\n"), color=:light_green, bold=true)
for k in sort(collect(keys(D)))
println(stdout, uppercase(k), ": ", string(D[k]))
end
end
return nothing
end
"""
writesac(S::Union{GphysData,GphysChannel}[, chans, nvhdr=6, fname="", v=0])
Write all data in SeisData structure `S` to auto-generated SAC files.
Keywords:
* `chans="CC"` writes data from ChanSpec CC (GphysData only)
* `fname="FF"` uses filename FF (GphysChannel only)
* `nvhdr` is SAC NVHDR, the file header version (6 or 7). Default is 6.
* `v` is verbosity.
"""
function writesac(S::GphysData;
chans::ChanSpec=Int64[],
fname::String="",
nvhdr::Integer=6,
v::Integer=KW.v)
reset_sacbuf()
chans = mkchans(chans, S)
for i in chans
write_sac_channel(S, i, nvhdr, fname, v)
reset_sacbuf()
end
return nothing
end
function writesac(S::GphysChannel;
fname::String="",
nvhdr::Integer=6,
v::Integer=KW.v)
fstr = String(
if fname == ""
fname
else
if endswith(lowercase(fname), ".sac")
fname
else
fname * ".sac"
end
end
)
writesac(SeisData(S), nvhdr=nvhdr, fname=fstr, v=v)
return nothing
end
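# Usage sketches for `writesac`; variable and file names are hypothetical:
#   writesac(S)                          # one auto-named SAC file per channel of S
#   writesac(S, chans=1:3, nvhdr=7)      # channels 1-3 only, with NVHDR=7 headers
#   writesac(C, fname="trace.sac")       # a single GphysChannel to a chosen file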
@doc """
read_sacpz!(S::GphysData, pzfile::String)
Read sacpz file `pzfile` into SeisIO struct `S`.
If an ID in the pz file matches channel `i` at times in `S.t[i]`:
* Fields :fs, :gain, :loc, :name, :resp, :units are overwritten if empty/unset
* Information from the pz file is merged into :misc if the corresponding keys
aren't in use.
""" read_sacpz!
function read_sacpz!(S::GphysData, file::String; memmap::Bool=false)
io = memmap ? IOBuffer(Mmap.mmap(file)) : open(file, "r")
read_state = 0x00
D = Dict{String, Any}()
kv = Array{String, 1}(undef, 2)
# Do this for each channel
while true
# EOF
if fasteof(io)
add_pzchan!(S, D, file)
break
end
line = fast_readline(io)
# Header section
if startswith(line, "*")
if endswith(strip(line), "**")
read_state += 0x01
if read_state == 0x03
add_pzchan!(S, D, file)
read_state = 0x01
D = Dict{String, Any}()
end
else
kv .= strip.(split(line[2:end], ":", limit=2, keepempty=false))
D[kv[1]] = kv[2]
end
# Zeros section
elseif startswith(line, "ZEROS")
N = parse(Int64, split(line, limit=2, keepempty=false)[2])
D["Z"] = zeros(Complex{Float32}, N)
for i = 1:N
try
mark(io)
zc = split(fast_readline(io), limit=2, keepempty=false)
D["Z"][i] = complex(parse(Float32, zc[1]), parse(Float32, zc[2]))
catch
reset(io)
end
end
# Poles section
elseif startswith(line, "POLES")
N = parse(Int64, split(line, limit=2, keepempty=false)[2])
D["P"] = Array{Complex{Float32},1}(undef, N)
for i = 1:N
pc = split(fast_readline(io), limit=2, keepempty=false)
D["P"][i] = complex(parse(Float32, pc[1]), parse(Float32, pc[2]))
end
# Constant section
elseif startswith(line, "CONSTANT")
D["CONSTANT"] = String(split(line, limit=2, keepempty=false)[2])
end
end
close(io)
return S
end
@doc (@doc read_sacpz)
function read_sacpz(file::String; memmap::Bool=false)
S = SeisData()
read_sacpz!(S, file, memmap=memmap)
return S
end
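# Usage sketch for `read_sacpz`/`read_sacpz!`; file names are hypothetical:
#   S = read_sacpz("SAC_PZs_UW_LON_BHZ")     # new SeisData from a SACPZ file
#   read_sacpz!(S, "SAC_PZs_UW_LON_EHZ")     # merge another SACPZ file into S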
@doc """
writesacpz(pzfile::String, S::GphysData[, chans::ChanSpec=CC])
Write fields from SeisIO struct `S` into sacpz file `pzfile`. Uses information
from fields :fs, :gain, :loc, :misc, :name, :resp, :units. Specify `chans=CC` to only write channels `CC`.
""" writesacpz
function writesacpz(file::String, S::GphysData; chans::ChanSpec=Int64[])
cc = mkchans(chans, S, keepempty=true)
io = open(file, "w")
for i in cc
id = split_id(S.id[i])
created = get(S.misc[i], "CREATED", string(u2d(time())))
ts_str = isempty(S.t[i]) ? "1970-01-01T00:00:00" : string(u2d(starttime(S.t[i], S.fs[i])*μs))
t_start = get(S.misc[i], "START", ts_str)
t_end = get(S.misc[i], "END", "2599-12-31T23:59:59")
unit_in = get(S.misc[i], "INPUT UNIT", "?")
unit_out = get(S.misc[i], "OUTPUT UNIT", "?")
Y = typeof(S.resp[i])
if Y == GenResp
a0 = 1.0
P = S.resp[i].resp[:,1]
Z = deepcopy(S.resp[i].resp[:,2])
elseif Y in (PZResp, PZResp64)
a0 = getfield(S.resp[i], :a0)
P = getfield(S.resp[i], :p)
Z = deepcopy(getfield(S.resp[i], :z))
elseif Y == MultiStageResp
j = 0
for k = 1:length(S.resp[i].stage)
stg = S.resp[i].stage[k]
if typeof(stg) in (PZResp, PZResp64)
j = k
break
end
end
if j == 0
@warn(string("Skipped channel ", i, " (", id, "): incompatible response Type"))
continue
else
a0 = getfield(S.resp[i].stage[j], :a0)
P = getfield(S.resp[i].stage[j], :p)
Z = deepcopy(getfield(S.resp[i].stage[j], :z))
end
end
write(io, 0x2a)
write(io, 0x20)
write(io, fill!(zeros(UInt8, 34), 0x2a))
write(io, 0x0a)
write(io, "* NETWORK (KNETWK): ", id[1], 0x0a)
write(io, "* STATION (KSTNM): ", id[2], 0x0a)
write(io, "* LOCATION (KHOLE): ", isempty(id[3]) ? " " : id[3], 0x0a)
write(io, "* CHANNEL (KCMPNM): ", id[4], 0x0a)
write(io, "* CREATED : ", created, 0x0a)
write(io, "* START : ", t_start, 0x0a)
write(io, "* END : ", t_end, 0x0a)
write(io, "* DESCRIPTION : ", S.name[i], 0x0a)
loc = zeros(Float64, 6)
if typeof(S.loc[i]) == GeoLoc
for (j, f) in enumerate([:lat, :lon, :el, :dep, :inc, :az])
loc[j] = getfield(S.loc[i], f)
end
end
write(io, "* LATITUDE : ", @sprintf("%0.6f", loc[1]), 0x0a)
write(io, "* LONGITUDE : ", @sprintf("%0.6f", loc[2]), 0x0a)
write(io, "* ELEVATION : ", string(loc[3]), 0x0a)
write(io, "* DEPTH : ", string(loc[4]), 0x0a)
write(io, "* DIP : ", string(loc[5]+90.0), 0x0a)
write(io, "* AZIMUTH : ", string(loc[6]), 0x0a)
write(io, "* SAMPLE RATE : ", string(S.fs[i]), 0x0a)
for j in ("INPUT UNIT", "OUTPUT UNIT", "INSTTYPE", "INSTGAIN", "COMMENT")
write(io, 0x2a, 0x20)
write(io, rpad(j, 18))
write(io, 0x3a, 0x20)
v = get(S.misc[i], j, "")
write(io, v)
if j == "INSTGAIN" && v == ""
write(io, "1.0E+00 (", S.units[i], ")")
end
write(io, 0x0a)
end
write(io, "* SENSITIVITY : ", @sprintf("%12.6e", S.gain[i]), 0x20, 0x28, uppercase(S.units[i]), 0x29, 0x0a)
NZ = length(Z)
NP = length(P)
write(io, "* A0 : ", @sprintf("%12.6e", a0), 0x0a)
CONST = get(S.misc[i], "CONSTANT", string(a0*S.gain[i]))
write(io, 0x2a)
write(io, 0x20)
write(io, fill!(zeros(UInt8, 34), 0x2a))
write(io, 0x0a)
# fix for units_in always being m
if S.units[i] == "m/s"
NZ += 1
pushfirst!(Z, zero(ComplexF32))
elseif S.units[i] == "m/s2"
NZ += 2
prepend!(Z, zeros(ComplexF32, 2))
end
write(io, "ZEROS\t", string(NZ), 0x0a)
for i = 1:NZ
write(io, 0x09, @sprintf("%+12.6e", real(Z[i])), 0x09, @sprintf("%+12.6e", imag(Z[i])), 0x09, 0x0a)
end
write(io, "POLES\t", string(NP), 0x0a)
for i = 1:NP
write(io, 0x09, @sprintf("%+12.6e", real(P[i])), 0x09, @sprintf("%+12.6e", imag(P[i])), 0x09, 0x0a)
end
write(io, "CONSTANT\t", CONST, 0x0a)
write(io, 0x0a, 0x0a)
fwrite_note!(S, i, "writesacpz", file, string(", chans=", i))
end
close(io)
return nothing
end
writesacpz(fname::String, C::GphysChannel) = writesacpz(fname, SeisData(C), chans=1)
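# Usage sketch for `writesacpz`; the file name is hypothetical:
#   writesacpz("meta.pz", S)              # write all channels of S
#   writesacpz("meta.pz", S, chans=1:2)   # write channels 1-2 only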
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 14258 | export segyhdr
# ============================================================================
# Utility functions not for export
function ibmfloat(x::UInt32)
local fra = ntoh(x)
local sgn = UInt8((fra & 0x80000000)>>31)
fra <<= 1
local exp = Int16(fra >> 25) - Int16(64)
fra <<= 7
y = (sgn == 0x00 ? 1.0 : -1.0) * 16.0^exp * signed(fra >> 8)/16777216
return y
end
#=
This is actual IBM hexadecimal float, and correctly parses the examples from
https://en.wikipedia.org/wiki/IBM_hexadecimal_floating_point ; see tests.
The version in JuliaSeis is wrong because IBM float has a range too wide for
IEEE single float in Julia.
The description in the SEG Y manual does something strange with the last bit.
My last line is computationally expensive; working out the radix shift and
using >> would be much better.
=#
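# Worked example (value from the Wikipedia page cited above): the IBM hex float
# word 0xC276A000 encodes -118.625. `ibmfloat` calls `ntoh` on its argument, so
# `hton` is applied here to hand it the word in the expected byte order:
#   ibmfloat(hton(0xC276A000)) == -118.625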
function trid(i::Int16, fs::Float64, fc::Float64)
S = ["DH", "HZ", "H1", "H2", "HZ", "HT", "HR"]
return string(getbandcode(fs, fc=fc), S[i])
end
function mk_lc(lv::Int32)
L1 = UInt8(div(lv, Int32(36)))
L2 = UInt8(rem(lv, Int32(36)))
c1 = 0x30 + L1 + (L1 > 0x09 ? 0x07 : 0x00)
c2 = 0x30 + L2 + (L2 > 0x09 ? 0x07 : 0x00)
return String([c1, c2])
end
function auto_coords(xy::Array{Int32, 1}, sc::Array{Int16, 1})
xy == Int32[0,0] && return (0.0, 0.0)
lon = Float64(xy[1])
lat = Float64(xy[2])
coord_scale = sc[1]
coord_units = sc[2]
(coord_scale < 0) && (coord_scale = -1 / coord_scale)
lat *= coord_scale
lon *= coord_scale
#=
Conversion formula for coord_units == 1 "borrowed" from PASSSOFT segy2sac.
Assumes (possibly wrong) x-y coordinate origin & spherical Earth.
coord_units == 3 and coord_units == 4 aren't supported; never seen them.
=#
if coord_units == 1
iflg = lon < 0 ? -1 : 1
c = 111194.6976
D = sqrt(lat^2 + lon^2) / c
lat /= c
d = cosd(D)
lon = iflg*acosd(d / cosd(lat))
else
lat /= 3600.0
lon /= 3600.0
end
return lat, lon
end
function do_trace(f::IO,
passcal::Bool,
full::Bool,
ll::UInt8,
swap::Bool,
fh::Array{Int16,1}
)
# First part of trace header is quite standard
buf = BUF.buf
ints = BUF.int32_buf
shorts = BUF.int16_buf
lat = 0.0
lon = 0.0
checkbuf_8!(buf, max(180, length(buf)))
fast_readbytes!(f, buf, 180)
intbuf = reinterpret(Int32, buf)
copyto!(ints, 1, intbuf, 1, 7)
copyto!(ints, 8, intbuf, 10, 8)
copyto!(ints, 16, intbuf, 19, 4)
shortbuf = reinterpret(Int16, buf)
copyto!(shorts, 1, shortbuf, passcal ? 1 : 15, 4) # shorts[1:4]
copyto!(shorts, 5, shortbuf, 35, 2) # shorts[5:6]
copyto!(shorts, 7, shortbuf, 45, 46) # shorts[7:52]
if passcal
fast_readbytes!(f, buf, 40)
scale_fac = fastread(f, Float32)
inst_no = fastread(f, UInt16)
shorts[62] = fastread(f, Int16)
ints[21] = fastread(f, Int32)
ints[22] = fastread(f, Int32)
ints[23] = fastread(f, Int32)
setindex!(shorts, shortbuf[10], 53)
copyto!(shorts, 54, shortbuf, 13, 8)
setindex!(ints, intbuf[6], 20)
if swap
shorts .= ntoh.(shorts)
ints .= ntoh.(ints)
scale_fac = bswap(scale_fac)
inst_no = bswap(inst_no)
end
chars = buf[1:18]
dt = getindex(ints,20)
n = getindex(shorts, 20)
fmt = getindex(shorts, 54)
nx = (n == typemax(Int16) ? getindex(ints,21) : Int32(n))
T = (fmt == one(Int16) ? Int32 : Int16)
nb = checkbuf!(buf, nx, T)
fast_readbytes!(f, buf, nb)
# trace processing
y = reinterpret(T, buf)
x = Array{Float32,1}(undef, nx)
if swap
for i = 1:nx
x[i] = bswap(y[i])
end
else
copyto!(x, 1, y, 1, nx)
# faster than reprocessing with fillx_ for long files
end
z = getindex(shorts, 5)
δ = getindex(shorts, 21)
fs = sμ / Float64(δ == one(Int16) ? dt : δ)
gain = 1.0 / (Float64(scale_fac) * 10.0^(-1.0*Float64(shorts[24]) / 10.0) / Float64(shorts[23]))
lat, lon = auto_coords(ints[18:19], shorts[6:7])
if abs(lat) < 0.1 && abs(lon) < 0.1
lat *= 3600.0
lon *= 3600.0
end
el = Float64(ints[9]) * Float64(abs(z))^(z < zero(Int16) ? -1.0 : 1.0)
# Create ID
id_arr = zeros(UInt8,8)
i = one(Int8)
o = one(Int8)
while i < Int8(18)
if chars[i] == 0x00
chars[i] = 0x20
end
i = i+o
end
fill_id!(id_arr, chars, one(Int16), Int16(6), Int16(2), Int16(6))
id_arr[1] = 0x2e
id_arr[8] = 0x2e
deleteat!(id_arr, id_arr.==0x00)
# Channel string is tedious; one-character channel names like "Z" are common
inds = Int64[]
for i in 15:18
if chars[i] > 0x20
push!(inds, i)
end
end
ch_arr = isempty(inds) ? UInt8[] : chars[inds]
nc = length(ch_arr)
if nc == 1
c = uppercase(Char(ch_arr[1]))
if c in ('Z','N','E')
cha = string(getbandcode(fs), 'H', c)
else
cha = "YYY"
end
elseif nc == 0
cha = "YYY"
else
cha = String(ch_arr[1:min(3, nc)])
end
id = String(id_arr) * cha
else
(dt, nx, fmt) = fh
fast_readbytes!(f, buf, 38)
ints[26] = fastread(f, Int32)
shorts[60] = fastread(f, Int16)
ints[27] = fastread(f, Int32)
shorts[61] = fastread(f, Int16)
shorts[62] = fastread(f, Int16)
ints[28] = fastread(f, Int32)
ints[29] = fastread(f, Int32)
copyto!(ints, 20, intbuf, 1, 5)
copyto!(shorts, 53, shortbuf, 11, 2) # shorts[53:54]
ints[25] = getindex(intbuf, 7)
copyto!(shorts, 55, shortbuf, 15, 5) # shorts[55:59]
T = getindex(segy_ftypes, fmt)
if (T == Any)
close(f)
error(string("Trace data code = ", fmt, " unsupported!"))
end
nb = checkbuf!(buf, nx, T)
fast_readbytes!(f, buf, nb)
# trace processing
x = Array{Float32,1}(undef, nx)
if T == Int16
fillx_i16_be!(x, buf, nx, 0)
elseif T == Int32
fillx_i32_be!(x, buf, nx, 0)
elseif T == Int8
fillx_i8!(x, buf, nx, 0)
elseif T == Float32
x .= bswap.(reinterpret(Float32, buf))[1:nx]
elseif T == UInt32
y = Array{UInt32,1}(undef, nx)
fillx_u32_le!(y, buf, nx, 0) # _le because ibmfloat bswaps
x = ibmfloat.(y)
end
if swap
shorts .= ntoh.(shorts)
ints .= ntoh.(ints)
end
z = getindex(shorts, 5)
δ = getindex(shorts, 21)
fs = sμ / Float64(δ)
# not sure about meaning of "dB" in gain constants
gain = Float64(ints[25]) * 10.0^(shorts[55] + (shorts[23] + shorts[24])/10.0)
el = Float64(ints[9]) * Float64(abs(z))^(z<Int16(0) ? -1.0 : 1.0)
# Create ID
sta = string(reinterpret(UInt16, shorts[57]))
lc = ll > 0x00 ? mk_lc(ints[ll]) : ""
cha = Int16(10) < shorts[1] < Int16(18) ? trid(shorts[1]-Int16(10), fs, 1.0) : "YYY"
id = string(".", sta[1:min(lastindex(sta),5)], ".", lc, ".", cha)
end
if full == true
shorts_k = String["trace_id_code", "n_summed_z", "n_summed_h", "data_use",
"z_sc", "h_sc", "h_units_code", "v_weather",
"v_subweather", "t_src_uphole", "t_rec_uphole", "src_static_cor",
"rec_static_cor", "total_static", "t_lag_a", "t_lag_b",
"t_delay", "t_mute_st", "t_mute_en", "nx",
"delta", "gain_type", "gain_const", "init_gain",
"correlated", "sweep_st", "sweep_en", "sweep_len",
"sweep_type", "sweep_tap_st", "sweep_tap_en", "tap_type",
"f_alias", "slope_alias", "f_notch", "slope_notch",
"f_low_cut", "f_high_cut", "slope_low_cut", "slope_high_cut",
"year", "day", "hour", "minute",
"second", "time_code", "trace_wt_fac", "geophone_roll_p1",
"geophone_first_tr", "geophone_last_tr", "gap_size", "overtravel"]
append!(shorts_k, passcal ?
String["total_static_hi", "data_form", "ms", "trigyear",
"trigday", "trighour", "trigminute", "trigsecond",
"trigms", "not_to_be_used"] :
String["shot_scalar", "trace_units_code", "trans_exp", "trans_units_code",
"device_id", "trace_time_sc", "src_type_code", "src_energy_dir",
"src_exp", "src_units_code"])
ints_k = String["trace_seq_line", "trace_seq_file", "rec_no", "channel_no",
"energy_src_pt", "ensemble_no", "trace_in_ensemble", "src-rec_dist",
"rec_ele", "src_ele", "src_dep", "rec_datum_ele",
"src_datum_ele", "src_water_dep", "rec_water_dep", "src_x",
"src_y", "rec_x", "rec_y"]
append!(ints_k, passcal ? String["samp_rate", "num_samps", "max", "min"] :
String["cdp_x", "cdp_y", "inline_3d", "crossline_3d", "shot_point",
"trans_mant", "unassigned_1", "unassigned_2"] )
misc = Dict{String,Any}()
[misc[shorts_k[i]] = shorts[i] for i in 1:length(shorts_k)]
[misc[ints_k[i]] = ints[i] for i in 1:length(ints_k)]
misc["scale_fac"] = gain
if passcal
sta = String(chars[1:6])
misc["inst_no"] = inst_no
misc["sensor_serial"] = String(chars[7:14])
end
misc["station_name"] = sta
misc["channel_name"] = cha
end
# Trace info
ts = mktime(shorts[41], shorts[42], shorts[43], shorts[44], shorts[45], zero(Int16)) +
shorts[53] + 1000*sum(shorts[15:17])
loc = GeoLoc()
loc.lat = lat
loc.lon = lon
loc.el = el
C = SeisChannel()
setfield!(C, :name, id)
setfield!(C, :id, id)
setfield!(C, :loc, loc)
setfield!(C, :gain, gain)
setfield!(C, :fs, fs)
mk_t!(C, length(x), ts)
setfield!(C, :x, x)
if passcal == false
setfield!(C, :units, get(segy_units, Int16(shorts[56]), ""))
end
if full == true
setfield!(C, :misc, misc)
end
return C
end
function read_segy_file!( S::GphysData,
fname::String,
ll::UInt8,
passcal::Bool,
memmap::Bool,
full::Bool,
swap::Bool,
strict::Bool)
f = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
trace_fh = Array{Int16, 1}(undef, 3)
if passcal == true
C = do_trace(f, true, full, ll, swap, trace_fh)
add_chan!(S, C, strict)
close(f)
else
shorts = getfield(BUF, :int16_buf)
# File headers
filehdr = fastread(f, 3200)
jobid = bswap(fastread(f, Int32))
lineid = bswap(fastread(f, Int32))
reelid = bswap(fastread(f, Int32))
fast_readbytes!(f, BUF.buf, 48)
fillx_i16_be!(shorts, BUF.buf, 24, 0)
fastskip(f, 240)
# Some early sample files had these in little-endian byte order
for i = 25:27
shorts[i] = bswap(fastread(f, Int16))
end
fastskip(f, 94)
# Process file header
nh = max(zero(Int16), getindex(shorts, 27))
if full == false
fastskip(f, 3200*nh)
else
exthdr = Array{String,1}(undef, nh)
[exthdr[i] = fastread(f, 3200) for i in 1:nh]
fhd = Dict{String,Any}(
zip(String["ntr", "naux", "filedt", "origdt", "filenx",
"orignx", "fmt", "cdpfold", "trasort", "vsum",
"swst", "swen0", "swlen", "swtyp", "tapnum",
"swtapst", "swtapen", "taptyp", "corrtra", "bgainrec",
"amprec", "msys", "zupdn", "vibpol", "segyver",
"isfixed", "n_exthdr"], shorts[1:27])
)
fhd["jobid"] = jobid
fhd["lineid"] = lineid
fhd["reelid"] = reelid
fhd["filehdr"] = filehdr
fhd["exthdr"] = exthdr
end
trace_fh[1] = getindex(shorts,3)
trace_fh[2] = getindex(shorts,5)
trace_fh[3] = getindex(shorts,7)
# Channel headers
nt = shorts[1]
for i = 1:nt
# "swap" is always true for valid SEG Y data
C = do_trace(f, false, full, ll, true, trace_fh)
j = add_chan!(S, C, strict)
if full == true
merge!(S.misc[j], fhd)
end
end
close(f)
end
resize!(BUF.buf, 65535)
return S
end
# ============================================================================
"""
segyhdr(f[; passcal=false, ll=LL, swap=false])
Print formatted, sorted SEG-Y headers of file `f` to stdout. Use keyword
`passcal=true` for PASSCAL/NMT modified SEG Y; use `swap=true` for big-endian
PASSCAL. See SeisIO `read_data` documentation for `ll` codes.
"""
function segyhdr(fname::String; ll::UInt8=0x00, passcal::Bool=false, swap::Bool=false)
if passcal
seis = read_data("passcal", fname::String, full=true, ll=ll, swap=swap)
else
seis = read_data("segy", fname::String, ll=ll, full=true)
end
if passcal
printstyled(stdout, @sprintf("%20s: %s\n", "PASSCAL SEG-Y FILE", realpath(fname)), color=:green, bold=true)
D = getindex(getfield(seis, :misc),1)
for k in sort(collect(keys(D)))
@printf(stdout, "%20s: %s\n", k, string(get(D, k, "")))
end
else
p = 1; w = 32; W = displaysize(stdout)[2]-2
S = fill("", length(seis.misc[1])+1)
printstyled(stdout, @sprintf("%20s: %s\n", "SEG-Y FILE", realpath(fname)), color=:green, bold=true)
for i = 1:seis.n
if p > 1
s = @sprintf("%20i/%i", i, seis.n)
else
s = @sprintf(" Trace # %11i/%i", i, seis.n)
end
S[1] *= s
S[1] *= " "^(w-length(s))
D = getindex(getfield(seis, :misc), i)
for (j,k) in enumerate(sort(collect(keys(D))))
if k == "exthdr" || k == "filehdr"
val = get(seis.misc[i], k, "")
if isempty(val)
s = "(empty)"
else
s = length(val) > 8 ? String(val[1:8])*"…" : String(val)
end
else
s = string(get(seis.misc[i], k, ""))
end
filler = " "^max(0, 10-length(s))
S[j+1] *= @sprintf("%20s: %s%s", k, s, filler)
end
if (p+2*w > W) || (i == seis.n)
printstyled(stdout, S[1]*"\n", color=:yellow, bold=true)
[println(S[j]) for j=2:length(S)]
println("")
S = fill("", length(seis.misc[i])+1)
p = 1
else
p += w
end
end
printstyled(stdout, @sprintf("%20s\n","END OF RECORD"), color=208, bold=true)
end
return nothing
end
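# Usage sketches for `segyhdr`; file names are hypothetical:
#   segyhdr("shot_0001.segy")                       # standard SEG Y
#   segyhdr("04.008.21.29.52.STA.1", passcal=true)  # PASSCAL/NMT single-trace SEG Y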
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 8822 | function findhex(hexID::UInt16, hexIDs::Array{UInt16,1})
k = 0
@inbounds while k < lastindex(hexIDs)
k += 1
if hexID == getindex(hexIDs, k)
return k
end
end
k = -1
return k
end
function win32_cfile!( fname::String,
hex_bytes::Array{UInt8,1},
hexIDs::Array{UInt16,1},
S::SeisData,
fc::Array{Float32,1},
hc::Array{Float32,1},
nx_new::Int64,
memmap::Bool
)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
while !fasteof(io)
chan_line = fast_readline(io)
occursin(r"^\s*(?:#|$)", chan_line) && continue
# chan_info fills S
chan_info = String.(split(chan_line))
# Assign identifying info to placeholder arrays
hex2bytes!(hex_bytes, chan_info[1])
push!(hexIDs, reinterpret(UInt16, hex_bytes)[1])
push!(fc, one(Float32)/parse(Float32, chan_info[10]))
push!(hc, parse(Float32, chan_info[11]))
# Create new channel in S from chan_info
loc = GeoLoc()
loc.lat = parse(Float64, chan_info[14])
loc.lon = parse(Float64, chan_info[15])
loc.el = parse(Float64, chan_info[16])
C = SeisChannel()
setfield!(C, :id, string(chan_info[4], ".", chan_info[5]))
if length(chan_info) > 18
setfield!(C, :name, chan_info[19])
end
setfield!(C, :units, chan_info[9])
setfield!(C, :gain, Float64(parse(Float32, chan_info[13]) /
(parse(Float32, chan_info[8]) *
10.0f0^(parse(Float32, chan_info[12]) / 20.0f0))))
setfield!(C, :loc, loc)
setfield!(C, :x, Array{Float32,1}(undef, nx_new))
D = getfield(C, :misc)
D["lineDelay"] = parse(Float32, chan_info[3]) / 1000.0f0
D["pCorr"] = parse(Float32, chan_info[17])
D["sCorr"] = parse(Float32, chan_info[18])
push!(S, C)
end
return nothing
end
@doc """
S = readwin32(dfilestr, cfilestr)
Read all win32 data files matching pattern `dfilestr` into SeisData object `S`,
with channel info stored in channel files matching pattern `cfilestr`. Both
file patterns accept wildcards.
readwin32!(S, dfilestr, cfilestr)
As above, appending data to an existing SeisData object `S`.
!!! warning
Using multiple channel files applies no redundancy checks of any kind.
""" readwin32
function readwin32( dfilestr::String,
cfilestr::String,
jst::Bool,
nx_new::Int64,
nx_add::Int64,
memmap::Bool,
v::Integer
)
S = SeisData()
# Parse channel file(s)
hex_bytes = Array{UInt8,1}(undef,2)
hexIDs = Array{UInt16,1}(undef, 0)
fc = Array{Float32,1}(undef,0)
hc = Array{Float32,1}(undef,0)
if safe_isfile(cfilestr)
win32_cfile!(cfilestr, hex_bytes, hexIDs, S, fc, hc, nx_new, memmap)
else
cfiles = ls(cfilestr)
@inbounds for cfile in cfiles
v > 1 && println("Reading channel file ", cfile)
win32_cfile!(cfile, hex_bytes, hexIDs, S, fc, hc, nx_new, memmap)
end
end
L = lastindex(hexIDs)
# Parse data files
files = ls(dfilestr)
nf = length(files)
jst_const = (jst == true ? 32400000000 : 0)
date_hex = zeros(UInt8, 8)
date_arr = zeros(Int64, 6)
# checkbuf!(BUF.int32_buf, 100)
checkbuf!(BUF.buf, 1000)
buf = getfield(BUF, :buf)
x = view(BUF.int32_buf, 1:100)
# Preallocate arrays
sums = zeros(Int64, L)
seisN = zeros(Int64, L)
OldTime = zeros(Int64, L)
Δgap = zeros(Int64, L)
locID = Array{String,1}(undef, L)
xi = zeros(Int64, L)
gapStart = Array{Array{Int64,1},1}(undef, L)
gapEnd = Array{Array{Int64,1},1}(undef, L)
@inbounds for fname in files
v > 0 && println("Processing ", fname)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
fastskip(io, 4)
while !eof(io)
# Start time
read!(io, date_hex)
t_new = datehex2μs!(date_arr, date_hex)
fastskip(io, 4)
# Bytes to read
lsecb = Int64(ntoh(fastread(io, UInt32))); τ = 0
while τ < lsecb
orgID = fastread(io)
netID = fastread(io)
hexID = fastread(io, UInt16)
k = findhex(hexID, hexIDs)
V = ntoh(fastread(io, UInt16))
C = Int64(V >> 12)
N = Int64(V & 0x0fff); Nh = N
# Increment bytes read (this file), decrement N if not 4-bit
if C == 0
B = div(N, 2)
else
N -= 1
B = C*N
end
τ += 10 + B
ii = getindex(xi, k)
# Create new channel
if ii == 0
nx = 60*Nh*nf
if nx != lastindex(S.x[k])
resize!(S.x[k], nx)
end
setindex!(getfield(S, :fs), Float64(Nh), k)
setindex!(Δgap, div(1000000, 2*Nh), k)
setindex!(getfield(S, :t), mk_t(nx, t_new-jst_const), k)
setindex!(gapEnd, Int64[], k)
setindex!(gapStart, Int64[], k)
setindex!(locID, bytes2hex([orgID & 0xff | (netID << 4) & 0xf0]), k)
D = getindex(getfield(S, :misc), k)
D["orgID"] = orgID
D["netID"] = netID
D["hexID"] = hexID
D["locID"] = locID[k]
end
# Parse data
x[1] = bswap(fastread(io, Int32))
if C == 0
fast_readbytes!(io, buf, B)
fillx_i4!(x, buf, B, 1)
elseif C == 1
fast_readbytes!(io, buf, N)
fillx_i8!(x, buf, N, 1)
elseif C == 2
fast_readbytes!(io, buf, 2*N)
fillx_i16_be!(x, buf, N, 1)
elseif C == 3
fast_readbytes!(io, buf, 3*N)
fillx_i24_be!(x, buf, N, 1)
else
fast_readbytes!(io, buf, 4*N)
fillx_i32_be!(x, buf, N, 1)
end
# Account for time gaps
t_old = getindex(OldTime, k)
gap = t_new - t_old
if ((gap - 1000000) > Δgap[k]) && (t_old > 0)
gl = div((gap - 1000000), 1000000)
(v > 0) && @warn(string("Time gap detected! (channel ", hexID, ", length ", @sprintf("%.3f", gl*μs), "s, begin ", u2d(t_old*μs), ")"))
P = Nh*gl
push!(gapStart[k], ii + 1)
push!(gapEnd[k], ii + P)
ii += P
end
y = getindex(getfield(S, :x), k)
xa = first(x)
for j in 2:Nh
xa += getindex(x,j)
setindex!(x, xa, j)
end
copyto!(y, ii+1, x, 1, Nh)
# Update counters
OldTime[k] = t_new
sums[k] += x[Nh]
seisN[k] += Nh
xi[k] = ii + Nh
end
end
close(io)
end
# Post-process
κ = findall(xi.==0)
@inbounds for i = 1:S.n
(i in κ) && continue
χ = getindex(S.x, i)
# Ensure we aren't overcompensating
lastindex(χ) == getindex(xi, i) || resize!(χ, xi[i])
# Get resp for passive velocity sensors
fci = getindex(fc, i)
hci = getindex(hc, i)
resp = fctoresp(fci, hci)
setindex!(getfield(S, :resp), resp, i)
# There will be issues here. Japanese files use NIED or local station
# names, which don't necessarily use international station or network codes.
# For an example of the (lack of) correspondence see
# http://data.sokki.jmbsc.or.jp/cdrom/seismological/catalog/appendix/apendixe.htm
(net, sta, cha) = split(S.id[i], ".", limit=3, keepempty=true)
# Band code
bb = getbandcode(getindex(getfield(S, :fs), i), fc=fci)
# Channel code
cc = String(cha)[1:1]
if cc == "U"
cc = "Z"; S.loc[i].inc = 180.0
elseif cc == "N"
S.loc[i].inc = 90.0
elseif cc == "E"
S.loc[i].az = 90.0
S.loc[i].inc = 90.0
end
id = string(net, ".", sta, ".", locID[i], ".", bb, "H", cc)
setindex!(getfield(S, :id), id, i)
# Fill gaps with mean of data
J = length(gapStart[i])
if J > 0
μ = sums[i] / seisN[i]
gs = gapStart[i]
ge = gapEnd[i]
for n in 1:J
fill!(view(χ, gs[n]:ge[n]), μ)
end
end
end
if !isempty(κ)
deleteat!(S, κ)
v > 0 && println("Deleted ", length(κ), " empty channels after read.")
end
return S
end
@doc (@doc readwin32)
function readwin32!(S::SeisData,
dfilestr::String,
cfilestr::String,
jst::Bool,
nx_new::Int64,
nx_add::Int64,
memmap::Bool,
strict::Bool,
v::Integer
)
U = readwin32(dfilestr, cfilestr, jst, nx_new, nx_add, memmap, v)
if S.n > 0
for i in 1:U.n
j = add_chan!(S, U[i], strict)
end
else
append!(S, U)
end
return nothing
end
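# Usage sketch for the low-level positional form of `readwin32`; file patterns
# and buffer sizes are hypothetical, and these internals are usually wrapped by
# a higher-level reader:
#   S = readwin32("2014*.cnt", "01_01_20140101.euc.ch", false, 360000, 360000, false, 0)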
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5206 | export fill_pbo!
# https://www.unavco.org/data/strain-seismic/bsm-data/lib/docs/bottle_format.pdf
bottle_chans = Dict{String, Tuple{String,String}}(
"BatteryVolts" => ("ABV", "V"),
"CalOffsetCH0G3" => ("AO0", "{unknown}"),
"CalOffsetCH1G3" => ("AO1", "{unknown}"),
"CalOffsetCH2G3" => ("AO2", "{unknown}"),
"CalOffsetCH3G3" => ("AO3", "{unknown}"),
"CalStepCH0G2" => ("A02", "{unknown}"),
"CalStepCH0G3" => ("A03", "{unknown}"),
"CalStepCH1G2" => ("A12", "{unknown}"),
"CalStepCH1G3" => ("A13", "{unknown}"),
"CalStepCH2G2" => ("A22", "{unknown}"),
"CalStepCH2G3" => ("A23", "{unknown}"),
"CalStepCH3G2" => ("A32", "{unknown}"),
"CalStepCH3G3" => ("A33", "{unknown}"),
"DownholeDegC" => ("KD", "Cel"),
"LoggerDegC" => ("K1", "Cel"),
"PowerBoxDegC" => ("K2", "Cel"),
"PressureKPa" => ("DI", "kPa"),
"Rainfallmm" => ("R0", "mm"),
"RTSettingCH0" => ("AR0", "{unknown}"),
"RTSettingCH1" => ("AR1", "{unknown}"),
"RTSettingCH2" => ("AR2", "{unknown}"),
"RTSettingCH3" => ("AR3", "{unknown}"),
"SolarAmps" => ("ASO", "A"),
"SystemAmps" => ("ASY", "A")
)
bottle_nets = Dict{String, String}(
"AIRS" => "MC",
"TEPE" => "GF",
"BUY1" => "GF",
"ESN1" => "GF",
"TRNT" => "MC",
"HALK" => "GF",
"B948" => "ARRA",
"OLV1" => "MC",
"OLV2" => "MC",
"GERD" => "MC",
"SIV1" => "GF",
"BOZ1" => "GF",
"B947" => "ARRA"
)
function check_bads!(x::AbstractArray, nv::T) where T
# Check for bad samples
@inbounds for i in x
if i == nv
return true
end
end
return false
end
function channel_guess(str::AbstractString, fs::Float64)
si = fs >= 1.0 ? 14 : 10
ei = length(str) - (endswith(str, "_20") ? 3 : 0)
# name, id, units
str = str[si:ei]
if length(str) == 3
units = "m/m"
else
(str, units) = get(bottle_chans, str, ("YY", "{unknown}"))
end
# form channel string
if length(str) == 2
str = (fs > 1.0 ? "B" : fs > 0.1 ? "L" : "R")*str
end
return (str, units)
end
function read_bottle!(S::GphysData, fstr::String, nx_new::Int64, nx_add::Int64, memmap::Bool, strict::Bool, v::Integer)
buf = BUF.buf
files = ls(fstr)
for file in files
io = memmap ? IOBuffer(Mmap.mmap(file)) : open(file, "r")
# Read header ============================================================
fastskip(io, 8)
t0 = round(Int64, fastread(io, Float64)*sμ)
dt = fastread(io, Float32)
nx = fastread(io, Int32)
ty = fastread(io, Int32)
nv = fastread(io, Int32)
fastskip(io, 8)
fs = 1.0/dt
v > 2 && println("t0 = ", t0, ", fs = ", fs, ", nx = ", nx, ", ty = ", ty, ", nv = ", nv)
# Read data ==============================================================
T = ty == 0 ? Int16 : ty == 1 ? Int32 : Float32
nb = nx*sizeof(T)
checkbuf_8!(buf, nb)
fast_readbytes!(io, buf, nb)
close(io)
# Try to create an ID from the file name =================================
# Assumes fname SSSSyyJJJ... (SSSS = station, yy = year, JJJ = Julian day)
fname = splitdir(file)[2]
sta = fname[1:4]
(cha, units) = channel_guess(fname, fs)
# find relevant entry in station data
net = get(bottle_nets, sta, "PB")
id = net * "." * sta * ".." * cha
# Load into S ============================================================
i = findid(id, S.id)
if strict
i = channel_match(S, i, fs)
end
if i == 0
# Create C.x
x = Array{Float64,1}(undef, max(nx_new, nx))
os = 1
C = SeisChannel()
setfield!(C, :id, id)
setfield!(C, :fs, fs)
setfield!(C, :units, units)
mk_t!(C, nx, t0)
setfield!(C, :x, x)
push!(S, C)
else
xi = S.t[i][end,1]
x = getindex(getfield(S, :x), i)
check_for_gap!(S, i, t0, nx, v)
Lx = length(x)
if xi + nx > Lx
resize!(x, Lx + max(nx_add, nx))
end
os = xi + 1
end
# Check for null values
nv = T(nv)
y = reinterpret(T, buf)
b = T == Int16 ? false : check_bads!(y, nv)
if b
j = os
@inbounds for i = 1:nx
if y[i] == nv
x[j] = NaN
else
x[j] = y[i]
end
j += 1
end
else
copyto!(x, os, y, 1, nx)
end
end
trunc_x!(S)
resize!(buf, 65535)
return nothing
end
function read_bottle(fstr::String, nx_new::Int64, nx_add::Int64, memmap::Bool, strict::Bool, v::Integer)
S = SeisData()
read_bottle!(S, fstr, nx_new, nx_add, memmap, strict, v)
return S
end
"""
fill_pbo!(S)
Attempt to fill `:name` and `:loc` fields of S using station names (second field of S.id) cross-referenced against a PBO station info file.
"""
function fill_pbo!(S::GphysData)
sta_data = readdlm(path * "/Formats/PBO_bsm_coords.txt", ',', comments=false)
sta_data[:,2] .= strip.(sta_data[:,2])
sta_data[:,6] .= strip.(sta_data[:,6])
n_sta = size(sta_data, 1)
for i = 1:S.n
sta = split(S.id[i], '.')[2]
for j = 1:n_sta
if sta_data[j, 1] == sta
S.name[i] = String(sta_data[j,2])
lat = sta_data[j,3]
lon = sta_data[j,4]
el = sta_data[j,5]
S.loc[i] = GeoLoc(lat = lat, lon = lon, el = el)
break
end
end
end
return nothing
end
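# Usage sketch for `fill_pbo!`; the bottle file pattern is hypothetical:
#   S = read_bottle("B004*BatteryVolts*", 3600, 3600, false, false, 0)
#   fill_pbo!(S)   # fills :name and :loc from the bundled PBO_bsm_coords.txt table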
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 27429 | export write_sxml
function full_resp(xe::XMLElement)
resp = MultiStageResp(24)
ns = 0
nmax = 0
gain = one(Float64)
f0 = one(Float64)
xr = child_elements(xe)
units = ""
for r in xr
if name(r) == "InstrumentSensitivity"
for ii in child_elements(r)
nii = name(ii)
if nii == "Value"
gain = parse(Float64, content(ii))
elseif nii == "Frequency"
f0 = parse(Float64, content(ii))
elseif nii == "InputUnits"
units = units2ucum(fix_units(content(get_elements_by_tagname(ii, "Name")[1])))
end
end
# stages
elseif name(r) == "Stage"
ns = parse(Int64, attribute(r, "number"))
nmax = max(ns, nmax)
resp_code = 0x00
c = one(Float64)
a0 = one(Float64)
f0 = one(Float64)
p = Array{Complex{Float64},1}(undef, 0)
z = Array{Complex{Float64},1}(undef, 0)
num = Array{Float64,1}(undef, 0)
den = Array{Float64,1}(undef, 0)
units_in = ""
units_out = ""
for ii in child_elements(r)
nii = name(ii)
if nii == "PolesZeros"
resp_code = 0x02
for iii in child_elements(ii)
niii = name(iii)
if niii == "PzTransferFunctionType"
c = content(iii) == "LAPLACE (RADIANS/SECOND)" ? c : Float64(2pi)
# Zero, Pole
elseif niii in ["Zero", "Pole"]
pzv = complex(
parse(Float64, content(get_elements_by_tagname(iii, "Real")[1])),
parse(Float64, content(get_elements_by_tagname(iii, "Imaginary")[1]))
)
if niii == "Zero"
push!(z, pzv)
else
push!(p, pzv)
end
elseif niii == "InputUnits"
resp.i[ns] = fix_units(content(get_elements_by_tagname(iii, "Name")[1]))
elseif niii == "OutputUnits"
resp.o[ns] = fix_units(content(get_elements_by_tagname(iii, "Name")[1]))
elseif niii == "NormalizationFactor"
a0 = parse(Float64, content(iii))
elseif niii == "NormalizationFrequency"
f0 = parse(Float64, content(iii))
end
end
elseif nii == "StageGain"
resp.gain[ns] = parse(Float64, content(get_elements_by_tagname(ii, "Value")[1]))
resp.fg[ns] = parse(Float64, content(get_elements_by_tagname(ii, "Frequency")[1]))
elseif nii == "Coefficients" || nii == "FIR"
resp_code = 0x03
for iii in child_elements(ii)
niii = name(iii)
if niii == "InputUnits"
resp.i[ns] = fix_units(content(get_elements_by_tagname(iii, "Name")[1]))
elseif niii == "OutputUnits"
resp.o[ns] = fix_units(content(get_elements_by_tagname(iii, "Name")[1]))
elseif niii == "NumeratorCoefficient" || niii == "Numerator"
push!(num, parse(Float64, content(iii)))
elseif niii == "DenominatorCoefficient" || niii == "Denominator"
push!(den, parse(Float64, content(iii)))
end
end
elseif nii == "Decimation"
resp.fs[ns] = parse(Float64, content(get_elements_by_tagname(ii, "InputSampleRate")[1]))
resp.fac[ns] = parse(Int64, content(get_elements_by_tagname(ii, "Factor")[1]))
resp.os[ns] = parse(Int64, content(get_elements_by_tagname(ii, "Offset")[1]))
resp.delay[ns] = parse(Float64, content(get_elements_by_tagname(ii, "Delay")[1]))
resp.corr[ns] = parse(Float64, content(get_elements_by_tagname(ii, "Correction")[1]))
end
end
# Post-process
if resp_code == 0x02
rmul!(z, c)
rmul!(p ,c)
resp.stage[ns] = PZResp64(z = z, p = p, a0 = a0, f0 = f0)
elseif resp_code == 0x03
resp.stage[ns] = CoeffResp(num, den)
else
resp.stage[ns] = nothing
end
end
# end stages
end
L = length(resp.fs)
for f in fieldnames(MultiStageResp)
deleteat!(getfield(resp, f), (nmax+1):L)
end
return gain, units, resp
end
function FDSN_sta_xml(xmlf::String, msr::Bool, s::String, t::String, v::Integer)
xdoc = LightXML.parse_string(xmlf)
xroot = LightXML.root(xdoc)
xnet = child_elements(xroot)
S = SeisData()
s = string_time(s, BUF.date_buf)
t = string_time(t, BUF.date_buf)
for net in xnet
# network level
if name(net) == "Network"
net_s = string_time(has_attribute(net, "startDate") ? attribute(net, "startDate") : "0001-01-01T00:00:00", BUF.date_buf)
net_t = string_time(has_attribute(net, "endDate") ? attribute(net, "endDate") : "9999-12-31T11:59:59", BUF.date_buf)
# string comparisons with ≤, ≥ are safe here: zero-padded ISO 8601 timestamps sort lexicographically
if net_s ≤ t && net_t ≥ s
nn = has_attribute(net, "code") ? attribute(net, "code") : ""
xsta = child_elements(net)
# station level
for sta in xsta
if name(sta) == "Station"
sta_s = string_time(has_attribute(sta, "startDate") ? attribute(sta, "startDate") : "0001-01-01T00:00:00", BUF.date_buf)
sta_t = string_time(has_attribute(sta, "endDate") ? attribute(sta, "endDate") : "9999-12-31T11:59:59", BUF.date_buf)
if sta_s ≤ t && sta_t ≥ s
ss = has_attribute(sta, "code") ? attribute(sta, "code") : ""
# loop over child tags
xcha = child_elements(sta)
s_lat = 0.0
s_lon = 0.0
s_el = 0.0
s_name = ""
for c in xcha
# station lat
if name(c) == "Latitude"
s_lat = parse(Float64, content(c))
# station lon
elseif name(c) == "Longitude"
s_lon = parse(Float64, content(c))
# station el
elseif name(c) == "Elevation"
s_el = parse(Float64, content(c))
# site description -- we only care about the name
elseif name(c) == "Site"
cx = child_elements(c)
for i in cx
if name(i) == "Name"
s_name = content(i)
end
end
# channel element
elseif name(c) == "Channel"
cha_s = string_time(has_attribute(c, "startDate") ? attribute(c, "startDate") : "0001-01-01T00:00:00", BUF.date_buf)
cha_t = string_time(has_attribute(c, "endDate") ? attribute(c, "endDate") : "9999-12-31T11:59:59", BUF.date_buf)
ll = has_attribute(c, "locationCode") ? attribute(c, "locationCode") : ""
cc = has_attribute(c, "code") ? attribute(c, "code") : ""
if cha_s ≤ t && cha_t ≥ s
c_id = "...."
c_name = identity(s_name)
c_units = ""
c_sensor = ""
# location defaults
c_lat = s_lat
c_lon = s_lon
c_el = s_el
c_dep = 0.0
c_az = 0.0
c_inc = 0.0
# sampling and response defaults
c_fs = 0.0
c_gain = 1.0
c_normfreq = 1.0
c_drift = 0.0
c_resp = PZResp()
cx = child_elements(c)
for i in cx
# location params
if name(i) == "Latitude"
c_lat = parse(Float64, content(i))
elseif name(i) == "Longitude"
c_lon = parse(Float64, content(i))
elseif name(i) == "Elevation"
c_el = parse(Float64, content(i))
elseif name(i) == "Depth"
c_dep = parse(Float64, content(i))
elseif name(i) == "Azimuth"
c_az = parse(Float64, content(i))
elseif name(i) == "Dip"
c_inc = parse(Float64, content(i)) - 90.0
# sampling, drift
elseif name(i) == "SampleRate"
c_fs = parse(Float64, content(i))
elseif name(i) == "ClockDrift"
c_drift = parse(Float64, content(i))
# sensor type
elseif name(i) == "Sensor"
rx = child_elements(i)
for r in rx
if name(r) == "Description"
c_sensor = content(r)
end
end
# response
elseif name(i) == "Response"
if msr
c_gain, c_units, c_resp = full_resp(i)
else
rx = child_elements(i)
for r in rx
if name(r) == "InstrumentSensitivity"
for y in child_elements(r)
if name(y) == "Value"
# gain
c_gain = parse(Float64, content(y))
# println("gain = ", c_gain)
elseif name(y) == "Frequency"
# normfreq
# c_normfreq = parse(Float64, content(y))
elseif name(y) == "InputUnits"
# calibrationunits
c_units = units2ucum(fix_units(content(get_elements_by_tagname(y, "Name")[1])))
end
end
# stages
elseif name(r) == "Stage"
for sx in child_elements(r)
if name(sx) == "PolesZeros"
p = Array{Complex{Float32},1}(undef, 0)
z = Array{Complex{Float32},1}(undef, 0)
c = 1.0f0
for pzx in child_elements(sx)
npzx = name(pzx)
if npzx == "PzTransferFunctionType"
c = content(pzx) == "LAPLACE (RADIANS/SECOND)" ? c : Float32(2pi)
#= ...this assumes that "DIGITAL (Z-TRANSFORM)"
means what I think it does -- like all terms in
FDSN, there is no documentation =#
# Zero, Pole
elseif npzx in ["Zero", "Pole"]
pzv = complex(
parse(Float32, content(get_elements_by_tagname(pzx, "Real")[1])),
parse(Float32, content(get_elements_by_tagname(pzx, "Imaginary")[1]))
)
if npzx == "Zero"
push!(z, pzv)
else
push!(p, pzv)
end
elseif npzx == "NormalizationFactor"
c_resp.a0 = parse(Float64, content(pzx))
elseif npzx == "NormalizationFrequency"
c_resp.f0 = parse(Float64, content(pzx))
end
end
if length(z) > 0
rmul!(z, c)
append!(c_resp.z, z)
end
if length(p) > 0
rmul!(p, c)
append!(c_resp.p, p)
end
end
end
end
# end stages
end
end
end
# end response
end
# channel id
c_id = join([nn, ss, ll, cc], ".")
if findid(c_id, S) > 0 && (v > 0)
@warn(string("Channel ", c_id, " has multiple sets of parameters in time range ", s, " - ", t))
end
# channel location
c_loc = GeoLoc( lat = c_lat,
lon = c_lon,
el = c_el,
dep = c_dep,
az = c_az,
inc = c_inc )
# build SeisChannel object
C = SeisChannel(id = c_id,
name = c_name,
gain = c_gain,
fs = c_fs,
loc = c_loc,
units = c_units,
resp = c_resp )
C.misc["ClockDrift"] = c_drift
C.misc["startDate"] = cha_s
C.misc["endDate"] = cha_t
if !isempty(c_sensor)
C.misc["SensorDescription"] = c_sensor
end
push!(S, C)
end
# done with channel
end
# end channel element
end
end
end
end
# end station element
end
end
# end network level
end
free(xdoc)
return S
end
function read_sxml(fpat::String, s::String, t::String, memmap::Bool, msr::Bool, v::Integer)
if safe_isfile(fpat)
io = memmap ? IOBuffer(Mmap.mmap(fpat)) : open(fpat, "r")
xsta = read(io, String)
close(io)
S = FDSN_sta_xml(xsta, msr, s, t, v)
else
files = ls(fpat)
if length(files) > 0
io = memmap ? IOBuffer(Mmap.mmap(files[1])) : open(files[1], "r")
xsta = read(io, String)
close(io)
S = FDSN_sta_xml(xsta, msr, s, t, v)
if length(files) > 1
for i = 2:length(files)
io = memmap ? IOBuffer(Mmap.mmap(files[i])) : open(files[i], "r")
xsta = read(io, String)
close(io)
T = FDSN_sta_xml(xsta, msr, s, t, v)
append!(S, T)
end
end
else
error("file(s) not found!")
end
end
return S
end
function sxml_mergehdr!(S::GphysData, T::GphysData, app::Bool, nofs::Bool, v::Integer)
relevant_fields = nofs ? (:name, :loc, :gain, :resp, :units) : (:name, :loc, :fs, :gain, :resp, :units)
k = Int64[]
for i = 1:length(T)
# Match on ID, si, ei
si = isempty(T.t[i]) ? get(T.misc[i], "startDate", typemin(Int64)) : starttime(T.t[i], T.fs[i])
ei = isempty(T.t[i]) ? get(T.misc[i], "endDate", typemax(Int64)) : endtime(T.t[i], T.fs[i])
id = T.id[i]
c = 0
for j = 1:length(S.id)
if S.id[j] == id
sj = isempty(S.t[j]) ? get(S.misc[j], "startDate", si) : starttime(S.t[j], S.fs[j])
ej = isempty(S.t[j]) ? get(S.misc[j], "endDate", ei) : endtime(S.t[j], S.fs[j])
if (si ≤ ej) && (ei ≥ sj)
c = j
break
end
end
end
# Overwrite S[j] headers on match
if c != 0
(v > 2) && println("id/time match! id = ", S.id[c], ". Overwriting S[", c, "] headers from T[", i, "]")
for f in relevant_fields
# S.(f)[c] = T.(f)[i]
setindex!(getfield(S, f), getindex(getfield(T, f), i), c)
end
note!(S, c, string("sxml_mergehdr!, overwrote ", relevant_fields))
S.misc[c] = merge(T.misc[i], S.misc[c])
# Flag k for delete from T
push!(k, i)
end
end
# Delete channels that were already used to overwrite headers in S
if isempty(k) == false
deleteat!(T, k)
end
# Append remainder of T
(v > 1) && println("remaining entries in T = ", T.n)
if app && (isempty(T) == false)
append!(S, T)
end
return nothing
end
function read_station_xml!(S::GphysData, file::String, s::String, t::String, msr::Bool, v::Integer)
if sizeof(file) < 256
io = open(file, "r")
xsta = read(io, String)
close(io)
else
xsta = file
end
T = FDSN_sta_xml(xsta, msr, s, t, v)
sxml_mergehdr!(S, T, true, false, v)
return nothing
end
function read_station_xml(file::String, s::String, t::String, msr::Bool, v::Integer)
S = SeisData()
read_station_xml!(S, file, s, t, msr, v)
return S
end
function msr_to_xml(io::IO, r::MultiStageResp, gain::Float64, units::String)
Nstg = length(r.stage)
sens_f = 0.0
if Nstg > 0
sens_f = r.fg[1]
end
write(io, " <InstrumentSensitivity>\n <Value>")
print(io, gain)
write(io, "</Value>\n <Frequency>")
print(io, sens_f)
write(io, "</Frequency>\n <InputUnits>\n <Name>")
write(io, get(ucum_to_seed, units, units))
write(io, "</Name>\n </InputUnits>\n </InstrumentSensitivity>\n")
u1 = " <InputUnits>\n <Name>"
u2 = "</Name>\n </InputUnits>\n <OutputUnits>\n <Name>"
u3 = "</Name>\n </OutputUnits>\n"
@inbounds for i in 1:Nstg
ui = get(ucum_to_seed, r.i[i], r.i[i])
uo = get(ucum_to_seed, r.o[i], r.o[i])
write(io, " <Stage number=\"")
print(io, i)
write(io, "\">\n")
if typeof(r.stage[i]) in (PZResp64, PZResp)
write(io, " <PolesZeros>\n")
write(io, u1)
write(io, ui)
write(io, u2)
write(io, uo)
write(io, u3)
write(io, " <PzTransferFunctionType>LAPLACE (RADIANS/SECOND)</PzTransferFunctionType>\n <NormalizationFactor>")
print(io, r.stage[i].a0)
write(io, "</NormalizationFactor>\n <NormalizationFrequency>")
print(io, r.stage[i].f0)
write(io, "</NormalizationFrequency>\n")
for j = 1:length(r.stage[i].z)
write(io, " <Zero number = \"")
print(io, j)
write(io, "\">\n <Real minusError=\"0\" plusError=\"0\">")
print(io, real(r.stage[i].z[j]))
write(io, "</Real>\n <Imaginary minusError=\"0\" plusError=\"0\">")
print(io, imag(r.stage[i].z[j]))
write(io, "</Imaginary>\n </Zero>\n")
end
for j = 1:length(r.stage[i].p)
write(io, " <Pole number = \"")
print(io, j)
write(io, "\">\n <Real minusError=\"0\" plusError=\"0\">")
print(io, real(r.stage[i].p[j]))
write(io, "</Real>\n <Imaginary minusError=\"0\" plusError=\"0\">")
print(io, imag(r.stage[i].p[j]))
write(io, "</Imaginary>\n </Pole>\n")
end
write(io, " </PolesZeros>\n")
else
if typeof(r.stage[i]) == CoeffResp
write(io, " <Coefficients>\n")
write(io, u1)
write(io, ui)
write(io, u2)
write(io, uo)
write(io, u3)
write(io, " <CfTransferFunctionType>DIGITAL</CfTransferFunctionType>\n")
for j = 1:length(r.stage[i].b)
write(io, " <Numerator minusError=\"0\" plusError=\"0\">")
print(io, r.stage[i].b[j])
write(io, "</Numerator>\n")
end
for j = 1:length(r.stage[i].a)
write(io, " <Denominator minusError=\"0\" plusError=\"0\">")
print(io, r.stage[i].a[j])
write(io, "</Denominator>\n")
end
write(io, " </Coefficients>\n")
end
if r.fac[i] > 0
write(io, " <Decimation>\n <InputSampleRate>")
print(io, r.fs[i])
write(io, "</InputSampleRate>\n <Factor>")
print(io, r.fac[i])
write(io, "</Factor>\n <Offset>")
print(io, r.os[i])
write(io, "</Offset>\n <Delay>")
print(io, r.delay[i])
write(io, "</Delay>\n <Correction>")
print(io, r.corr[i])
write(io, "</Correction>\n </Decimation>\n")
end
end
write(io, " <StageGain>\n <Value>")
print(io, r.gain[i])
write(io, "</Value>\n <Frequency>")
print(io, r.fg[i])
write(io, "</Frequency>\n </StageGain>\n </Stage>\n")
end
return nothing
end
function mk_xml!(io::IO, S::GphysData, chans::Array{Int64,1})
blank_t0 = round(Int64, d2u(now())*μs)
write(io, "<?xml version=\"1.0\" encoding=\"ISO-8859-1\"?>\n\n<FDSNStationXML xmlns=\"http://www.fdsn.org/xml/station/1\" schemaVersion=\"1.0\" xsi:schemaLocation=\"http://www.fdsn.org/xml/station/1 http://www.fdsn.org/xml/station/fdsn-station-1.0.xsd\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\">\n <Source>SeisIO</Source>\n\n <Created>")
print(io, now())
write(io, "</Created>\n")
nch = length(chans)
nets = Array{String,1}(undef, nch)
stas = Array{String,1}(undef, nch)
locs = Array{String,1}(undef, nch)
chas = Array{String,1}(undef, nch)
t0 = zeros(Int64, nch)
t1 = zeros(Int64, nch)
done = falses(nch)
# Fill in nets, stas, locs, chas
fill!(t0, blank_t0)
fill!(t1, xml_endtime)
@inbounds for j = 1:nch
i = chans[j]
id = split_id(S.id[i])
nets[j] = id[1]
stas[j] = id[2]
locs[j] = id[3]
chas[j] = id[4]
# precedence for start time: S.misc[i]["startDate"] > starttime(S.t[i], S.fs[i]) > blank_t0
if haskey(S.misc[i], "startDate")
c_start = get(S.misc[i], "startDate", blank_t0)
if typeof(c_start) == Int64
t0[j] = c_start
end
end
# precedence for end time: S.misc[i]["endDate"] > endtime(S.t[i], S.fs[i]) > xml_endtime (19880899199000000)
if haskey(S.misc[i], "endDate")
c_end = get(S.misc[i], "endDate", xml_endtime)
if typeof(c_end) == Int64
t1[j] = c_end
end
end
if isempty(S.t[i]) == false
if t0[j] == blank_t0
t0[j] = starttime(S.t[i], S.fs[i])
end
if t1[j] == xml_endtime
t1[j] = endtime(S.t[i], S.fs[i])
end
end
end
# Loop over all channels in cha that are not done
@inbounds for j = 1:nch
(done[j]) && continue
# Get all selected channels from same network
nn = nets[j]
ss = stas[j]
net = findall(nets.==nn)
ts = minimum(t0[net])*μs
te = maximum(t1[net])*μs
ds = u2d(ts)
de = u2d(te)
# Open net node
write(io, " <Network code=\"")
write(io, nn)
write(io, "\" startDate=\"")
print(io, ds)
write(io, "\" endDate=\"")
print(io, de)
write(io, "\">\n")
# Loop over stations in network
ci = Int64[]
sta_name = ""
sta_lat = 0.0
sta_lon = 0.0
sta_el = 0.0
# Fill channel index array, ci; set station location and name
for j = 1:nch
if stas[j] == ss && nets[j] == nn
i = chans[j]
push!(ci, j)
if isempty(sta_name)
if !isempty(S.name[i])
sta_name = S.name[i]
end
end
if (sta_lat == 0.0) || (sta_lon == 0.0) || (sta_el == 0.0)
if typeof(S.loc[i]) == GeoLoc
loc = S.loc[i]
if loc.lat != 0.0
sta_lat = loc.lat
end
if loc.lon != 0.0
sta_lon = loc.lon
end
if loc.el != 0.0
sta_el = loc.el
end
end
end
end
end
ts = minimum(t0[ci])*μs
te = maximum(t1[ci])*μs
ds = u2d(ts)
de = u2d(te)
# Open sta node
write(io, " <Station code=\"")
write(io, ss)
write(io, "\" startDate=\"")
print(io, ds)
write(io, "\" endDate=\"")
print(io, de)
write(io, "\">\n <Latitude>")
print(io, sta_lat)
write(io, "</Latitude>\n <Longitude>")
print(io, sta_lon)
write(io, "</Longitude>\n <Elevation>")
print(io, sta_el)
write(io, "</Elevation>\n <Site>\n <Name>")
write(io, sta_name)
write(io, "</Name>\n </Site>\n")
# Loop over cha nodes
for j in ci
i = chans[j]
# Channel location
if typeof(S.loc[i]) == GeoLoc
loc = S.loc[i]
else
loc = GeoLoc()
end
# Instrument response
if typeof(S.resp[i]) == MultiStageResp
resp = S.resp[i]
else
if typeof(S.resp[i]) in (PZResp, PZResp64)
resp = MultiStageResp(2)
resp.fac[2] = 1
else
resp = MultiStageResp(1)
end
resp.stage[1] = S.resp[i]
end
# Open cha node
write(io, " <Channel code=\"")
write(io, chas[j])
write(io, "\" locationCode=\"")
write(io, locs[j])
write(io, "\" startDate=\"")
print(io, u2d(t0[j]*μs))
write(io, "\" endDate=\"")
print(io, u2d(t1[j]*μs))
write(io, "\">\n", " <Latitude>")
print(io, loc.lat)
write(io, "</Latitude>\n <Longitude>")
print(io, loc.lon)
write(io, "</Longitude>\n <Elevation>")
print(io, loc.el)
write(io, "</Elevation>\n <Depth>")
print(io, loc.dep)
write(io, "</Depth>\n <Azimuth>")
print(io, loc.az)
write(io, "</Azimuth>\n <Dip>")
print(io, 90.0 + loc.inc)
write(io, "</Dip>\n <SampleRate>")
print(io, S.fs[i])
write(io, "</SampleRate>\n <ClockDrift>")
print(io, get(S.misc[i], "ClockDrift", 0.0))
write(io, "</ClockDrift>\n <Sensor>\n <Description>")
write(io, get(S.misc[i], "SensorDescription", "Unknown"))
write(io, "</Description>\n </Sensor>\n <Response>\n")
msr_to_xml(io, resp, S.gain[i], S.units[i])
write(io, " </Response>\n </Channel>\n")
done[j] = true
end
write(io, " </Station>\n </Network>\n")
end
write(io, "</FDSNStationXML>\n")
return nothing
end
"""
write_sxml(fname::String, S::GphysData[, chans=Cha])
write_sxml(fname::String, C::GphysChannel)
Write station XML from the fields of `S` or `C` to file `fname`.
Use keyword `chans=Cha` to restrict station XML write to `Cha`. This keyword
can accept an Integer, UnitRange, or Array{Int64,1} as its argument.
"""
function write_sxml(fname::String, S::GphysData;
chans::ChanSpec=Int64[])
chans = mkchans(chans, S, keepempty=true)
io = open(fname, "w")
mk_xml!(io, S, chans)
close(io)
for i in chans
fwrite_note!(S, i, "write_sxml", fname, string(", chans=", i))
end
return nothing
end
write_sxml(fname::String, C::GphysChannel) = write_sxml(fname, SeisData(C), chans=1)
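# Usage sketches for `write_sxml`; the file name is hypothetical:
#   write_sxml("meta.xml", S)              # station XML for all channels of S
#   write_sxml("meta.xml", S, chans=1:3)   # channels 1-3 only
#   write_sxml("meta.xml", C)              # a single GphysChannel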
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 9925 | export rseis, wseis
# SeisIO file format version changes
# 0.54 2020-07-14 added Types: NodalChannel, NodalData, NodalLoc
# 0.53 2019-09-11 removed :i, :o from CoeffResp
# added :i, :o to MultiStageResp
# 0.52 2019-09-03 added Types: CoeffResp, MultiStageResp
# 0.51 2019-08-01 added :f0 to PZResp, PZResp64
# 0.50 all custom types can use write(); rseis, wseis no longer required
# String arrays and :misc are written in a completely different way
# Type codes for :misc changed
# deprecated BigFloat/BigInt support in :misc
# :n is no longer stored as a UInt32
# :x compression no longer automatic and changed from Blosc to lz4
# ======== File versions below 0.5 are no longer supported ===================
# 0.41 :loc, :resp are now custom types with their own io subroutines
# Julia version is no longer written to file
# (likely misidentified in file header as 0.50)
# 0.40 SeisData.id[i] no longer needs to be a length ≤ 15 ASCII string
# files have a new file TOC field: number of channels in each record
# 0.30 SeisData.loc[i] no longer is assumed to be length 5
# 0.20 First reliable file format
const TNames = Type[ EventChannel,
SeisChannel,
NodalChannel,
EventTraceData,
SeisData,
NodalData,
GenLoc,
GeoLoc,
UTMLoc,
XYLoc,
NodalLoc,
GenResp,
PZResp64,
PZResp,
CoeffResp,
MultiStageResp,
PhaseCat,
SeisEvent,
SeisHdr,
SeisPha,
SeisSrc,
SourceTime,
EQLoc,
EQMag ]
const TCodes = UInt32[ 0x20474330, # " GC0" EventChannel
0x20474331, # " GC1" SeisChannel
0x20474332, # " GC2" NodalChannel
0x20474430, # " GD0" EventTraceData
0x20474431, # " GD1" SeisData
0x20474432, # " GD2" NodalData
0x20495030, # " IP0" GenLoc
0x20495031, # " IP1" GeoLoc
0x20495032, # " IP2" UTMLoc
0x20495033, # " IP3" XYLoc
0x20495034, # " IP4" NodalLoc
0x20495230, # " IR0" GenResp
0x20495231, # " IR1" PZResp64
0x20495232, # " IR2" PZResp
0x20495233, # " IR3" CoeffResp
0x20495234, # " IR4" MultiStageResp
0x20504330, # " PC0" PhaseCat = Dict{String, SeisPha}
0x20534530, # " SE0" SeisEvent
0x20534830, # " SH0" SeisHdr
0x20535030, # " SP0" SeisPha
0x20535330, # " SS0" SeisSrc
0x20535430, # " ST0" SourceTime
0x45514c30, # "EQL0" EQLoc
0x45514d30 # "EQM0" EQMag
]
#= check:
L = length(TCodes)
u = reinterpret(UInt8, TCodes)
S = Array{String,1}(undef, L)
for i = 1:L
S[i] = reverse(join([Char(u[4*i-3]), Char(u[4*i-2]), Char(u[4*i-1]), Char(u[4*i])]))
end
should yield the character codes in the comments above
=#
# ===========================================================================
# Auxiliary file read functions
function read_rec(io::IO, ver::Float32, c::UInt32, b::UInt64)
i = 0
while i < length(TCodes)
i = i + 1
if c == getindex(TCodes, i)
if (ver < vSeisIO) && (c == 0x20474431)
return read_legacy(io, ver)
else
return read(io, getindex(TNames, i))
end
end
end
@warn(string("Non-SeisIO data at byte ", fastpos(io), "; skipped."))
fastseek(io, b)
return nothing
end
function build_file_list(patts::Union{String,Array{String,1}})
plist = String[]
if isa(patts, String)
if safe_isfile(patts)
return [patts]
else
plist = [patts]
end
else
plist = patts
end
file_list = String[]
for pat in plist
if safe_isfile(pat)
push!(file_list, pat)
else
append!(file_list, ls(pat))
end
end
return file_list
end
"""
rseis(fstr::String[, c::Array{Int64,1}=C, v::Integer=0, memmap::Bool=false])
Read SeisIO files matching file pattern `fstr` into memory. If an array of
record indices is passed to keyword `c`, only those records are read from
each file.
* Set `v > 0` to increase verbosity.
* Set `memmap=true` to use memory mapping; faster, but potentially unsafe.
"""
function rseis(patts::Union{String,Array{String,1}};
c::Union{Int64,Array{Int64,1}} = Int64[],
memmap::Bool = false,
v::Integer = KW.v)
A = []
files = build_file_list(patts)
sbuf = Array{UInt8, 1}(undef, 65535)
chans = isa(c, Int64) ? [c] : c
ver = vSeisIO
L = zero(Int64)
nf = 0
for f in files
nf = nf + 1
io = memmap ? IOBuffer(Mmap.mmap(f)) : open(f, "r")
# All SeisIO files begin with "SEISIO"
if fastread(io, 6) != UInt8[0x53, 0x45, 0x49, 0x53, 0x49, 0x4f]
@warn string("Skipped ", f, ": not a SeisIO file!")
close(io)
continue
end
ver = fastread(io, Float32)
L = fastread(io, Int64)
C = read!(io, Array{UInt32,1}(undef, L))
B = read!(io, Array{UInt64,1}(undef, L))
# DND -- faster to avoid seek
@inbounds if isempty(chans)
(v > 0) && println("Reading ", L, " objects from ", f)
(v > 1) && println("C = ", C)
for n = 1:L
(v > 1) && println("Reading object with code ", repr(getindex(C, n)), " (", n, "/", L, ")")
R = read_rec(io, ver, getindex(C, n), getindex(B, n == L ? n : n+1))
push!(A, R)
(v > 1) && println("Read ", typeof(getindex(A, n)), " object (", n, "/", L, ")")
end
else
if minimum(chans) > L
(v > 0) && println("Skipped ", f, ": no matching record numbers.")
close(io)
continue
end
for n in chans
if n in 1:L
fastseek(io, getindex(B, n))
R = read_rec(io, ver, getindex(C, n), getindex(B, n == L ? n : n+1))
push!(A, R)
(v > 1) && println("Read ", typeof(last(A)), " object from ", f, ", bytes ", getindex(B, n), ":", ((n == L) ? fastpos(io) : getindex(B, n+1)))
else
(v > 0) && println(n > L ? "No" : "Skipped", " record ", n, " in ", f)
end
end
end
close(io)
end
(v > 0) && println("Processed ", nf, " files.")
return A
end
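# Usage sketches for `rseis`; file names are hypothetical:
#   A = rseis("run1.seis")                  # read every object in one file
#   A = rseis("run*.seis", c=[1,2], v=1)    # records 1 and 2 from each matching file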
"""
wseis(fname, S)
Write SeisIO objects `S` to file `fname`. `S` can be a single object, multiple comma-separated objects, or an array of objects.
"""
function wseis(fname::String, S...)
L = Int64(lastindex(S))
(L == zero(Int64)) && return nothing
C = zeros(UInt32, L) # Codes
B = zeros(UInt64, L) # Byte indices
ID = Array{UInt64,1}(undef, 0) # IDs
TS = Array{Int64,1}(undef, 0) # Start times
TE = Array{Int64,1}(undef, 0) # End times
P = Array{Int64,1}(undef, 0) # Parent object indices in C, B
# Buffer checks
checkbuf!(BUF.int64_buf, 8)
# open file for writing
io = open(fname, "w")
write(io, "SEISIO")
write(io, vSeisIO)
write(io, L)
p = fastpos(io)
skip(io, sizeof(C) + sizeof(B))
# Write all objects
i = 0
@inbounds while i < L
i = i + 1
setindex!(B, UInt64(fastpos(io)), i)
seis = getindex(S, i)
write(io, seis)
T = typeof(seis)
# store type code to C
j = 0
while j < length(TNames)
j = j + 1
if T == getindex(TNames, j)
setindex!(C, getindex(TCodes, j), i)
break
end
end
# Add to id, time index; log to notes
if T <: GphysChannel
push!(ID, hash(getfield(seis, :id)))
push!(P, i)
fs = getfield(seis, :fs)
if !isempty(seis.t)
t = getfield(seis, :t)
push!(TS, starttime(t, seis.fs))
push!(TE, fs == zero(Float64) ? t[end,2] : sum(t, dims=1)[2] +
round(Int64, sμ*lastindex(seis.x)/fs))
end
fwrite_note!(seis, "wseis", fname, "")
elseif T <: GphysData
append!(ID, hash.(getfield(seis, :id)))
append!(P, ones(Int64, seis.n).*i)
k = 0
ts = Array{Int64,1}(undef,seis.n)
te = similar(ts)
@inbounds while k < seis.n
k = k + 1
t = seis.t[k]
fs = seis.fs[k]
if !isempty(t)
setindex!(ts, starttime(t, fs), k)
setindex!(te, fs == zero(Float64) ? t[end,2] : sum(t, dims=1)[2] +
round(Int64, sμ*lastindex(seis.x[k])/fs), k)
end
fwrite_note!(seis, k, "wseis", fname, "")
end
append!(TS, ts)
append!(TE, te)
elseif T in (SeisHdr, SeisSrc)
fwrite_note_quake!(seis, "wseis", fname, "")
end
end
# Write TOC.
# format: array of object types, array of byte indices
fastseek(io, p)
write(io, C)
write(io, B)
# File index added 2017-02-23; changed 2019-05-27
# index format: ID hash, TS, TE, P, positions
fastseekend(io)
b = zeros(Int64, 4)
if isempty(ID)
fill!(b, fastpos(io))
else
setindex!(b, Int64(fastpos(io)), 1)
write(io, ID)
setindex!(b, Int64(fastpos(io)), 2)
write(io, TS)
setindex!(b, Int64(fastpos(io)), 3)
write(io, TE)
setindex!(b, Int64(fastpos(io)), 4)
write(io, P)
end
write(io, b)
close(io)
return nothing
end
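# Usage sketches for `wseis`; names are hypothetical:
#   wseis("run1.seis", S)        # write a single SeisData object
#   wseis("run1.seis", S, Ev)    # write multiple objects (e.g. a SeisData and a SeisEvent)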
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4562 | function read_legacy(io::IO, ver::Float32)
Z = getfield(BUF, :buf)
L = getfield(BUF, :int64_buf)
# read begins ------------------------------------------------------
N = fastread(io, Int64)
checkbuf_strict!(L, 2*N)
fast_readbytes!(io, Z, 3*N)
c1 = copy(Z[1:N])
c2 = copy(Z[N+1:2*N])
y = code2typ.(getindex(Z, 2*N+1:3*N))
cmp = fastread(io) % Bool
read!(io, L)
nx = getindex(L, N+1:2*N)
cmp && checkbuf_8!(Z, maximum(nx))
ver < 0.5f0 && error("No legacy support for SeisIO file format version < 0.5")
# Get file creation time
fname = split(io.name)[2][1:end-1]
isfile(fname) || error(string("Can't stat file ", fname))
st = stat(fname)
t0 = st.ctime
if ver == 0.5f0
S = SeisData(N)
setfield!(S, :id, read_string_vec(io, Z))
setfield!(S, :name, read_string_vec(io, Z))
setfield!(S, :loc, InstrumentPosition[read(io, code2loctyp(getindex(c1, i))) for i = 1:N])
read!(io, S.fs)
read!(io, S.gain)
# Here is the hard part. For older files, we read a degenerate InstResp
R = InstrumentResponse[]
for i = 1:N
T = code2resptyp(getindex(c2, i))
(T == GenResp) && (push!(R, read(io, GenResp)); continue)
et = Complex{T == PZResp ? Float32 : Float64}
# skip :c, it was never used
fastskip(io, T == PZResp ? 4 : 8)
# read poles
np = fastread(io, Int64)
p = zeros(et, np)
read!(io, p)
# read zeros
nz = fastread(io, Int64)
z = zeros(et, nz)
read!(io, z)
# push to R
push!(R, T(p = p, z = z))
end
S.resp .= R
setfield!(S, :units, read_string_vec(io, Z))
setfield!(S, :src, read_string_vec(io, Z))
setfield!(S, :misc, [read_misc(io, Z) for i = 1:N])
setfield!(S, :notes, [read_string_vec(io, Z) for i = 1:N])
setfield!(S, :t, [read!(io, Array{Int64, 2}(undef, getindex(L, i), 2)) for i = 1:N])
setfield!(S, :x,
FloatArray[cmp ?
(fast_readbytes!(io, Z, getindex(nx, i)); Blosc.decompress(getindex(y,i), Z)) :
read!(io, Array{getindex(y,i), 1}(undef, getindex(nx, i))) for i = 1:N])
elseif ver < 0.53
#= Read process for 0.51 and 0.52 is identical; 0.52 added two Types <:
InstrumentResponse that v0.51 doesn't know about, so this is a safe
procedure =#
S = SeisData(N)
setfield!(S, :id, read_string_vec(io, Z))
setfield!(S, :name, read_string_vec(io, Z))
setfield!(S, :loc, InstrumentPosition[read(io, code2loctyp(getindex(c1, i))) for i = 1:N])
read!(io, S.fs)
read!(io, S.gain)
# Here is the hard part. For older files, we read a degenerate InstResp
R = InstrumentResponse[]
for i = 1:N
T = code2resptyp(getindex(c2, i))
if T in (PZResp, PZResp64, GenResp)
push!(R, read(io, T))
elseif T == MultiStageResp
K = fastread(io, Int64)
codes = fastread(io, K)
M = MultiStageResp(K)
A = Array{RespStage,1}(undef, 0)
for j = 1:K
c = codes[j]
if c == 0x03
units_out = String(fastread(io, fastread(io, Int64)))
units_in = String(fastread(io, fastread(io, Int64)))
M.i[j] = units_in
M.o[j] = units_out
if j == 2
M.i[1] = units_out
end
CR = CoeffResp(
read!(io, Array{Float64,1}(undef, fastread(io, Int64))),
read!(io, Array{Float64,1}(undef, fastread(io, Int64)))
)
push!(A, CR)
else
push!(A, read(io, code2resptyp(c)))
end
end
M.stage .= A
read!(io, M.fs)
read!(io, M.gain)
read!(io, M.fg)
read!(io, M.delay)
read!(io, M.corr)
read!(io, M.fac)
read!(io, M.os)
push!(R, M)
end
end
S.resp .= R
setfield!(S, :units, read_string_vec(io, Z))
for i = 1:N
if typeof(S.resp[i]) == MultiStageResp
K = length(S.resp[i].fs)
if K > 0
S.resp[i].o[1] = S.units[i]
end
end
end
setfield!(S, :src, read_string_vec(io, Z))
setfield!(S, :misc, [read_misc(io, Z) for i = 1:N])
setfield!(S, :notes, [read_string_vec(io, Z) for i = 1:N])
setfield!(S, :t, [read!(io, Array{Int64, 2}(undef, getindex(L, i), 2)) for i = 1:N])
setfield!(S, :x,
FloatArray[cmp ?
(fast_readbytes!(io, Z, getindex(nx, i)); Blosc.decompress(getindex(y,i), Z)) :
read!(io, Array{getindex(y,i), 1}(undef, getindex(nx, i))) for i = 1:N])
end
return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 594 | export get_file_ver, set_file_ver
function set_file_ver(f::String, ver::Float32=vSeisIO)
isfile(f) || error("File not found!")
io = open(f, "a+")
seekstart(io)
String(fastread(io, 6)) == "SEISIO" || error("Not a SeisIO file!")
write(io, ver)
close(io)
return nothing
end
set_file_ver(f::String, ver::Float64) = set_file_ver(f, Float32(ver))
function get_file_ver(f::String)
isfile(f) || error("File not found!")
io = open(f, "r")
seekstart(io)
String(fastread(io, 6)) == "SEISIO" || error("Not a SeisIO file!")
ver = fastread(io, Float32)
close(io)
return ver
end
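#= Editor's note: hedged usage sketch (editor addition); the file name is
illustrative and must already be a SeisIO-format file.
  v = get_file_ver("example.seis")     # read the format version (Float32)
  set_file_ver("example.seis", 0.53)   # overwrite the version stamp in place
=#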
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2736 | export mseis!
function SeisData(U...)
S = SeisData()
for i = 1:length(U)
Y = getindex(U,i)
if typeof(Y) == SeisChannel
push!(S, Y)
elseif typeof(Y) <: GphysChannel
push!(S, convert(SeisChannel, Y))
elseif typeof(Y) == SeisData
append!(S, Y)
elseif typeof(Y) <: GphysData
append!(S, convert(SeisData, Y))
elseif typeof(Y) == SeisEvent
append!(S, convert(SeisData, getfield(Y, :data)))
else
@warn(string("Tried to join incompatible type into SeisData at arg ", i, "; skipped."))
end
end
return S
end
"""
mseis!(S::SeisData, U...)
Merge multiple SeisData structures at once. The first argument (merge target)
must be a SeisData structure. Subsequent structures can be any type T <: Union{GphysData, GphysChannel, SeisEvent}.
mseis!(C::GphysChannel, U...)
Merge all channels in U that match channel C into object C. To be merged, a
channel must match on fields `:id`, `:fs`, `:loc`, `:resp`, `:units`.
See also: `merge!`
"""
function mseis!(S1::SeisData, S...)
U = Union{GphysData, GphysChannel, SeisEvent}
L = Int64(length(S))
(L == 0) && return
(typeof(S1) == SeisData) || error("Target must be type SeisData!")
for i in 1:L
T = typeof(getindex(S, i))
if (T <: U) == false
@warn(string("Object of incompatible type passed to mseis! at ", i+1, "; skipped!"))
continue
end
if T == SeisData
append!(S1, getindex(S, i))
elseif T == EventTraceData
append!(S1, convert(SeisData, getindex(S, i)))
elseif T == SeisChannel
append!(S1, SeisData(getindex(S, i)))
elseif T == EventChannel
append!(S1, SeisData(convert(SeisChannel, getindex(S, i))))
elseif T == SeisEvent
append!(S1, convert(SeisData, getfield(getindex(S, i), :data)))
end
end
merge!(S1)
return S1
end
function mseis!(C::Y, S...) where Y<:GphysChannel
U = Union{GphysData, GphysChannel, SeisEvent}
for i in 1:length(S)
X = S[i]
T = typeof(X)
# Only merge compatible types
if (T <: U) == false
@warn(string("Object of incompatible type passed to mchan at ", i+1, "; skipped!"))
continue
end
# Only call merge_chan on channels with matching ID
if T <: GphysChannel
if channel_match(C, X, use_gain=false)
merge!(C, X)
end
elseif T <: GphysData
ID = getfield(X, :id)
for i in 1:X.n
D = getindex(X, i)
if channel_match(C, D, use_gain=false)
merge!(C, D)
end
end
elseif T == SeisEvent
X = X.data
for i in 1:X.n
D = getindex(X, i)
if channel_match(C, D, use_gain=false)
merge!(C, D)
end
end
end
end
return nothing
end
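#= Editor's note: hedged usage sketch (editor addition); S, S2, C, and W are
placeholders for previously built SeisData, SeisData, SeisChannel, and
SeisEvent objects.
  mseis!(S, S2, C, W)   # merge S2, C, and W's trace data into S, then merge!(S)
  mseis!(C, S)          # merge channels of S matching C on id, fs, loc, resp, units
=#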
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2734 | export note!, clear_notes!
# ============================================================================
# Annotation
# Adding a string to SeisData writes a note; if the string mentions a channel
# name or ID, the note is restricted to the given channels(s), else it's
# added to all channels
@doc """
note!(S::SeisData, i::Int64, s::String)
Append `s` to channel `i` of `S` and time stamp.
note!(S::SeisData, id::String, s::String)
As above for the first channel in `S` whose id is an exact match to `id`.
note!(S::SeisData, s::String)
Append `s` to `S.notes` and time stamp. If `s` contains a channel name or ID, only the channel mentioned is annotated; otherwise, all channels are annotated.
See also: `clear_notes!`, `show_processing`, `show_src`, `show_writes`
""" note!
note!(S::T, i::Int64, s::String) where {T<:GphysData} = push!(S.notes[i], tnote(s))
function note!(S::GphysData, s::String)
J = [occursin(i, s) for i in S.name]
K = [occursin(i, s) for i in S.id]
j = findall(max.(J,K) .== true)
if !isempty(j)
[push!(S.notes[i], tnote(s)) for i in j]
else
tn = tnote(s)
for i = 1:S.n
push!(S.notes[i], tn)
end
end
return nothing
end
function note!(S::GphysData, id::String, s::String)
i = findid(id, S)
(i == 0) && error(string("id = ", id, " not found in S!"))
push!(S.notes[i], tnote(s))
return nothing
end
function note!(S::GphysData, chans::Union{UnitRange,Array{Int64,1}}, s::String)
tn = tnote(s)
for c in chans
push!(S.notes[c], tn)
end
return nothing
end
note!(S::GphysChannel, s::String) = push!(S.notes, tnote(s))
# DND, these methods prevent memory reuse
"""
clear_notes!(U::Union{SeisData,SeisChannel,SeisHdr})
Clear all notes from `U`, leaving a note that the notes were cleared.
clear_notes!(S::SeisData, i::Int64)
Clear all notes from channel `i` of `S`, leaving a note that the notes were cleared.
clear_notes!(S::SeisData, id::String)
As above for the first channel in `S` whose id is an exact match to `id`.
See also: `note!`, `show_processing`, `show_src`
"""
function clear_notes!(S::GphysData)
cstr = tnote("notes cleared.")
for i = 1:S.n
empty!(S.notes[i])
push!(S.notes[i], identity(cstr))
end
return nothing
end
function clear_notes!(S::GphysData, i::Int64)
empty!(S.notes[i])
push!(S.notes[i], tnote("notes cleared."))
return nothing
end
function clear_notes!(S::GphysData, id::String)
i = findid(id, S)
(i == 0) && error(string("id = ", id, " not found in S!"))
empty!(S.notes[i])
push!(S.notes[i], tnote("notes cleared."))
return nothing
end
clear_notes!(U::GphysChannel) = (U.notes = Array{String,1}(undef,1); U.notes[1] = tnote("notes cleared."); return nothing)
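#= Editor's note: hedged usage sketch (editor addition); assumes S is a
SeisData structure whose first channel has id "XX.TST..BHZ".
  note!(S, 1, "picked P arrival")           # annotate channel 1
  note!(S, "XX.TST..BHZ", "bad splice?")    # annotate by exact id match
  note!(S, "station swap at site 2")        # no id/name mentioned: annotates all channels
  clear_notes!(S, 1)                        # wipe channel 1, leaving one note
=#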
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1174 | function fread_note!(S::GphysData, N::Array{Int64,1}, method::String, fmt::String, filestr::String, opts::String)
src_str = string(timestamp(), " ¦ +source ¦ ", method, "(S, \"", fmt, "\", \"", filestr, "\", ", opts, ")" )
for i in N
push!(S.notes[i], src_str)
end
return nothing
end
# Five arguments: S, N, method, fname, opts
function fwrite_note!(S::GphysData, i::Int64, method::String, fname::String, opts::String)
wstr = string(timestamp(), " ¦ write ¦ ", method, "(S", opts, ") ¦ wrote to file ", fname)
push!(S.notes[i], wstr)
return nothing
end
function fwrite_note!(C::GphysChannel, method::String, fname::String, opts::String)
wstr = string(timestamp(), " ¦ write ¦ ", method, "(C", opts, ") ¦ wrote to file ", fname)
push!(C.notes, wstr)
return nothing
end
proc_note!(S::GphysData, nn::Array{Int64, 1}, method::String, desc::String) = note!(S, nn, string("processing ¦ ", method, " ¦ ", desc))
proc_note!(S::GphysData, i::Int64, method::String, desc::String) = note!(S, i, string("processing ¦ ", method, " ¦ ", desc))
proc_note!(C::GphysChannel, method::String, desc::String) = note!(C, string("processing ¦ ", method, " ¦ ", desc))
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2608 | export show_processing, show_src, show_writes
function print_log(notes::Array{String,1}, k::String)
mm = 60
println("")
pl = string("| Time | ", titlecase(k), k == "processing" ? " | Description |\n|:-----|:---------|:------------|\n" : " |\n|:-----|:---------|\n")
ee = true
for i = 1:length(notes)
nn = split(notes[i], " ¦ ", keepempty=true, limit=4)
(length(nn) < 3) && continue
L = lastindex(nn[3])
if nn[2] == k
(ee == true) && (ee = false)
func_str = (L > mm) ? (nn[3][firstindex(nn[3]):prevind(nn[3], mm)] * "…") : nn[3]
if k in ("processing", "write")
pl *= string("| ", nn[1], "|`", func_str, "`|", nn[4], "|\n")
else
pl *= string("| ", nn[1], "|`", func_str, "`|\n")
end
end
end
if ee
pl *= (k == "processing") ? "| | (none) | |\n" : "| | (none) |\n"
end
show(stdout, MIME("text/plain"), Markdown.parse(pl))
println("")
return nothing
end
"""
show_processing(S::GphysData)
show_processing(S::GphysData, i::Int64)
show_processing(C::GphysChannel)
Tabulate and print all processing steps in `:notes` to stdout in human-readable format.
See also: `show_src`, `show_writes`, `note!`, `clear_notes!`
"""
function show_processing(S::GphysData)
for i in 1:S.n
println("\nChannel ", i)
print_log(S.notes[i], "processing")
end
return nothing
end
show_processing(S::GphysData, i::Int) = print_log(S.notes[i], "processing")
show_processing(C::GphysChannel) = print_log(C.notes, "processing")
"""
show_src(S::GphysData)
show_src(S::GphysData, i::Int64)
show_src(C::GphysChannel)
Tabulate and print all data sources logged in `:notes` to stdout in human-readable format.
See also: `show_processing`, `show_writes`, `note!`, `clear_notes!`
"""
function show_src(S::GphysData)
for i in 1:S.n
println("\nChannel ", i)
print_log(S.notes[i], "+source")
end
return nothing
end
show_src(S::GphysData, i::Int) = print_log(S.notes[i], "+source")
show_src(C::GphysChannel) = print_log(C.notes, "+source")
"""
show_writes(S::GphysData)
show_writes(S::GphysData, i::Int64)
show_writes(C::GphysChannel)
Tabulate and print all data writes logged in `:notes` to stdout in human-readable format.
See also: `show_processing`, `show_src`, `note!`, `clear_notes!`
"""
function show_writes(S::GphysData)
for i in 1:S.n
println("\nChannel ", i)
print_log(S.notes[i], "write")
end
return nothing
end
show_writes(S::GphysData, i::Int) = print_log(S.notes[i], "write")
show_writes(C::GphysChannel) = print_log(C.notes, "write")
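#= Editor's note: hedged usage sketch (editor addition); S and C are
placeholders for objects with logged history in :notes.
  show_processing(S)    # one table of processing steps per channel
  show_src(S, 1)        # data sources logged to channel 1
  show_writes(C)        # file writes logged to a single channel
=#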
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 783 | function hdr_hash(S::SeisData, i::Int64)
h = hash(zero(UInt64))
for f in (:id, :name, :loc, :fs, :gain, :resp, :units)
h = hash(getfield(S, f)[i], h)
end
return h
end
function track_hdr!(S::GphysData, hashes::Array{UInt64, 1}, fmt::String, fname::String, opts::String)
to_track = Array{Int64, 1}(undef, 0)
# Add new channels
L = length(hashes)
if S.n > L
append!(hashes, zeros(UInt64, S.n-L))
end
# Check existing channels
for i in 1:S.n
if hdr_hash(S, i) != hashes[i]
push!(to_track, i)
end
end
# note new meta source in to_track
note!(S, to_track, string( "+meta ¦ read_meta!(S, \"",
fmt, "\", \"",
fname, "\", ",
opts, ")" ) )
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 491 | function track_src!(S::GphysData, j::Int64, nx::Array{Int64,1}, last_src::Array{Int64,1})
n = length(nx)
# Check existing channels for changes
for i in 1:n
if length(S.x[i]) > nx[i]
last_src[i] = j
nx[i] = length(S.x[i])
end
end
# Add new channels
if n < S.n
δn = S.n - n
append!(nx, zeros(Int64, δn))
append!(last_src, zeros(Int64, δn))
for i in n+1:S.n
nx[i] = length(S.x[i])
last_src[i] = j
end
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1626 | """
gapfill!(x, t, fs; m::Bool=true)
y = gapfill(x, t, fs; m::Bool=true)
Fill gaps in `x`, sampled at `fs`, with gap indices given by `t[:,1]` and
gap lengths in μs given by `t[:,2]`.
Specify `m=false` to fill with NaNs; else, fill with the mean of non-NaN
values in `x`.
"""
function gapfill!(x::Array{T,1}, t::Array{Int64,2}, fs::Float64; m::Bool=true) where T<: Real
(fs == 0.0 || isempty(x)) && (return x)
nt = size(t,1)
ng = nt - (t[nt,2] == 0 ? 2 : 1)
(ng == 0) && (return x)
timegaps = t[2:end,2] # time gap lengths (μs)
mx = m ? mean(skipmissing(x)) : T(NaN) # mean or NaN
Δ = round(Int64, 1.0/(fs*μs)) # sampling interval in μs
# this always yields rows in w whose indices are correct output assignments
w = t_win(t, Δ)
nw = size(w,1)
broadcast!(-, w, w, w[1,1])
broadcast!(div, w, w, Δ)
broadcast!(+, w, w, 1)
nx = maximum(w)-minimum(w)+1
if minimum(timegaps) ≥ 0
resize!(x, nx)
for i = nw:-1:1
N = w[i,2]-w[i,1]+1
j = t[i,1]
copyto!(x, w[i,1], x, j, N)
if i > 1
fill_s = w[i-1,2]+1
fill_e = w[i,1]-1
x[fill_s:fill_e] .= mx
end
end
else
# Fix for issue #29
x1 = Array{T,1}(undef, nx)
# with negative gaps, source start indices are in t[:,1]
for i in 1:nw
N = w[i,2]-w[i,1]+1
j = t[i,1]
copyto!(x1, w[i,1], x, j, N)
if i > 1
fill_s = w[i-1,2]+1
fill_e = w[i,1]-1
(fill_e > fill_s) && (x1[fill_s:fill_e] .= mx)
end
end
resize!(x, nx)
copyto!(x, 1, x1, 1, nx)
end
return nothing
end
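#= Editor's note: hedged illustration (editor addition) of the time-matrix
convention assumed above: column 1 holds sample indices, column 2 holds time
offsets in μs, and intermediate rows mark gaps. All values are illustrative.
  x = randn(Float32, 200)
  t = [1 0; 101 2_000_000; 200 0]    # 2 s gap before sample 101 at fs = 100 Hz
  gapfill!(x, t, 100.0)              # gap is filled with mean(x); x is resized
=#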
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6545 | export taper, taper!
# Create increasing part of a cosine taper
function mktaper!(W::Array{T,1}, L::Int64) where T<:Real
LL = 2*L
pp = 2.0*pi
@inbounds for i = 1:L
W[i] = 0.5*(1.0 - cos(pp*((i-1)/LL)))
end
return nothing
end
# Apply taper W to both ends of X
function taper_seg!(X::AbstractVector, W::Array{T,1}, L::Int64, μ::T; rev::Bool=false) where T<:Real
if rev == true
j = L
@inbounds for i = 1:L
X[j] = (X[j]-μ)*W[i] + μ
j -= 1
end
else
@inbounds for i = 1:L
X[i] = (X[i]-μ)*W[i] + μ
end
end
return nothing
end
# Taper a GphysChannel
@doc """
taper!(C[; t_max::Real=10.0, α::Real=0.05, N_min::Int64=10])
Cosine taper each segment of time-series data in GphysChannel object C that
contains at least `N_min` total samples.
taper!(S[; chans=CC, t_max::Real=10.0, α::Real=0.05, N_min::Int64=10])
Cosine taper all time-series data in S. Tapers each segment of each channel
that contains at least `N_min` total samples.
Does not modify irregularly-sampled data channels.
Keywords:
* `chans`: Only taper the specified channels.
* `N_min`: Data segments with N < N_min total samples are not tapered.
* `t_max`: Maximum taper edge in seconds.
* `α`: Taper edge area; as for a Tukey window, the first and last 100*α% of
samples in each window are tapered, up to `t_max` seconds of data.
See also: `DSP.Windows.tukey`
""" taper!
function taper!(C::GphysChannel; t_max::Real=10.0, α::Real=0.05, N_min::Int64=10)
if !(C.fs > 0.0)
return nothing
end
N_min = max(div(N_min,2), 0)
# detrend!(C)
# Reserve a window of length t_max*C.fs for the taper
L = round(Int, t_max*C.fs)
T = eltype(C.x)
W = Array{T,1}(undef, L)
# Determine window lengths of all segments
window_lengths = diff(C.t[:,1])
window_lengths[end] += 1
n_seg = length(window_lengths)
N_max = maximum(window_lengths)
# Sanity check, with requirement α ≤ 0.5
L_max = round(Int, t_max*C.fs)
α = min(α, 0.5)
L = min(L, round(Int, N_max*α))
mktaper!(W, L)
μ = T(mean(C.x))
# Begin tapering by segment
if n_seg == 1
Nx = length(C.x)
Xl = view(C.x, 1:L)
Xr = view(C.x, Nx-L+1:Nx)
taper_seg!(Xl, W, L, μ)
taper_seg!(Xr, W, L, μ, rev=true)
else
# Get taper lengths and segment indices
si = copy(C.t[1:n_seg, 1])
ei = copy(C.t[2:n_seg, 1]).-1
push!(ei, C.t[end,1])
ii = sortperm(window_lengths, rev=true)
si = si[ii]
ei = ei[ii]
Lw = window_lengths[ii]
for i = 1:n_seg
s = si[i]
t = ei[i]
L_tap = Lw[i]
L_tap < N_min && break
if L_tap < L
L = min(L_max, round(Int, L_tap*α))
resize!(W, L)
mktaper!(W, L)
end
X = view(C.x, s:t)
μ = T(sum(X)/(t-s+1))
Xl = view(C.x, s:s+L-1)
Xr = view(C.x, t-L+1:t)
taper_seg!(Xl, W, L, μ)
taper_seg!(Xr, W, L, μ, rev=true)
end
end
proc_note!(C, string("taper!(C, t_max = ",
t_max,
", ",
"α = ",
α,
", ",
"N_min = ",
N_min,
")"), "cosine taper by segment")
return nothing
end
# This approach leads to heinous-looking code but uses virtually no memory.
# I could probably clean it up by creating one main taper
# and passing/editing views into the taper.
function taper!(S::GphysData;
chans::ChanSpec=Int64[],
t_max::Real=10.0,
α::Real=0.05,
N_min::Int64=10)
if !any(getfield(S, :fs) .> 0.0)
return nothing
end
chans = mkchans(chans, S, keepirr=false)
proc_str = string("taper!(S, chans=", chans, ", t_max = ", t_max, ", ", "α = ", α, ", ", "N_min = ", N_min, ")")
α = min(α, 0.5)
N_min = max(div(N_min,2), 0)
T = unique([eltype(i) for i in S.x])
nT = length(T)
N_max = zeros(Int64, nT)
# Arrays of views for each data type; we'll store these and lengths L_taps
means = Array{Union{[Array{ty,1} for ty in T]...},1}(undef, nT)
L_taps = Array{Array{Int64,1},1}(undef, nT)
Xl = Array{Array{SubArray,1},1}(undef, nT)
Xr = similar(Xl)
for j = 1:nT
Xl[j] = Array{SubArray{T[j],1,Array{T[j],1},Tuple{StepRange{Int64,Int64}},true},1}(undef,0)
Xr[j] = similar(Xl[j])
L_taps[j] = Array{Int64,1}(undef, 0)
means[j] = Array{T[j],1}(undef, 0)
end
# Loop over channels, pushing views to the appropriate view array for eltype(S.x[i])
for i = 1:S.n
if i in chans
S.fs[i] <= 0.0 && continue
j = findfirst(T.==eltype(S.x[i]))
# Compute length
window_lengths = diff(S.t[i][:,1])
window_lengths[end] += 1
n_seg = length(window_lengths)
# Get taper lengths and segment indices
L_max = round(Int, t_max*S.fs[i])
L_seg = min.(L_max, round.(Int, window_lengths*α))
si = copy(S.t[i][1:n_seg, 1])
ei = copy(S.t[i][2:n_seg, 1]).-1
push!(ei, S.t[i][end,1])
μ = T[j](mean(S.x[i]))
for k = 1:length(L_seg)
push!(Xl[j], view(S.x[i], si[k]:si[k]+L_seg[k]-1))
push!(Xr[j], view(S.x[i], ei[k]-L_seg[k]+1:ei[k]))
push!(means[j], μ)
N_max[j] = max(N_max[j], L_seg[k])
end
append!(L_taps[j], L_seg)
end
end
# Loop over data type
for j = 1:nT
L = N_max[j]
W = Array{T[j],1}(undef, L)
mktaper!(W, L)
ii = sortperm(L_taps[j], rev=true)
Nw = L_taps[j][ii]
Xl[j] = Xl[j][ii]
Xr[j] = Xr[j][ii]
means[j] = means[j][ii]
# Loop over left & right X-views
for k = 1:length(Nw)
Nw[k] < N_min && break
# Note here: "break", not "continue", because we've sorted in reverse
# order; once our taper regions become shorter than N_min, we're done.
if Nw[k] < L
L = Nw[k]
resize!(W, L)
mktaper!(W, L)
end
taper_seg!(Xl[j][k], W, L, means[j][k])
taper_seg!(Xr[j][k], W, L, means[j][k], rev=true)
end
end
# Annotate
for i = 1:S.n
if i in chans
S.fs[i] == 0.0 && continue
proc_note!(S, i, proc_str, "cosine taper by segment")
end
end
return nothing
end
@doc (@doc taper!)
function taper(C::GphysChannel; t_max::Real=10.0, α::Real=0.05, N_min::Int64=10)
U = deepcopy(C)
taper!(U, t_max = t_max, α=α, N_min=N_min)
return U
end
function taper(S::GphysData;
chans::ChanSpec=Int64[],
t_max::Real=10.0,
α::Real=0.05,
N_min::Int64=10)
U = deepcopy(S)
taper!(U, chans=chans, t_max=t_max, α=α, N_min=N_min)
return U
end
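#= Editor's note: hedged usage sketch (editor addition); the channel built
here exists only to illustrate the call signature.
  using SeisIO
  C = SeisChannel()
  C.id = "XX.TST..BHZ"; C.fs = 100.0
  C.x = randn(Float32, 1000); C.t = [1 0; 1000 0]
  taper!(C, t_max=5.0, α=0.05)         # in-place cosine taper of both ends
  S2 = taper(SeisData(C), chans=1)     # out-of-place, restricted to channel 1
=#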
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2026 | export ungap, ungap!
@doc """
ungap!(S[, chans=CC, m=true, tap=false])
ungap(S[, chans=CC, m=true, tap=false])
Fill time gaps in each channel of S with the mean of the channel data.
ungap!(C[, m=true, tap=false])
ungap(C[, m=true, tap=false])
As above for GphysChannel object C.
### Keywords
* `chans=CC`: only ungap channels `CC`.
* `m=false`: this flag fills gaps with NaNs instead of the mean.
* `tap=true`: taper data before filling gaps.
!!! warning
If channel segments aren't in chronological order, call `merge` before using `ungap`.
""" ungap!
function ungap!(C::GphysChannel; m::Bool=true, tap::Bool=false)
if tap
taper!(C)
end
N = size(C.t,1)-2
(N < 0 || C.fs == 0) && return nothing
(N == 0 && C.t[2,2] == 0) && return nothing
gapfill!(C.x, C.t, C.fs, m=m)
proc_note!(C, string("ungap!(C, m = ", m, ", tap = ", tap, ")"),
string("filled ", N, " gaps (sum = ",
sum(C.t[2:end-1, 2]), " μs)"))
C.t = [C.t[1:1,:]; [length(C.x) 0]]
return nothing
end
@doc(@doc ungap!)
function ungap!(S::GphysData;
chans::ChanSpec=Int64[],
m::Bool=true,
tap::Bool=false)
chans = mkchans(chans, S, keepirr=false)
if tap
taper!(S, chans=chans)
end
for i in chans
N = size(S.t[i],1)-2
(N < 0 || S.fs[i] == 0) && continue
(N == 0 && S.t[i][2,2] == 0) && continue
gapfill!(S.x[i], S.t[i], S.fs[i], m=m)
proc_note!(S, i, string("ungap!(S, chans=", i, ", m = ", m,
", tap = ", tap, ")"),
string("filled ", N, " gaps (sum = ",
sum(S.t[i][2:end-1, 2]), " μs)"))
S.t[i] = [S.t[i][1:1,:]; [length(S.x[i]) 0]]
end
return nothing
end
@doc (@doc ungap!)
ungap(S::GphysChannel; m::Bool=true, tap::Bool=false) = (T = deepcopy(S); ungap!(T, m=m, tap=tap); return T)
@doc(@doc ungap!)
ungap(S::GphysData;
chans::ChanSpec=Int64[],
m::Bool=true,
tap::Bool=false) = (T = deepcopy(S); ungap!(T, chans=chans, m=m, tap=tap); return T)
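#= Editor's note: hedged usage sketch (editor addition); S is a placeholder
for a gapped SeisData structure.
  ungap!(S)                          # fill all gaps with each channel's mean
  ungap!(S, m=false)                 # fill with NaNs instead
  U = ungap(S, chans=1:3, tap=true)  # taper first, then fill, channels 1-3 only
=#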
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5204 | export convert_seis, convert_seis!
@doc """
convert_seis!(S[, chans=CC, units_out=UU, v=V])
convert_seis(S, chans=CC, units_out=UU, v=V])
convert_seis!(C[, units_out=UU, v=V])
convert_seis(CC, units_out=UU, v=V)
Convert all seismic data channels in `S` to velocity seismograms, differentiating or integrating as needed.
### Keywords
* `units_out=UU` specifies output units.
+ Default: "m/s".
+ Allowed: "m", "m/s", or "m/s2". (SeisIO uses Unicode (UTF-8) UCUM units.)
* `v=V` sets verbosity.
* `chans=CC` restricts seismogram conversion to seismic data channels within `CC`.
+ `chans` can be an Integer, UnitRange, or Array{Int64,1}.
+ By default, all seismic data channels in `S` are converted (if needed).
+ This does not allow `convert_seis!` to work on non-seismic data.
!!! warning
`convert_seis!` becomes less reversible as seismograms lengthen, particularly at Float32 precision.
### References
[^1] Neumaier, A. (1974). "Rundungsfehleranalyse einiger Verfahren zur Summation endlicher Summen" [Rounding Error Analysis of Some Methods for Summing Finite Sums]. Zeitschrift für Angewandte Mathematik und Mechanik (in German). 54 (1): 39–51. doi:10.1002/zamm.19740540106.
""" convert_seis!
function convert_seis!(S::GphysData;
chans::ChanSpec=Int64[],
units_out::String="m/s",
v::Integer=KW.v)
# sanity check
if (units_out in ("m", "m/s", "m/s2")) == false
error("units_out must be in (\"m\", \"m/s\", \"m/s2\")!")
end
# get seismic data channels
chans = mkchans(chans, S, keepirr=false)
filt_seis_chans!(chans, S)
# now loop over all seismic channels
for i in chans
if v > 2
println(stdout, "Begin channel ", i)
end
# get units
units_in = lowercase(S.units[i])
units_in == units_out && continue
# differentiate or integrate
if v > 0
println(stdout, "Converting channel ", i, ": ", units_in, " => ", units_out)
end
T = eltype(S.x[i])
fs = T(S.fs[i])
δ = one(T)/fs
dt = round(Int64, 1.0/(S.fs[i]*μs))
d2 = div(dt, 2)
t = S.t[i]
Nt = size(t, 1) - 1
if units_out == "m/s"
# differentiate from m to m/s
if units_in == "m"
diff_x!(S.x[i], t[:,1], fs)
for j = 1:Nt
t[j,2] -= d2
end
# integrate from m/s2 to m/s
else
int_x!(S.x[i], t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
end
elseif units_out == "m"
int_x!(S.x[i], t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
if units_in == "m/s2"
int_x!(S.x[i], t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
end
else # units == "m/s2"
diff_x!(S.x[i], t[:,1], fs)
for j = 1:Nt
t[j,2] += d2
end
if units_in == "m"
diff_x!(S.x[i], t[:,1], fs)
for j = 1:Nt
t[j,2] += d2
end
end
end
# change units
S.units[i] = units_out
# log processing
proc_note!(S, i, string("convert_seis!(S, chans=", i,
", units_out = ", units_out, ")"),
string("converted units from ", units_in, " to ", units_out))
if v > 2
println(stdout, "Done channel ", i)
end
end
return nothing
end
function convert_seis!(C::GphysChannel;
units_out::String="m/s",
v::Integer=KW.v)
# sanity check
if (units_out in ("m", "m/s", "m/s2")) == false || (C.units in ("m", "m/s", "m/s2")) == false
error("units must be in (\"m\", \"m/s\", \"m/s2\")!")
end
units_in = lowercase(C.units)
units_in == units_out && return nothing
# differentiate or integrate
if v > 0
println(stdout, "Converting ", units_in, " => ", units_out)
end
T = eltype(C.x)
fs = T(C.fs)
δ = one(T)/fs
dt = round(Int64, 1.0/(C.fs*μs))
d2 = div(dt, 2)
t = C.t
Nt = size(t, 1) - 1
if units_out == "m/s"
# differentiate from m to m/s
if units_in == "m"
diff_x!(C.x, t[:,1], fs)
for j = 1:Nt
t[j,2] -= d2
end
# integrate from m/s2 to m/s
else
int_x!(C.x, t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
end
elseif units_out == "m"
int_x!(C.x, t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
if units_in == "m/s2"
int_x!(C.x, t[:,1], δ)
for j = 1:Nt
t[j,2] -= d2
end
end
else # units == "m/s2"
diff_x!(C.x, t[:,1], fs)
for j = 1:Nt
t[j,2] += d2
end
if units_in == "m"
diff_x!(C.x, t[:,1], fs)
for j = 1:Nt
t[j,2] += d2
end
end
end
# change units
C.units = units_out
# log processing
proc_note!(C, string("convert_seis!(C, units_out = ", units_out, ")"),
string("converted units from ", units_in, " to ", units_out))
return nothing
end
@doc (@doc convert_seis!)
function convert_seis(S::GphysData;
chans::ChanSpec=Int64[],
units_out::String="m/s",
v::Integer=KW.v)
U = deepcopy(S)
convert_seis!(U, chans=chans, units_out=units_out, v=v)
return U
end
function convert_seis(C::GphysChannel;
units_out::String="m/s",
v::Integer=KW.v)
U = deepcopy(C)
convert_seis!(U, units_out=units_out, v=v)
return U
end
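#= Editor's note: hedged usage sketch (editor addition); assumes S holds
seismic channels whose :units are one of "m", "m/s", "m/s2".
  convert_seis!(S, units_out="m/s2")   # differentiate velocity to acceleration
  D = convert_seis(S, units_out="m")   # integrate to displacement, out-of-place
=#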
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 221 | function del_flagged!(S::GphysData, dflag::BitArray{1}, reason::String)
d = findall(dflag)
L = length(d)
if L > 0
@warn(string("Deleting (", reason, ")"), S.id[d])
deleteat!(S, d)
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3775 | export demean!, demean, detrend!, detrend
@doc """
demean!(S::SeisData[; chans=CC, irr=false])
Remove the mean from all channels `i` with `S.fs[i] > 0.0`. Specify `irr=true`
to also remove the mean from irregularly sampled channels (with S.fs[i] == 0.0).
Specifying a channel list with `chans=CC` restricts processing to channels CC.
demean!(C::SeisChannel)
Remove the mean from data in `C`.
Ignores NaNs.
""" demean!
function demean!(S::GphysData;
chans::ChanSpec=Int64[],
irr::Bool=false)
if chans == Int64[]
chans = 1:S.n
end
for i in chans
(irr==false && S.fs[i]<=0.0) && continue
T = eltype(S.x[i])
K = findall(isnan.(S.x[i]))
if isempty(K)
L = length(S.x[i])
μ = T(sum(S.x[i]) / T(L))
for j = 1:L
S.x[i][j] -= μ
end
else
J = findall(isnan.(S.x[i]) .== false)
L = length(J)
μ = T(sum(S.x[i][J])/T(L))
for j in J
S.x[i][j] -= μ
end
end
proc_note!(S, i, string("demean!(S, chans=", i, ")"), "removed mean")
end
return nothing
end
function demean!(C::GphysChannel)
T = eltype(C.x)
K = findall(isnan.(C.x))
if isempty(K)
L = length(C.x)
μ = T(sum(C.x) / T(L))
for j = 1:L
C.x[j] -= μ
end
else
J = findall(isnan.(C.x) .== false)
L = length(J)
μ = T(sum(C.x[J])/T(L))
for j in J
C.x[j] -= μ
end
end
proc_note!(C, "demean!(C)", "removed mean")
return nothing
end
@doc (@doc demean!)
demean(S::GphysData,
chans::ChanSpec=Int64[],
irr::Bool=false) = (U = deepcopy(S); demean!(U, chans=chans, irr=irr); return U)
demean(C::GphysChannel) = (U = deepcopy(C); demean!(U); return U)
function dtr!(x::AbstractArray{T,1}, ti::Array{Int64,2}, fs::Float64, n::Int64) where T <: AbstractFloat
L = length(x)
# check for nans
nf = false
for i = 1:length(x)
if isnan(x[i])
nf = true
break
end
end
if nf
t = tx_float(ti, fs)
j = findall((isnan.(x)).==false)
x1 = x[j]
t1 = t[j]
p = n == 1 ? linreg(t1, x1) : polyfit(t1, x1, n)
else
if n == 1 && size(ti,1) == 2 && fs > 0.0
dt = 1.0/fs
p = linreg(x, dt)
v = zero(T)
for i = 1:length(x)
v = polyval(p, dt*i)
x[i] -= v
end
else
t = tx_float(ti, fs)
p = n == 1 ? linreg(t, x) : polyfit(t, x, n)
v = zero(T)
for i = 1:length(x)
v = polyval(p, t[i])
x[i] -= v
end
end
end
return p
end
@doc """
detrend!(S::SeisData[; chans=CC, n=1]))
Remove the linear trend from channels `CC`. Ignores NaNs.
To remove a higher-order polynomial fit than a linear trend, choose `n` >1.
detrend!(C::SeisChanel[; n=1]))
Remove the linear trend from data in `C`. Ignores NaNs.
To remove a higher-order polynomial fit than a linear trend, choose n>1.
!!! warning
detrend! does *not* check for data gaps; if this is problematic, call ungap!(S, m=true) first!
""" detrend!
function detrend!(S::GphysData;
chans::ChanSpec=Int64[],
n::Int64=1)
if chans == Int64[]
chans = 1:S.n
end
@inbounds for i in chans
p = dtr!(S.x[i], S.t[i], S.fs[i], n)
proc_note!(S, i, string("detrend!(S, chans=", i, ", n = ", n, ")"),
string("detrended order ", n, " polynomial; polyfit result = ", p))
end
return nothing
end
function detrend!(C::GphysChannel; n::Int64=1)
p = dtr!(C.x, C.t, C.fs, n)
proc_note!(C, string("detrend!(C, n = ", n, ")"),
string("detrended order ", n, " polynomial; polyfit result = ", p))
return nothing
end
@doc (@doc detrend!)
detrend(S::GphysData;
chans::ChanSpec=Int64[],
n::Int64=1) = (U = deepcopy(S); detrend!(U, chans=chans, n=n); return U)
detrend(C::GphysChannel; n::Int64=1) = (U = deepcopy(C); detrend!(U, n=n); return U)
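#= Editor's note: hedged usage sketch (editor addition); S is a placeholder
for a SeisData structure with regularly sampled channels.
  demean!(S)               # remove the mean from every regularly sampled channel
  detrend!(S, chans=1:2)   # remove a linear trend from channels 1 and 2
  detrend!(S, n=3)         # or remove a cubic polynomial fit
=#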
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4147 | export env!, env
@doc """
env!(S::GphysData[, chans=CC, v=V])
env(S::GphysData)
In-place conversion of S.x[i] ==> Env(S.x[i]) (≡ |H(S.x[i])|, where H denotes
the Hilbert transform).
### Keywords
* chans=CC: only process channels in CC (with fs > 0.0).
* v=V: verbosity.
""" env!
function env!(S::GphysData;
chans::ChanSpec=Int64[],
v::Integer=KW.v)
# preprocess data channels
chans = mkchans(chans, S, keepirr=false)
proc_str = string("env!(S, chans=", chans, ")")
# Get groups
GRPS = get_unique(S, ["eltype"], chans)
# Initialize Y
Nx = nx_max(S, chans)
(Nx == 0) && return
Y = Array{Float64,1}(undef, 2*max(Nx, 128))
# Arrays to hold data and windows
@inbounds for grp in GRPS
c = grp[1]
T = eltype(S.x[c])
R = reinterpret(Complex{T}, Y)
# Get views and window lengths of each segment
c2 = T(2)
(L, X) = get_views(S, grp)
nL = length(L)
nx = L[1]
n2 = div(nx, 2)
H = view(R, 1:nx)
v1 = view(R, 1:n2 + 1)
v2 = view(R, 2:n2 + (isodd(nx) ? 1 : 0))
v3 = view(R, n2+1:nx)
P = plan_rfft(X[1])
nx_last = nx
np = 0
for i = 1:nL
nx = L[i]
x = X[i]
# determine whether to update P, YS
too_short = false
# update P, nx, YS
if nx < 128 || nx != nx_last
# very short segments
if nx < 128
y = zeros(eltype(x), 128)
copyto!(y, 1, X[i], 1, nx)
X₀ = y
nx₀ = nx
x = view(y, :)
too_short = true
nx = length(x)
end
P = plan_rfft(x)
n2 = div(nx, 2)
H = view(R, 1:nx)
v1 = view(R, 1:n2 + 1)
v2 = view(R, 2:n2 + (isodd(nx) ? 1 : 0))
v3 = view(R, n2+1:nx)
end
# Compute envelope → adapted from DSP.hilbert for recycled H
fill!(v3, zero(Complex{T}))
mul!(v1, P, x)
broadcast!(*, v2, v2, c2)
ifft!(H)
# overwrite x
if too_short
copyto!(X[i], 1, x, 1, nx₀)
else
broadcast!(abs, x, H)
end
# update nx_last
nx_last = nx
end
# log processing to :notes
proc_note!(S, grp, proc_str, "replaced :x with abs(H(:x))")
end
return nothing
end
function env!(C::GphysChannel;
v::Integer=KW.v
)
C.fs == 0.0 && return
# Arrays to hold data and windows
if size(C.t, 1) == 2
C.x .= abs.(DSP.hilbert(C.x))
else
# Initialize R
T = eltype(C.x)
Nx = nx_max(C)
(Nx == 0) && return
R = Array{Complex{T}, 1}(undef, max(Nx, 128))
# Get views and window lengths of each segment
c2 = T(2)
(L, X) = get_views(C)
nL = length(L)
nx = L[1]
n2 = div(nx, 2)
H = view(R, 1:nx)
v1 = view(R, 1:n2 + 1)
v2 = view(R, 2:n2 + (isodd(nx) ? 1 : 0))
v3 = view(R, n2+1:nx)
P = plan_rfft(X[1])
nx_last = nx
np = 0
for i = 1:nL
nx = L[i]
x = X[i]
# determine whether to update P, YS
too_short = false
# update P, nx, YS
if nx < 128 || nx != nx_last
# very short segments
if nx < 128
y = copyto!(zeros(eltype(x), 128), x)
X₀ = y
nx₀ = nx
nx = 128
x = view(y, :)
too_short = true
end
P = plan_rfft(x)
n2 = div(nx, 2)
H = view(R, 1:nx)
v1 = view(R, 1:n2 + 1)
v2 = view(R, 2:n2 + (isodd(nx) ? 1 : 0))
v3 = view(R, n2+1:nx)
end
# Compute envelope → adapted from DSP.hilbert for recycled H
fill!(v3, zero(Complex{T}))
mul!(v1, P, x)
broadcast!(*, v2, v2, c2)
ifft!(H)
# overwrite x
if too_short
copyto!(X[i], 1, x, 1, nx₀)
else
broadcast!(abs, x, H)
end
# update nx_last
nx_last = nx
end
end
# log processing to :notes
proc_note!(C, "env!(C)", "replaced :x with abs(H(:x))")
return nothing
end
@doc (@doc env!)
function env(S::GphysData;
chans::ChanSpec=Int64[],
v::Integer=KW.v
)
U = deepcopy(S)
env!(U, chans=chans, v=v)
return U
end
function env(C::GphysChannel;
v::Integer=KW.v
)
U = deepcopy(C)
env!(U, v=v)
return U
end
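#= Editor's note: hedged usage sketch (editor addition); replaces each
regularly sampled trace with its envelope, |H(x)|.
  env!(S, chans=1:2)   # in place, channels 1 and 2 only
  E = env(C)           # out-of-place copy of a single channel
=#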
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 8355 | import DSP:filtfilt
export filtfilt, filtfilt!
#= Regenerate filter; largely identical to DSP.Filters.filt_stepstate with
some optimization for SeisIO data handling =#
function update_filt(fl::T, fh::T, fs::T, np::Int64, rp::Int, rs::Int, rt::String, dm::String) where T<:Real
# response type
if rt == "Highpass"
ff = Highpass(fl, fs=fs)
elseif rt == "Lowpass"
ff = Lowpass(fh, fs=fs)
else
ff = getfield(DSP.Filters, Symbol(rt))(fl, fh, fs=fs)
end
# design method
if dm == "Elliptic"
zp = Elliptic(np, rp, rs)
elseif dm == "Chebyshev1"
zp = Chebyshev1(np, rp)
elseif dm == "Chebyshev2"
zp = Chebyshev2(np, rs)
else
zp = Butterworth(np)
end
# polynomial ratio
pr = convert(PolynomialRatio, digitalfilter(ff, zp))
# create and scale coeffs
a = coefa(pr)
b = coefb(pr)
scale_factor = a[1]
(scale_factor == 1.0) || (r = 1.0/scale_factor; rmul!(a, r); rmul!(b, r))
# size
bs = length(b)
as = length(a)
sz = max(bs, as)
# Pad the coefficients with zeros if needed
bs < sz && (b = copyto!(zeros(Float64, sz), b))
as < sz && (a = copyto!(zeros(Float64, sz), a))
# construct the companion matrix A and vector B:
A = [-a[2:sz] [I; zeros(Float64, 1, sz-2)]]
B = b[2:sz] - a[2:sz] * b[1]
# Solve for Z: (I - A)*Z = B
Z = scale_factor \ (I - A) \ B
p = 3*(sz-1)
return (b, a, Z, p)
end
#= Adapted from Julia DSP filtfilt for how SeisIO stores data; X and its
padded, interpolated version (Y) can be reused until fs or length(x)
changes =#
function zero_phase_filt!(X::AbstractArray,
Y::AbstractArray,
b::Array{T,1},
a::Array{T,1},
zi::Array{T,1},
p::Int64) where T<:Real
nx = length(X)
z_copy = copy(zi)
# Extrapolate X into Y
j = p
@inbounds for i = 1:nx
j += 1
Y[j] = X[i]
end
y = 2*first(X)
j = 2+p
@inbounds for i = 1:p
j -= 1
Y[i] = y - X[j]
end
y = 2*X[nx]
j = nx
k = nx+p
@inbounds for i = 1:p
j -= 1
k += 1
Y[k] = y - X[j]
end
# Filtering
mul!(z_copy, zi, first(Y))
filt!(Y, b, a, Y, z_copy)
reverse!(Y)
mul!(z_copy, zi, first(Y))
filt!(Y, b, a, Y, z_copy)
j = length(Y)-p+1
@inbounds for i = 1:nx
j -= 1
X[i] = Y[j]
end
return nothing
end
function do_filtfilt!(X::AbstractArray,
Y::AbstractArray,
yview::AbstractArray,
L::Int64,
last_L::Int64,
b::Array{T,1},
a::Array{T,1},
zi::Array{T,1},
p::Int64) where T<:Real
too_short::Bool = false
if L < 3*(2*p) # effective filter order doubles for a zero-phase filter
L₀ = L
L = 6*p
x = copyto!(zeros(eltype(X), L), X)
X₀ = X
X = view(x, :)
too_short = true
end
if L != last_L
# condition to update filter
yview = view(Y, 1 : L+2*p)
last_L = L
end
fill!(yview, zero(eltype(X)))
# Zero-phase filter in X using Y
zero_phase_filt!(X, yview, b, a, zi, p)
if too_short
copyto!(X₀, 1, X, 1, L₀)
end
return nothing
end
@doc """
filtfilt!(S::GphysData[; KWs])
Apply zero-phase filter to S.x.
filtfilt!(C::GphysChannel[; KWs])
Apply zero-phase filter to C.x
Keywords control filtering behavior; specify as e.g. filtfilt!(S, fl=0.1, np=2, rt="Lowpass").
### Keywords
| Name | Default | Type | Description |
|:------|:--------------|:--------|:------------------------------------|
| chans | (all) | [^1] | channel numbers to filter |
| fl | 1.0 | Float64 | lower corner frequency [Hz] [^2] |
| fh | 15.0 | Float64 | upper corner frequency [Hz] [^2] |
| np | 4 | Int64 | number of poles |
| rp | 10 | Int64 | pass-band ripple (dB) |
| rs | 30 | Int64 | stop-band ripple (dB) |
| rt | "Bandpass" | String | response type (type of filter) |
| dm | "Butterworth" | String | design mode (name of filter) |
[^1]: Allowed types are Integer, UnitRange, and Array{Int64, 1}.
[^2]: By convention, the lower corner frequency (fl) is used in a Highpass
filter, and fh is used in a Lowpass filter.
See also: DSP.jl documentation
""" filtfilt!
function filtfilt!(S::GphysData;
chans::ChanSpec=Int64[],
fl::Float64=KW.Filt.fl,
fh::Float64=KW.Filt.fh,
np::Int=KW.Filt.np,
rp::Int=KW.Filt.rp,
rs::Int=KW.Filt.rs,
rt::String=KW.Filt.rt,
dm::String=KW.Filt.dm
)
isempty(S) && return nothing
chans = mkchans(chans, S, keepirr=false)
proc_str = string("filtfilt!(S, chans=", chans, ", fl = ", fl,
", fh = ", fh,
", np = ", np,
", rp = ", rp,
", rs = ", rs,
", rt = ", rt,
", dm = ", dm, ")")
desc_str = string("convolved :x with a ", np, "-pole ", dm, " ", rt, " filter")
N = nx_max(S, chans)
(N == 0) && return
# Determine array structures
T = unique([eltype(i) for i in S.x])
nT = length(T)
sz = 0
yy = Any
for i = 1:nT
zz = sizeof(T[i])
if zz > sz
yy = T[i]
sz = zz
end
end
b, a, zi, p = update_filt(yy(fl), yy(fh), yy(maximum(S.fs)), np, rp, rs, rt, dm)
Y = Array{yy,1}(undef, max(N, 6*p) + 2*p) # right value for Butterworth
# Get groups
GRPS = get_unique(S, ["fs", "eltype"], chans)
for grp in GRPS
# get fs, eltype
c = grp[1]
ty = eltype(S.x[c])
fs = ty(S.fs[c])
# reinterpret Y if needed
if ty != eltype(Y)
Y = reinterpret(ty, isa(Y, Base.ReinterpretArray) ? Y.parent : Y)
end
# Get views and window lengths of each segment
(L,X) = get_views(S, grp)
nL = length(L)
# Initialize filter
b, a, zi, p = update_filt(ty(fl), ty(fh), fs, np, rp, rs, rt, dm)
# Place the first copy outside the loop as we expect many cases where nL=1
nx = first(L)
yview = view(Y, 1 : nx+2*p)
# Use nx_last to track changes
nx_last = nx
# Loop over (rest of) views
for i = 1:nL
do_filtfilt!(X[i], Y, yview, L[i], nx_last, b, a, zi, p)
end
proc_note!(S, grp, proc_str, desc_str)
end
return nothing
end
function filtfilt!(C::GphysChannel;
fl::Float64=KW.Filt.fl,
fh::Float64=KW.Filt.fh,
np::Int=KW.Filt.np,
rp::Int=KW.Filt.rp,
rs::Int=KW.Filt.rs,
rt::String=KW.Filt.rt,
dm::String=KW.Filt.dm
)
N = nx_max(C)
(N == 0) && return
# Determine array structures
ty = eltype(C.x)
# Initialize filter
b, a, zi, p = update_filt(ty(fl), ty(fh), ty(C.fs), np, rp, rs, rt, dm)
Y = Array{ty,1}(undef, max(N, 6*p) + 2*p)
# Get views
if size(C.t,1) == 2
L = length(C.x)
do_filtfilt!(C.x, Y, view(Y,1:L+2*p), L, L, b, a, zi, p)
else
(L,X) = get_views(C)
nL = length(L)
nx = first(L)
yview = view(Y, 1 : nx+2*p)
# Use nx_last to track changes
nx_last = nx
# Loop over (rest of) views
for i = 1:nL
do_filtfilt!(X[i], Y, yview, L[i], nx_last, b, a, zi, p)
end
end
proc_str = string("processing ¦ filtfilt!(C, fl = ", fl,
", fh = ", fh,
", np = ", np,
", rp = ", rp,
", rs = ", rs,
", rt = ", rt,
", dm = ", dm, ")")
desc_str = string("convolved :x with a ", np, "-pole ", dm, " ", rt, " filter")
proc_note!(C, proc_str, desc_str)
return nothing
end
@doc (@doc filtfilt!)
filtfilt(S::GphysData;
chans::ChanSpec=Int64[],
fl::Float64=KW.Filt.fl,
fh::Float64=KW.Filt.fh,
np::Int=KW.Filt.np,
rp::Int=KW.Filt.rp,
rs::Int=KW.Filt.rs,
rt::String=KW.Filt.rt,
dm::String=KW.Filt.dm
) = (
U = deepcopy(S);
filtfilt!(U, chans=chans, fl=fl, fh=fh, np=np, rp=rp, rs=rs, rt=rt, dm=dm);
return U
)
filtfilt(C::GphysChannel;
fl::Float64=KW.Filt.fl,
fh::Float64=KW.Filt.fh,
np::Int=KW.Filt.np,
rp::Int=KW.Filt.rp,
rs::Int=KW.Filt.rs,
rt::String=KW.Filt.rt,
dm::String=KW.Filt.dm
) = (
D = deepcopy(C);
filtfilt!(D, fl=fl, fh=fh, np=np, rp=rp, rs=rs, rt=rt, dm=dm);
return D
)
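#= Editor's note: hedged usage sketch (editor addition); corner frequencies
and filter orders below are illustrative, not recommendations.
  filtfilt!(S, fl=0.5, fh=10.0, rt="Bandpass", dm="Butterworth", np=4)
  filtfilt!(S, fh=1.0, rt="Lowpass")        # fh sets the Lowpass corner
  C2 = filtfilt(C, fl=0.1, rt="Highpass")   # fl sets the Highpass corner
=#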
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 833 | export nanfill!
# replace NaNs with the mean
function nanfill!(x::Array{T,1}) where T<: Real
J = findall(isnan.(x))
if !isempty(J)
if length(J) == length(x)
fill!(x, zero(T))
else
x[J] .= T(mean(x[findall(isnan.(x).==false)]))
end
end
return length(J)
end
"""
nanfill!(S::SeisData)
nanfill!(C::SeisChannel)
Replace NaNs in `:x` with mean of non-NaN values.
"""
function nanfill!(S::GphysData)
for i = 1:S.n
if !isempty(S.x[i])
nn = nanfill!(S.x[i])
if nn > 0
proc_note!(S, i, "nanfill!(S)", "replaced NaNs with the mean of all non-NaN values")
end
end
end
return nothing
end
function nanfill!(C::GphysChannel)
nn = nanfill!(C.x)
if nn > 0
proc_note!(C, "nanfill!(C)", "replaced NaNs with the mean of all non-NaN values")
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2847 | import DSP.resample
export resample, resample!
function cheap_resample!(t::Array{Int64, 2}, x::FloatArray, fs_new::Float64, fs_old::Float64)
r = fs_new/fs_old
n_seg = size(t,1)-1
gap_inds = zeros(Int64, n_seg+1)
# resize S.x if we're upsampling
if (r > 1.0)
resize!(x, ceil(Int64, length(x)*r))
end
for k = n_seg:-1:1
# indexing
si = t[k,1]
ei = t[k+1,1] - (k == n_seg ? 0 : 1)
nx_in = ei-si+1
xr = DSP.resample(x[si:ei], r)
# indexing
nx_out = min(floor(Int64, nx_in*r), length(xr))
gap_inds[k+1] = nx_out
# resample and copy
copyto!(x, si, xr, 1, nx_out)
# resize S.x if we downsampled
(fs_new < fs_old) && (deleteat!(x, si+nx_out:ei))
end
for k = 2:n_seg+1
gap_inds[k] += gap_inds[k-1]
end
copyto!(t, 2, gap_inds, 2, n_seg)
# ensure length(S.x[i]) == S.t[i][end,1] if upsampled
(r > 1.0) && resize!(x, t[end, 1])
return nothing
end
@doc """
resample!(S::SeisData [, chans=CC, fs=FS])
resample(S::SeisData [, chans=CC, fs=FS])
Resample data in S to `FS`. If keyword `fs` is not specified, data are resampled
to the lowest non-zero value in `S.fs[CC]`. Note that a poor choice of `FS` can
lead to upsampling and other undesirable behavior.
Use keyword `chans=CC` to only resample channel numbers `CC`. By default, all
channels `i` with `S.fs[i] > 0.0` are resampled.
resample!(C::SeisChannel, fs::Float64)
resample(C::SeisChannel, fs::Float64)
Resample `C.x` to `fs`.
""" resample!
function resample!(S::GphysData;
chans::ChanSpec=Int64[],
fs::Float64=0.0)
chans = mkchans(chans, S, keepirr=false)
f0 = fs == 0.0 ? minimum(S.fs[S.fs .> 0.0]) : fs
proc_str = string("resample!(S, chans=", chans, ", fs=",
repr("text/plain", f0, context=:compact=>true), ")")
for i in chans
(S.fs[i] == 0.0) && continue
(S.fs[i] == f0) && continue
cheap_resample!(S.t[i], S.x[i], f0, S.fs[i])
desc_str = string("resampled from ", S.fs[i], " to ", f0, "Hz")
proc_note!(S, i, proc_str, desc_str)
S.fs[i] = f0
end
return nothing
end
function resample!(C::GphysChannel, f0::Float64)
C.fs > 0.0 || error("Can't resample non-timeseries data!")
(C.fs == f0) && return nothing
@assert f0 > 0.0
proc_str = string("resample!(C, fs=",
repr("text/plain", f0, context=:compact=>true), ")")
cheap_resample!(C.t, C.x, f0, C.fs)
desc_str = string("resampled from ", C.fs, " to ", f0, "Hz")
proc_note!(C, proc_str, desc_str)
C.fs = f0
return nothing
end
@doc (@doc resample!)
function resample(S::GphysData;
chans::ChanSpec=Int64[],
fs::Float64=0.0)
U = deepcopy(S)
resample!(U, chans=chans, fs=fs)
return U
end
function resample(C::GphysChannel, f0::Float64)
U = deepcopy(C)
resample!(U, f0)
return U
end
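#= Editor's note: hedged usage sketch (editor addition).
  resample!(S)             # downsample all channels to the lowest nonzero fs
  resample!(S, fs=20.0)    # resample the selected channels to 20 Hz
  C2 = resample(C, 50.0)   # out-of-place, single channel
=#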
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2763 | export rescale, rescale!
@doc """
rescale!(S::GphysData, g::Float64; chans=CC)
rescale(S, g; chans=CC)
Rescale all channels of S to gain `g`. By default, all channels are rescaled.
rescale!(S::GphysData; c=c, chans=CC)
rescale(S; c=c, chans=CC)
Change `S` to use `S.gain[c]` for all channels. Rescales data as needed. By default, `c=1`.
rescale!(St::GphysData, Ss::GphysData)
rescale(St, Ss)
Rescale data in `St.x` to `Ss.gain` using channel ID matching; also changes `St.gain`.
rescale!(C::GphysChannel, g::Float64)
rescale(C, g)
Rescale `C.x` to gain `g` and set `C.gain = g`.
rescale!(Ct::GphysChannel, Cs::GphysChannel)
rescale(Ct, Cs)
Rescale data in `Ct.x` to `Cs.gain` and change `Ct.gain`. ID fields must match.
"""
function rescale!(S::GphysData, gain::Float64; chans::ChanSpec=Int64[])
CC = mkchans(chans, S, keepirr=false)
GAIN = getfield(S, :gain)
X = getfield(S, :x)
for i in CC
T = eltype(X[i])
scalefac = T(gain / getindex(GAIN, i))
if scalefac != one(T)
rmul!(getindex(X, i), scalefac)
end
GAIN[i] = gain
end
return nothing
end
function rescale!(S_targ::GphysData, S_src::GphysData)
N = getfield(S_targ, :n)
IT = getfield(S_targ, :id)
IS = getfield(S_src, :id)
GT = getfield(S_targ, :gain)
GS = getfield(S_src, :gain)
X = getfield(S_targ, :x)
for i in 1:N
id = getindex(IT, i)
j = findid(id, IS)
(j == 0) && continue
g_new = getindex(GS, j)
g_old = getindex(GT, i)
if isapprox(g_new, g_old) == false
x = getindex(X, i)
T = eltype(x)
rmul!(x, T(g_new/g_old))
GT[i] = g_new
end
end
return nothing
end
rescale!(S::GphysData; c::Int=1, chans::ChanSpec=Int64[]) = rescale!(S, S.gain[c], chans=chans)
function rescale!(Ct::GphysChannel, Cs::GphysChannel)
(Ct.id == Cs.id) || error("ID mismatch!")
gt = Ct.gain
gs = Cs.gain
if gt != gs
T = eltype(Ct.x)
rmul!(Ct.x, T(gs/gt))
Ct.gain = gs
end
return nothing
end
function rescale!(C::GphysChannel, gt::Float64)
gs = C.gain
if gs != gt
T = eltype(C.x)
rmul!(C.x, T(gs/gt))
C.gain = gt
end
return nothing
end
@doc (@doc rescale!)
function rescale(S::GphysData, gain::Float64; chans::ChanSpec=Int64[])
U = deepcopy(S)
rescale!(U, gain, chans=chans)
return U
end
function rescale(S_targ::GphysData, S_src::GphysData)
U = deepcopy(S_targ)
rescale!(U, S_src)
return U
end
function rescale(S::GphysData; c::Int=1, chans::ChanSpec=Int64[])
U = deepcopy(S)
rescale!(U, U.gain[c], chans=chans)
return U
end
function rescale(Ct::GphysChannel, Cs::GphysChannel)
U = deepcopy(Ct)
rescale!(U, Cs)
return U
end
rescale(C::GphysChannel, g::Float64) = (U = deepcopy(C); rescale!(U, g); return U)
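#= Editor's note: hedged usage sketch (editor addition); Ct and Cs are
placeholders for two channels with the same id.
  rescale!(S, 1.0)    # scale every channel's :x to gain 1.0
  rescale!(S, c=2)    # adopt channel 2's gain for all channels
  rescale!(Ct, Cs)    # match Ct's gain (and rescale its data) to Cs
=#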
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 8610 | export sync, sync!
function get_sync_t(s::Union{String,DateTime}, t::Array{Int64,1})
isa(s, DateTime) && return floor(Int64, d2u(s)*sμ)
if s == "first"
return minimum(t)
elseif s == "last"
return maximum(t)
else
return floor(Int64, d2u(DateTime(s))*sμ)
end
end
# # still used for irregular data
function get_sync_inds(t::AbstractArray{Int64,1}, Ω::Bool, t₀::Int64, t₁::Int64)
if Ω == true
return union(findall(t.<t₀), findall(t.>t₁))
else
return findall(t.<t₀)
end
end
function get_del_ranges(xi::Array{Int64, 2}, nx::Int64)
nw = size(xi, 1)
x_del = Array{UnitRange, 1}(undef, 0)
sizehint!(x_del, nw+1)
# Does first row start at 1?
if xi[1] != 1
push!(x_del, UnitRange(1, xi[1]-1))
end
# Any gaps in rows 2:n-1
for i in 2:nw
if xi[i,1] - xi[i-1,2] > 1
push!(x_del, UnitRange(xi[i-1,2]+1, xi[i,1]-1))
end
end
# Does last row end at nx?
if xi[nw,2] < nx
push!(x_del, UnitRange(xi[nw,2]+1, nx))
end
return x_del
end
#=
OUTPUTS
xi start and end indices of X to keep
W truncated time windows
=#
function sync_t(t::Array{Int64, 2}, Δ::Int64, t_min::Int64, t_max::Int64)
(t_max == 0) && (t_max = typemax(Int64))
xi = x_inds(t)
W = t_win(t, Δ)
nw = size(W, 1)
wi = zeros(Int64, nw, 2)
wk = Array{Bool, 1}(undef, nw)
fill!(wk, true)
for i in 1:nw
if (W[i,2] < t_min) || (W[i,1] > t_max)
wk[i] = false
continue
end
# Find last index ≤ t_max
if W[i,2] > t_max
k = xi[i,2]
v = W[i,2]
while v > t_max
v -= Δ
k -= 1
end
W[i,2] = v
xi[i,2] = k
end
# Find first index ≥ t_min
if W[i,1] < t_min
j = xi[i,1]
v = W[i,1]
while v < t_min
j += 1
v += Δ
end
W[i,1] = v
xi[i,1] = j
end
end
# eliminated windows
if minimum(wk) == false
W = W[wk, :]
xi = xi[wk, :]
end
return xi, W
end
# ==========
@doc """
sync!(S::GphysData)
Synchronize the start times of all data in S to begin at or after the last
start time in S.
sync!(S[, s=TS, t=TT, pad=false, v=V])
Synchronize all data in S to start no earlier than `TS` and terminate no later
than `TT`, with verbosity level `V`.
By default, a channel with mean `μᵢ = mean(S.x[i])` that begins after `TS` is
prepended with `μᵢ` to begin exactly at `TS`; similarly, if keyword `t` is used,
`μᵢ` is appended so that data ends at `TT`. If `pad=false`, channels that begin
after `TS` or end before `TT` are not extended in either direction.
For regularly-sampled channels, gaps between the specified and true times
are filled with the mean; this isn't possible with irregularly-sampled data.
#### Specifying start time (`s=`)
* s="last": (Default) sync to the last start time of any channel in `S`.
* s="first": sync to the first start time of any channel in `S`.
* A numeric value is treated as an epoch time (`?time` for details).
* A DateTime is treated as a DateTime. (see Dates.DateTime for details.)
* Any string other than "last" or "first" is parsed as a DateTime.
#### Specifying end time (`t=``)
* t="none": (Default) end times are not synchronized.
* t="last": synchronize all channels to end at the last end time in `S`.
* t="first" synchronize to the first end time in `S`.
* numeric, datetime, and non-reserved strings are treated as for `s=`.
See also: `TimeSpec`, `Dates.DateTime`, `parsetimewin`
!!! warning
`sync!` calls `prune!`; empty channels will be deleted.
""" sync!
function sync!(S::GphysData;
s::Union{String,DateTime}="last",
t::Union{String,DateTime}="none",
pad::Bool=true,
v::Integer=KW.v,
)
# Delete empty traces
prune!(S) # delete empty channels
S.n == 0 && return nothing # pointless to continue
do_end = t=="none" ? false : true
proc_str = string("sync!(S, s = \"", s, "\", t = \"", t, "\")")
# Do not edit order of operations -------------------------------------------
start_times = zeros(Int64, S.n)
if do_end
end_times = zeros(Int64, S.n)
end
fs = S.fs
irr = falses(S.n)
z = zero(Int64)
# (1) Determine start and end times
for i = 1:S.n
start_times[i] = starttime(S.t[i], S.fs[i])
if do_end
end_times[i] = endtime(S.t[i], S.fs[i])
end
if fs[i] == 0.0
irr[i] = true
end
end
# (2) Determine earliest start and latest end
t_start = get_sync_t(s, start_times)
t_end = 0
t_str = "none"
if do_end
t_end = get_sync_t(t, end_times)
(t_end > t_start) || error("No time overlap with given start & end times!")
t_str = string(u2d(t_end*μs))
if v > 0
@info(@sprintf("Synchronizing %.2f seconds of data\n", (t_end - t_start)*μs))
if v > 1
@info(string("t_start = ", u2d(t_start*μs)))
@info(string("t_end = ", t_str))
end
end
elseif v > 0
@info(string("Synchronizing to start at ", u2d(t_start*μs)))
end
# (3) Synchronization to t_start (and maybe t_end)
dflag = falses(S.n)
for i = 1:S.n
# non-timeseries data
if fs[i] == 0.0
t = view(S.t[i], :, 2)
nt = length(t)
k = get_sync_inds(t, do_end, t_start, t_end)
nk = length(k)
if nk ≥ nt
dflag[i] = true
proc_note!(S, i, proc_str, "synchronize, :x unchanged")
continue
else
proc_note!(S, i, proc_str, string("synchronize, deleted ", nk, " samples from :x"))
end
deleteat!(S.x[i], k)
ti = collect(1:nt)
deleteat!(ti, k)
S.t[i] = S.t[i][ti,:]
S.t[i][:,1] .= 1:length(S.x[i])
# timeseries data
else
sync_str = Array{String, 1}(undef, 0)
desc_str = ""
# truncate X to values within bounds
Δ = round(Int64, sμ/fs[i])
(xi, W) = sync_t(S.t[i], Δ, t_start, t_end)
if isempty(W)
dflag[i] = true
continue
else
nx = length(S.x[i])
x_del = get_del_ranges(xi, nx)
nr = size(x_del, 1)
for j in nr:-1:1
if last(x_del[j]) == nx
resize!(S.x[i], first(x_del[j])-1)
else
deleteat!(S.x[i], x_del[j])
end
end
#= length 0 traces _can_ happen with resampled :x
where (lx*f_rat) < 0.5; requries short series of
high-frequency data resampled to too-low fs
=#
if length(S.x[i]) == 0
dflag[i] = true
continue
end
if length(S.x[i]) < nx
push!(sync_str, string("deleted ", nx-length(S.x[i]), " samples from :x"))
end
# prepend points to time series data that begin late
T = eltype(S.x[i])
μ = T(mean(S.x[i]))
ni = div(start_times[i] - t_start, Δ)
sort_segs!(W)
if (ni > 0) && (pad == true)
prepend!(S.x[i], ones(T, ni).*μ)
W[1] -= ni*Δ
# logging
push!(sync_str, string("prepended ", ni, " samples to :x."))
end
# append points to time series data that end early
if do_end
nj = div(t_end - W[end], Δ)
if (nj > 0) && (pad == true)
nx = length(S.x[i])
resize!(S.x[i], nx+nj)
V = view(S.x[i], nx+1:nx+nj)
fill!(V, μ)
W[end] += nj*Δ
# logging
push!(sync_str, string("appended ", nj, " samples to :x."))
end
end
# last step
S.t[i] = w_time(W, Δ)
# logging
if length(sync_str) > 0
desc_str = string(", ", join(sync_str, ";"))
end
proc_note!(S, i, proc_str, desc_str)
end
end
end
del_flagged!(S, dflag, "length 0 after sync")
return nothing
end
function sync!(C::SeisChannel;
pad::Bool=true,
s::Union{String,DateTime}="last",
t::Union{String,DateTime}="none",
v::Integer=KW.v )
S = SeisData(C)
sync!(S, pad=pad, s=s, t=t, v=v)
return nothing
end
@doc (@doc sync!)
function sync(S::GphysData;
pad::Bool=true,
s::Union{String,DateTime}="last",
t::Union{String,DateTime}="none",
v::Integer=KW.v )
T = deepcopy(S)
sync!(T, pad=pad, s=s, t=t, v=v)
return T
end
function sync(C::SeisChannel;
pad::Bool=true,
s::Union{String,DateTime}="last",
t::Union{String,DateTime}="none",
v::Integer=KW.v )
U = deepcopy(C)
S = SeisData(U)
sync!(S, pad=pad, s=s, t=t, v=v)
return S[1]
end
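#= Editor's note: hedged usage sketch (editor addition); time strings are
parsed as DateTimes.
  sync!(S)                          # trim all channels to the latest start time
  sync!(S, s="first", t="last")     # span earliest start through latest end
  T = sync(S, s="2019-06-01T00:00:00", t="2019-06-01T01:00:00", pad=false)
=#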
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1337 | export unscale, unscale!
@doc """
unscale!(S::GphysData[, chans=CC, irr=false])
Divide out the gains of all channels `i` where `S.fs[i] > 0.0`. Specify
`irr=true` to also remove the gains of irregularly-sampled channels. Use keyword
`chans=CC` to only resample channel numbers `CC`.
""" unscale!
function unscale!(S::GphysData;
chans::ChanSpec=Int64[],
irr::Bool=false)
if chans == Int64[]
chans = 1:S.n
end
proc_str = string("unscale!(S, chans=", chans, ")")
@inbounds for i = 1:S.n
(irr==false && S.fs[i]<=0.0) && continue
if (S.gain[i] != 1.0) && (i in chans)
T = eltype(S.x[i])
rmul!(S.x[i], T(1.0/S.gain[i]))
proc_note!(S, i, proc_str, string("divided out gain = ", repr(S.gain[i], context=:compact=>true)))
S.gain[i] = 1.0
end
end
return nothing
end
function unscale!(C::GphysChannel)
rmul!(C.x, eltype(C.x)(1.0/C.gain))
proc_note!(C, "unscale!(C)", string("divided out gain = ", repr(C.gain, context=:compact=>true)))
C.gain = 1.0
return nothing
end
@doc (@doc unscale!)
function unscale(S::GphysData;
chans::ChanSpec=Int64[],
irr::Bool=false)
U = deepcopy(S)
unscale!(U, chans=chans, irr=irr)
return U
end
function unscale(C::GphysChannel; irr::Bool=false)
U = deepcopy(C)
rmul!(U.x, eltype(C.x)(1.0/U.gain))
U.gain = 1.0
return U
end
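# Usage sketch (illustrative; `S` and `C` come from elsewhere):
#   unscale!(S)               # divide out gains on all regularly-sampled channels
#   unscale!(S, chans=[1,3])  # restrict to channels 1 and 3
#   U = unscale(S, irr=true)  # copy; also unscale irregularly-sampled channels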
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 11352 | merge_ext!(S::SeisData, Ω::Int64, rest::Array{Int64, 1}) = nothing
function merge_ext!(C::T1, D::T2) where {T1<:GphysChannel, T2<:GphysChannel}
if T1 == T2
ff = setdiff(fieldnames(T1), SeisIO.datafields)
for f in ff
setfield!(C, f, deepcopy(getfield(D, f)))
end
end
return nothing
end
function dup_check!(subgrp::Array{Int64, 1}, to_delete::Array{Int64, 1}, T::Array{Array{Int64, 2}, 1}, X::Array{FloatArray, 1})
N = length(subgrp)
if N > 1
# Check for duplicates
sort!(subgrp)
u = falses(N)
while N > 1
t1 = getindex(T, getindex(subgrp, N))
x1 = getindex(X, getindex(subgrp, N))
# if a channel is already known to be a duplicate, skip it
if getindex(u, N) == false
j = N
while j > 1
j = j-1
t2 = getindex(T, getindex(subgrp, j))
x2 = getindex(X, getindex(subgrp, j))
if t1 == t2 && x1 == x2
setindex!(u, true, j)
end
end
end
N = N-1
end
# flag duplicates for deletion
for i in 1:length(u)
if u[i]
push!(to_delete, subgrp[i])
end
end
# remove duplicates from merge targets
deleteat!(subgrp, u)
end
return length(subgrp)
end
function get_δt(t::Int64, Δ::Int64)
δts = rem(t, Δ)
if δts > div(Δ,2)
δts -= Δ
end
return δts
end
function check_alignment(Ti::Array{Int64,1}, Tj::Array{Int64,1}, Xi::Array{T,1}, Xj::Array{T,1}, Δ::Int64) where {T<:AbstractFloat}
z = zero(Int64)
# Rare case, but possible: they're the same data
if first(Ti) == first(Tj) && last(Ti) == last(Tj) && Xi == Xj
return Ti, Xi, false, z
else
# Find a time correction δt that gets applied to Ti, ts[i], te[i], etc.
# Find overlapping region, if one exists
L = length(Ti)
for n = 1:4
if n > L-1
break
end
xi_f = view(Xi, n+1:L)
xj_f = view(Xj, 1:L-n)
if isapprox(xi_f, xj_f)
return Tj[1:L-n], xj_f, false, 1*n
end
xi_b = view(Xi, 1:L-n)
xj_b = view(Xj, n+1:L)
if isapprox(xi_b, xj_b)
return Ti[1:L-n], xi_b, false, -1*n
end
end
end
return vcat(Ti,Tj), vcat(Xi,Xj), true, z
end
function xtmerge!(t::Array{Int64,1}, x::Array{T,1}, d::Int64) where {T<:AbstractFloat}
# Sanity check
(length(t) == length(x)) || error(string("Badly set times (Nt=", length(t), ",Nx=", length(x), "); can't merge!"))
# Sort
i = sortperm(t)
sort!(t)
x[:] = x[i]
# Check for duplicates
J0 = findall((diff(t).==0).*(diff(x).==0))
while !isempty(J0)
deleteat!(x, J0)
deleteat!(t, J0)
J0 = findall(diff(t) .== 0)
end
J0 = findall(diff(t) .< d)
while !isempty(J0)
J1 = J0.+1
K = [isnan.(x[J0]) isnan.(x[J1])]
# Average nearly-overlapping x that are either both NaN or neither Nan
ii = findall(K[:,1].==K[:,2])
i0 = J0[ii]
i1 = J1[ii]
t[i0] = div.(t[i0].+t[i1], 2)
x[i0] = 0.5.*(x[i0].+x[i1])
# Delete nearly-overlapping x with only one NaN (and delete all x ∈ i1)
i3 = findall(K[:,1].*(K[:,2].==false))
i4 = findall((K[:,1].==false).*K[:,2])
II = sort([J0[i4]; J1[i3]; i1])
deleteat!(t, II)
deleteat!(x, II)
J0 = findall(diff(t) .< d)
end
return nothing
end
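# Behavior sketch (illustrative values): with tolerance d = 5 (μs),
#   t = [0, 2, 1000000]; x = [1.0, 3.0, 5.0]
#   xtmerge!(t, x, 5)   # -> t == [1, 1000000], x == [2.0, 5.0]
# exact duplicates are dropped, near-coincident samples (Δt < d) are averaged
# in both time and value, and NaNs defer to finite values.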
# get hash of each non-empty loc, resp, units, fs ( fs should never be empty )
function get_subgroups( LOC ::Array{InstrumentPosition,1},
FS ::Array{Float64,1},
RESP ::Array{InstrumentResponse,1},
UNITS ::Array{String,1},
group ::Array{Int64,1} )
zh = zero(UInt64)
N_grp = length(group)
N_grp == 1 && return([group])
H = Array{UInt,2}(undef, N_grp, 4)
for (n,g) in enumerate(group)
H[n,1] = hash(getindex(FS, g))
H[n,2] = isempty(getindex(LOC, g)) ? zh : hash(getindex(LOC, g))
H[n,3] = isempty(getindex(RESP, g)) ? zh : hash(getindex(RESP, g))
H[n,4] = isempty(getindex(UNITS, g)) ? zh : hash(getindex(UNITS, g))
end
# If an entire column is unset, we don't care
H = H[:, findall([sum(H[:,i])>0 for i in 1:4])]
(Nh,Nc) = size(H)
# Find unique rows of H; sort
H_filled = sum(H .!= zh, dims=2)[:]
H_inds = sortperm(H_filled, rev=true)
N_subgrp = length(H_inds)
N_subgrp == 1 && return([group])
subgrp_inds = Array{Array{Int64,1},1}(undef, N_subgrp)
H = H[H_inds, :]
group = group[H_inds]
H_sub = deepcopy(H)
for i = 1:N_subgrp
subgrp_inds[i] = Array{Int64,1}(undef, 0)
subgrp_hash = H_sub[i, :]
for j = N_grp:-1:1
m = prod([H[j,k] in (zh, subgrp_hash[k]) for k=1:Nc])
if m
push!(subgrp_inds[i], group[j])
deleteat!(group, j)
H = H[setdiff(1:end, j), :]
end
N_grp = length(group)
end
if N_grp == 0
subgrp_inds = subgrp_inds[1:i]
N_subgrp = length(subgrp_inds)
break
end
end
deleteat!(subgrp_inds, [isempty(subgrp_inds[j]) for j=1:length(subgrp_inds)])
return subgrp_inds
end
function get_next_pair(W::Array{Int64,2})
L = size(W,1)
i = 1
# dest loop
while i < L
si = getindex(W, i)
ei = getindex(W, i + L)
j = i + 1
# src loop
while j ≤ L
sj = getindex(W, j)
ej = getindex(W, j+L)
if min(si ≤ ej, ei ≥ sj) == true
# src # dest
return vcat(W[j,:], j), vcat(W[i,:], i)
end
j = j + 1
end
i = i + 1
end
return zeros(Int64, 7), zeros(Int64, 7)
end
function get_merge_w(Δ::Int64, subgrp::Array{Int64,1}, T::Array{Array{Int64, 2}, 1}, X::Array{FloatArray, 1})
N = length(subgrp)
w_tmp = Array{Array{Int64,2}, 1}(undef, N)
te = Array{Int64, 1}(undef, N)
for i = 1:N
m = getindex(subgrp, i)
w_m = t_win(getindex(T, m), Δ)
n_w = size(w_m, 1)
# Store: w_start, w_end, channel_number, window_number, x_start, x_end
w = hcat(w_m, Array{Int64,2}(undef, n_w, 4))
j = 0
ws = 0
we = 0
while j < n_w
j = j + 1
we = max(we, getindex(w_m, j, 2))
setindex!(w, m, j, 3) # channel number
setindex!(w, j, j, 4) # window number
setindex!(w, ws+1, j, 5) # x_start
ws = ws + div(w[j,2]-w[j,1], Δ)+1 # x_end
setindex!(w, ws, j, 6)
end
setindex!(te, we, i)
setindex!(w_tmp, w, i)
end
W = vcat(w_tmp...)
ii = sortperm(W[:,2], rev=true)
W = W[ii,:]
#= added 2019-11-19:
fixes a rare off-by-one bug with slightly-offset windows (issue #29)
issue creator only sees bug in one file from 20 years of data
clumsy fix based on sound principles:
* force X to start an integer # of samples from the epoch
- prevents a discrepancy between length(X) and length(T)
* add the offset back to the start time of the merged channel data
=#
# Let Ω be the channel number in subgrp with the last end time
Ω = subgrp[argmax(te)]
return W, Ω
end
function segment_merge(Δ::Int64, Ω::Int64, W::Array{Int64, 2}, X::Array{FloatArray, 1})
nW = size(W,1)
i = argmin(W[:,1])
δts = get_δt(W[i,1], Δ)
if δts != 0
for i in 1:size(W,1)
W[i,1] -= δts
W[i,2] -= δts
end
end
(src, dest) = get_next_pair(W)
while (src, dest) != (zeros(Int64, 7), zeros(Int64, 7))
ts_i = src[1]; te_i = src[2]; p = src[3]; p_i = src[4]; os_p = src[5]; W_p = src[7]
ts_j = dest[1]; te_j = dest[2]; q = dest[3]; q_i = dest[4]; os_q = dest[5]; W_q = dest[7]
ts_max = max(ts_i, ts_j); ts_max -= get_δt(ts_max, Δ)
te_min = min(te_i, te_j); te_min -= get_δt(te_min, Δ)
nov = 1 + div(te_min - ts_max, Δ)
Xq = getindex(X, q)
# (1) determine the times and indices of overlap within each pair
# a. determine sample times of overlap
Ti = collect(ts_max:Δ:te_min)
Tj = deepcopy(Ti)
# b. get sample indices within each overlap window
# i
xsi_i = round(Int64, (ts_max - ts_i)/Δ) + os_p
xei_i = xsi_i + nov - 1
# j
xsi_j = round(Int64, (ts_max - ts_j)/Δ) + os_q
xei_j = xsi_j + nov - 1
# (2) Extract sample windows
Xi = getindex(getindex(X, p), xsi_i:xei_i)
Xj = getindex(getindex(X, q), xsi_j:xei_j)
lxp = length(getindex(X, p))
lxq = length(getindex(X, q))
# ================================================================
# check for duplicate windows
if (ts_i == ts_j) && (te_i == te_j) && (Xi == Xj)
# delete time window
W = W[setdiff(1:end, W_p), :]
else
# Check for misalignment:
τ, χ, do_xtmerge, δj = check_alignment(Ti, Tj, Xi, Xj, Δ)
if do_xtmerge
xtmerge!(τ, χ, div(Δ,2))
end
if δj != 0
xsi_i += δj
xei_j -= δj
end
# (3) Merge X,T into S[q]
deleteat!(Xq, xsi_j:xei_j)
if xsi_j == 1
prepend!(Xq, χ)
else
splice!(Xq, xsi_j:xsi_j-1, χ)
end
# (4) Adjust start, end indices of windows ≥ q_i in q
# structure: w_start, w_end, channel_number, window_number, x_start, x_end
nxq = length(Xq) - lxq
i = 0
while i < nW
i += 1
if W[i, 3] == q && W[i, 4] ≥ q_i
W[i, 1] += nxq*Δ
W[i, 2] += nxq*Δ
W[i, 5] += nxq
W[i, 6] += nxq
end
end
#= if xsi_i ≤ os_p (which is always true, at this point in
the control flow), we decrease W[W_p, 1:2] =#
nxp = xei_i-xsi_i+1
W[W_p, 1] -= δj*Δ
W[W_p, 2] -= (nxp + δj)*Δ
#= Control for when window P is emptied; the above two statements
make this possible =#
if (W[W_p, 2] < W[W_p, 1])
W = W[setdiff(1:end, W_p), :]
else
W[W_p, 6] -= nxp
end
end
# Sort by end time, to ensure we pick the window with latest end next
k = sortperm(W[:, 2], rev=true)
W = W[k, :]
nW = size(W, 1)
# Repeat until no further merges are possible
(src, dest) = get_next_pair(W)
end
kk = sortperm(W[:, 1])
W = W[kk, :]
#= At this point, we have nothing left that can be merged. So we're going
to arrange T[subgrp] and X[subgrp] in windows using t_win =#
n = size(W, 1)
nx = broadcast(+, getindex(W, :, 6).-getindex(W, :,5), 1)
X_Ω = Array{eltype(X[Ω]),1}(undef, sum(nx))
xi = 1
i = 0
while i < n
i = i + 1
p = getindex(W, i, 3)
lx = getindex(nx, i)
copyto!(X_Ω, xi, getindex(X, p), getindex(W, i, 5), lx)
xi = xi + lx
end
# Shrink W and eliminate windows with no actual gap between them
m = trues(n)
while n > 1
if W[n-1, 2] + Δ == W[n,1]
W[n-1, 2] = W[n,2]
m[n] = false
end
n = n - 1
end
W = W[m,[1,2]]
broadcast!(+, W, W, δts)
T_Ω = w_time(W, Δ)
return T_Ω, X_Ω
end
# merges into the channel with the most recent data
function merge_non_ts!(S::GphysData, subgrp::Array{Int64,1})
te = [maximum(t[:,2]) for t in S.t[subgrp]]
Ω = subgrp[argmax(te)]
T = vcat(S.t[subgrp]...)[:,2]
X = vcat(S.x[subgrp]...)
Z = unique(collect(zip(T,X)))
T = first.(Z)
X = last.(Z)
ii = sortperm(T)
S.t[Ω] = hcat(collect(1:1:length(T)), T[ii])
S.x[Ω] = X[ii]
return Ω
end
function merge_non_ts!(C::GphysChannel, D::GphysChannel)
T = vcat(C.t, D.t)[:, 2]
X = vcat(C.x, D.x)
Z = unique(collect(zip(T,X)))
T = first.(Z)
X = last.(Z)
ii = sortperm(T)
C.t = hcat(collect(1:length(T)), T[ii])
C.x = X[ii]
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5470 | @doc """
merge!(S::SeisData, U::SeisData[, purge_only=true])
Merge channels of two SeisData structures.
merge!(S::SeisData[, purge_only=true])
"Flatten" a SeisData structure by merging channels with identical properties.
If `purge_only=true`, the only action taken is deletion of empty and duplicate
channels; `merge!(S, U, purge_only=true)` is identical to an in-place `S+U`.
""" merge!
function merge!(S::Y; v::Integer=KW.v, purge_only::Bool=false) where Y<:GphysData
# Required preprocessing
prune!(S)
# Initialize variables
ID = getfield(S, :id)
NAME = getfield(S, :name)
LOC = getfield(S, :loc)
FS = getfield(S, :fs)
GAIN = getfield(S, :gain)
RESP = getfield(S, :resp)
UNITS = getfield(S, :units)
SRC = getfield(S, :src)
MISC = getfield(S, :misc)
NOTES = getfield(S, :notes)
T = getfield(S, :t)
X = getfield(S, :x)
UID = unique(getfield(S, :id))
cnt = 0
note_head = string(SeisIO.timestamp(), " ¦ ")
to_delete = Array{Int64,1}(undef, 0)
while cnt < length(UID)
cnt = cnt + 1
id = getindex(UID, cnt)
GRP = findall(S.id.==id)
SUBGRPS = get_subgroups(LOC, FS, RESP, UNITS, GRP)
for subgrp in SUBGRPS
dup_check!(subgrp, to_delete, T, X)
(purge_only == true) && continue
N = length(subgrp)
i1 = getindex(subgrp, 1)
fs = getindex(FS, i1)
if fs == 0.0
Ω = merge_non_ts!(S, subgrp)
append!(to_delete, subgrp[subgrp.!=Ω])
continue
end
Δ = round(Int64, sμ/fs)
W, Ω = get_merge_w(Δ, subgrp, T, X)
rest = subgrp[subgrp.!=Ω]
# GAIN, MISC, NAME, NOTES, SRC ==========================================
gain = getindex(GAIN, Ω)
notes = getindex(NOTES, Ω)
misc = getindex(MISC, Ω)
for i in rest
scalefac = gain / getindex(GAIN, i)
if scalefac != 1.0
rmul!(getindex(X, i), scalefac)
GAIN[i] = gain # added 2020-12-03, in case merge breaks
end
if getindex(SRC, i) != getindex(SRC, Ω)
push!(notes, string(note_head, "+source ¦ ", getindex(SRC, i)))
end
if getindex(NAME, i) != getindex(NAME, Ω)
push!(notes, string(note_head, "alt name ¦ ", getindex(NAME, i)))
end
append!(notes, getindex(NOTES, i))
merge!(misc, getindex(MISC, i))
end
sort!(notes)
# Extra fields ==========================================================
merge_ext!(S, Ω, rest)
# T,X ===================================================================
T_Ω, X_Ω = segment_merge(Δ, Ω, W, X)
setindex!(X, X_Ω, Ω)
setindex!(T, T_Ω, Ω)
# Document it
proc_note!(S, Ω, "merge!", string("combined channels ",
repr(subgrp), " (N = ", N, ") into :t, :x"))
append!(to_delete, rest)
end
# Done with SUBGRPS
end
deleteat!(S, to_delete)
sort!(S)
return nothing
end
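# Usage sketch (illustrative; `S` is a SeisData variable from elsewhere):
#   merge!(S)                  # merge channels that share :id, :fs, :loc, :resp, :units
#   merge!(S, purge_only=true) # only delete empty and duplicate channels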
# The following must be the same (or unset) to merge:
# :loc
# :resp
# :fs cannot be unset; not a nullable array
# :units
# The following are easily dealt with:
# :gain can be translated with no trouble
# :misc can call merge! method for dictionaries
# :notes append and sort
# :name not important, can log extra names to :notes
# :src use most recent, log extras to :notes
function merge!(C::T1, D::T2) where {T1<:GphysChannel, T2<:GphysChannel}
# Identical structures or empty D
((C == D) || isempty(D.x) || isempty(D.t)) && return nothing
# empty C
if (isempty(C.x) || isempty(C.t))
ff = (T1 == T2) ? fieldnames(T1) : SeisIO.datafields
for f in ff
setfield!(C, f, deepcopy(getfield(D, f)))
end
return nothing
end
# partial match or exit
m = cmatch_p!(C, D)
(m == false) && (@warn("Critical field mismatch! Not merged!"); return nothing)
# auto-convert T2 to T1
if T2 != T1
D = convert(T1, D)
end
note_head = string(SeisIO.timestamp(), " ¦ ")
ttest = (endtime(C.t, C.fs) > endtime(D.t, D.fs))
ω = ttest ? C : D
α = ttest ? D : C
# at this point, (:fs, :gain, :loc, :resp, :units) are known to match
# this is stricter than merge!, which allows gain mismatches within S
# MISC, NAME, NOTES, SRC ================================================
if ω.src != α.src
push!(ω.notes, string(note_head, "+source ¦ ", α.src))
end
if ω.name != α.name
push!(ω.notes, string(note_head, "alt name ¦ ", α.name))
end
append!(C.notes, D.notes)
sort!(C.notes)
if ttest
merge!(C.misc, D.misc)
else
C.misc = merge(D.misc, C.misc)
end
# Extra fields ==========================================================
merge_ext!(ω, α)
# T,X ===================================================================
if C.fs == 0.0
merge_non_ts!(C, D)
return nothing
end
# Proceed only for time-series data
subgrp = ttest ? [1, 2] : [2, 1]
T = [ω.t, α.t]
X = Array{FloatArray, 1}(undef, 2)
X[1] = ω.x
X[2] = α.x
dup_check!(subgrp, Int64[], T, X)
N = length(subgrp)
# At this point, N == 1 means the same data at the same times; nothing to do
(N == 1) && return nothing
# on to time windows
fs = C.fs
Δ = round(Int64, sμ/fs)
W, Ω = get_merge_w(Δ, subgrp, T, X)
TΩ, XΩ = segment_merge(Δ, Ω, W, X)
C.t = TΩ
C.x = XΩ
proc_note!(C, "merge!", string("combined data from a structure of type $T2"))
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1577 | export purge!, purge
# Home of all extended merge! methods
@doc (@doc merge)
merge(S::SeisData; v::Integer=KW.v) = (U = deepcopy(S); merge!(U, v=v); return U)
merge!(S::SeisData, U::SeisData; v::Integer=KW.v) = ([append!(getfield(S, f), getfield(U, f)) for f in SeisIO.datafields]; S.n += U.n; merge!(S; v=v))
merge!(S::SeisData, C::SeisChannel; v::Integer=KW.v) = merge!(S, SeisData(C), v=v)
"""
S = merge(A::Array{SeisData,1})
Merge an array of SeisData objects, creating a single output with the merged
input data.
See also: `merge!`
"""
function merge(A::Array{SeisData,1}; v::Integer=KW.v)
L::Int64 = length(A)
n = sum([A[i].n for i = 1:L])
T = SeisData(n)
[setfield!(T, f, vcat([getfield(A[i],f) for i = 1:L]...)) for f in SeisIO.datafields]
merge!(T, v=v)
return T
end
merge(S::SeisData, U::SeisData; v::Integer=KW.v) = merge(Array{SeisData,1}([S,U]), v=v)
merge(S::SeisData, C::SeisChannel; v::Integer=KW.v) = merge(S, SeisData(C), v=v)
merge(C::SeisChannel, S::SeisData; v::Integer=KW.v) = merge(SeisData(C), S, v=v)
merge(C::SeisChannel, D::SeisChannel; v::Integer=KW.v) = (S = SeisData(C,D); merge!(S, v=v); return S)
"""
purge!(S::SeisData)
Remove empty and duplicated channels in S; alias to `merge!(S, purge_only=true)`
purge(S::SeisData)
"Safe" purge to a new SeisData object. Alias to `merge(S, purge_only=true)``
"""
purge!(S::T, v::Integer=KW.v) where {T<:GphysData} = merge!(S, purge_only=true, v=v)
function purge(S::T, v::Integer=KW.v) where {T<:GphysData}
U = deepcopy(S)
merge!(U, v=v, purge_only=true)
return U
end
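# Usage sketch (illustrative; `S` comes from elsewhere):
#   purge!(S)      # in place: drop empty and duplicate channels
#   T = purge(S)   # same, but returns a cleaned copy and leaves S unchanged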
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1186 | # Addition
#
# commutativity
# S1 + S2 == S2 + S1
# S + C == C + S
# C1 + C2 == C2 + C1
# S + U - U == S (for sorted S)
#
# associativity
# (S1 + S2) + S3 == S1 + (S2 + S3)
# (S1 + S2) + C == S1 + (S2 + C)
function +(S::T, U::T) where {T<:GphysData}
Ω = deepcopy(S)
append!(Ω, U)
merge!(Ω, purge_only=true)
return Ω
end
+(S::SeisData, C::SeisChannel) = +(S, SeisData(C))
+(C::SeisChannel, S::SeisData) = +(S, SeisData(C))
+(C::SeisChannel, D::SeisChannel) = +(SeisData(C), SeisData(D))
# Subtraction
-(S::GphysData, i::Int) = (U = deepcopy(S); deleteat!(U,i); return U) # By channel #
-(S::GphysData, J::Array{Int,1}) = (U = deepcopy(S); deleteat!(U,J); return U) # By array of channel #s
# Multiplication
# distributivity: (S1+S2)*S3 == (S1*S3 + S2*S3)
*(S::SeisData, U::SeisData) = merge(Array{SeisData,1}([S,U]))
*(S::SeisData, C::SeisChannel) = merge(S, SeisData(C))
function *(C::SeisChannel, D::SeisChannel)
s1 = deepcopy(C)
s2 = deepcopy(D)
S = merge(SeisData(s1),SeisData(s2))
return S
end
# Division will happen eventually; for S/U to be logical, we need to extract
# time ranges of data from S that are not in U. This will work like unix `diff`
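# Usage sketch (illustrative; S, U are SeisData and C is a SeisChannel):
#   S + U    # append, then delete empty/duplicate channels (commutative)
#   S + C    # C is promoted to a one-channel SeisData first
#   S - 2    # copy of S with channel 2 deleted
#   S * U    # full merge of S and U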
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2002 | # =====================================================================
function update_ff!(ff::Array{Complex{Float32},1},
zp::Union{PZResp, PZResp64},
f::Array{Float32,1})
Z = zp.z
P = zp.p
i = 0
γ = 0.0f0
g = 0.0f0
d = zero(Complex{Float32})
ϵ = eps(Float32)^2
@inbounds while i < length(f)
i = i + 1
cf = im*f[i]
n = 1.0f0
for z in Z
n *= cf-z
end
for p in P
d = conj(cf-p)
n *= d
n /= max(ϵ, abs2(d))
end
g = abs2(n)
if g > γ
γ = g
end
setindex!(ff, n, i)
end
rmul!(ff, zp.a0)
return sqrt(γ)*zp.a0
end
# frequencies in radians/s from -pi*fs to pi*fs
function fill_f!(f::Array{Float32,1}, fs::Float32, N2::Int64)
N = div(N2, 2)
fn = Float32(pi*fs)
df = fn/Float32(N)
f[:] .= 0:N2-1
for i = N+2:N2
f[i] -= N2
end
rmul!(f, df)
return f
end
function update_resp!(f::AbstractArray,
ff_old::AbstractArray,
ff_new::AbstractArray,
N2::Int64,
fs::Float32,
resp_old::Union{PZResp, PZResp64},
resp_new::Union{PZResp, PZResp64},
wl::Float32)
fill_f!(f, fs, N2)
update_ff!(ff_new, resp_new, f)
γ = update_ff!(ff_old, resp_old, f)
# allows a manual water level for ill-behaved translations with scaling issues
wm = γ * wl
for i = 1:N2
setindex!(ff_new, (ff_new[i]*conj(ff_old[i])) / (abs2(ff_old[i]) + wm), i)
end
return nothing
end
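#= Note on update_resp! above: the loop performs water-level regularized
spectral division. With H_old, H_new the old and new response spectra,
  ff_new(f) <- H_new(f) * conj(H_old(f)) / (|H_old(f)|^2 + wl * max|H_old|),
where max|H_old| is the peak magnitude returned by update_ff!; `wl` therefore
sets the water level as a fraction of that peak. =#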
function update_resp_vecs!( Xw::Array{Complex{Float32},1},
f::Array{Float32,1},
ff_old::Array{Complex{Float32},1},
ff_new::Array{Complex{Float32},1},
N2::Int64 )
resize!(Xw, N2)
resize!(f, N2)
resize!(ff_old, N2)
resize!(ff_new, N2)
xfl = reinterpret(Float32, Xw)
xre = view(xfl, 1:2:2*N2-1)
return xfl, xre
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 595 | export fctoresp
"""
fctoresp(f)
fctoresp(f, c)
Create a PZResp or PZResp64 instrument response from lower corner frequency `f` and damping constant `c`. If no damping constant is supplied, assumes `c = 1/sqrt(2)`.
See also: `PZResp`, `PZResp64`
"""
function fctoresp(f::AbstractFloat, c::AbstractFloat=1.0f0/sqrt(2.0f0))
T = typeof(f)
fxp = T(2.0*pi)*f
r = sqrt(Complex(c^2 - one(T)))
z = zeros(Complex{T}, 1)
p = Complex{T}[r-c, -r-c].*fxp
if T == Float32
return PZResp(f0 = Float32(f), p = p, z = z)
else
return PZResp64(f0 = Float64(f), p = p, z = z)
end
end
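# Usage sketch: the poles are p = 2πf*(-c ± sqrt(c^2 - 1)); with the default
# damping c = 1/sqrt(2) this yields a complex-conjugate pole pair.
#   r32 = fctoresp(1.0f0)      # PZResp (Float32), f0 = 1.0
#   r64 = fctoresp(4.5, 0.71)  # PZResp64 (Float64), f0 = 4.5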
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1150 | export resp_a0!
@doc """
resp_a0!(R::Union{PZResp, PZResp64})
Update normalization factor `R.a0` from `R.z`, `R.p`, and `R.f0`.
resp_a0!(S::GphysData)
Call `resp_a0!` on each response in S with typeof(S.resp[i]) ∈ [PZResp, PZResp64].
See also: `PZResp`, `PZResp64`
""" resp_a0!
function resp_a0!(resp::Union{PZResp, PZResp64})
T = typeof(resp.a0)
Z = SeisIO.poly(resp.z)
P = SeisIO.poly(resp.p)
s = Complex{T}(2*pi*im*resp.f0)
setfield!(resp, :a0, one(T)/T(abs(SeisIO.polyval(Z, s)/SeisIO.polyval(P, s))))
return nothing
end
function fix_a0!(R::Union{PZResp, PZResp64}, units::String)
Y = typeof(R)
(Y in [PZResp, PZResp64]) || return
T = typeof(R.a0)
resp_a0!(R)
if lowercase(units) == "m/s" && R.a0 > zero(T)
R.a0 *= T(-1.0f0)
end
return nothing
end
function resp_a0!(S::GphysData)
for i = 1:S.n
R = getindex(S.resp, i)
Y = typeof(R)
if Y in [PZResp, PZResp64]
fix_a0!(R, S.units[i])
elseif Y == MultiStageResp
for R in getfield(R, :stage)
if typeof(R) in [PZResp, PZResp64]
fix_a0!(R, S.units[i])
end
end
end
end
return nothing
end
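# Usage sketch (illustrative):
#   r = fctoresp(1.0f0)  # build a pole-zero response
#   resp_a0!(r)          # recompute r.a0 so |H(f)| is normalized to 1 at f0
#   resp_a0!(S)          # same, for every supported response (or first stage) in S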
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 518 | export resptofc
"""
resptofc(R::Union{PZResp, PZResp64}))
Attempt to guess critical frequency of seismic instrument response R.
Assumes broadband sensors behave roughly like geophones (i.e., as harmonic
oscillators with a single lower corner frequency) at low frequencies.
See also: `fctoresp`, `PZResp`
"""
function resptofc(R::Union{PZResp, PZResp64})
T = typeof(R.a0)
P = R.p
i = argmin(abs.([real(P[j])-imag(P[j]) for j = 1:length(P)]))
return T(rationalize(abs(P[i]) / 2pi, tol=eps(Float32)))
end
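# Usage sketch (illustrative round trip with fctoresp):
#   r = fctoresp(2.0f0)   # geophone-like response with a 2 Hz corner
#   f0 = resptofc(r)      # ≈ 2.0f0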
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 9575 | export translate_resp!, translate_resp, remove_resp!, remove_resp
# =====================================================================
@doc """
translate_resp!(S, resp_new[, chans=CC, wl=γ])
translate_resp(S, resp_new[, chans=CC, wl=γ])
Translate the instrument response of seismic data channels `CC` in `S` to
`resp_new`. Replaces field `:resp` with `resp_new` for all affected channels.
remove_resp!(S[, chans=CC, wl=γ])
remove_resp(S[, chans=CC, wl=γ])
Remove (flatten to DC) the instrument response of seismic data channels `cha`
in `S`. Replaces field `:resp` with the appropriate (all-pass) response.
translate_resp!(Ch, resp_new[, wl=γ])
translate_resp(Ch, resp_new[, wl=γ])
Translate the instrument response of seismic data in SeisChannel object `Ch` to
`resp_new`. Replaces field `:resp` with `resp_new`.
remove_resp!(Ch[, wl=γ])
remove_resp(Ch[, wl=γ])
Remove (flatten to DC) the instrument response of seismic data in `Ch`.
Replaces field `:resp` with the appropriate (all-pass) response.
### Keywords
* **chans=CC** restricts response translation to channel(s) `CC`. By default, all seismic data channels have responses translated to `resp_new`.
* **wl=γ** sets the water level to γ (default: `γ` = eps(Float32) ≈ 1.2f-7)
### Interaction with the :resp field
`translate_resp` and `remove_resp` only work on a channel `i` whose response satisfies `typeof(S.resp[i]) ∈ (PZResp, PZResp64, MultiStageResp)`. In the last case, `S.resp[i].stage[1]` must be a PZResp or PZResp64; only the first stage of the response is changed, and the stage gain is ignored; instead, the sensitivity `S.resp[i].stage[1].a0` is used.
### Poles and zeros should be rad/s
Always check when loading from an unsupported data format. Responses read from station XML are corrected to rad/s automatically (most use rad/s); responses read from a SACPZ or SEED RESP file already use rad/s.
!!! warning
Response translation doesn't guarantee causality; if this is a problem, detrend and taper first!
""" translate_resp!
function translate_resp!(S::GphysData,
resp_new::Union{PZResp, PZResp64};
chans::ChanSpec=Int64[],
wl::Float32=eps(Float32))
# first ensure that there is something to do
chans = mkchans(chans, S, keepirr=false)
@inbounds for i in chans
if S.resp[i] != resp_new
break
end
if i == last(chans)
@info(string(timestamp(), ": nothing done (no valid responses to translate)."))
return nothing
end
end
# remove channels with inappropriate response types
k = Int64[]
for (n,i) in enumerate(chans)
if (typeof(S.resp[i]) <: Union{PZResp, PZResp64, MultiStageResp}) == false
push!(k,n)
end
end
deleteat!(chans, k)
# initialize complex "work" vectors to the largest size we need
Nx = nx_max(S, chans)
(Nx == 0) && return
N2 = nextpow(2, Nx)
Xw = Array{Complex{Float32},1}(undef, N2)
ff_old = Array{Complex{Float32},1}(undef, N2)
ff_new = Array{Complex{Float32},1}(undef, N2)
f = Array{Float32,1}(undef, N2)
GRPS = get_unique(S, ["fs", "resp", "units"], chans)
for grp in GRPS
# get fs, resp
j = grp[1]
RT = typeof(getindex(getfield(S, :resp), j))
# no translating instrument responses unless we're dealing with seismometers
codes = inst_codes(S)
kill = falses(length(grp))
for i = 1:length(grp)
if codes[grp[i]] in seis_inst_codes
continue
else
kill[i] = true
end
end
deleteat!(grp, kill)
isempty(grp) && continue
j = grp[1]
uu = lowercase(S.units[j])
# onward
resp_old = RT == MultiStageResp ? deepcopy(S.resp[j].stage[1]) : deepcopy(S.resp[j])
fs = Float32(S.fs[j])
if resp_old != resp_new
# we'll need this for logging
resp_str = string("processing ¦ translate_resp!(S, wl = ", wl, ", resp_old = ", typeof(resp_old),
"(a0=", resp_old.a0, ", f0=", resp_old.f0, ", p=", resp_old.p, ", z=", resp_old.z,
") ¦ instrument response translation")
# for accelerometers, working *in* acceleration units, we check: are there any poles below the nyquist?
if uu == "m/s2"
P = resp_old.p
k = trues(length(P))
for (i,p) in enumerate(P)
if abs(p)/pi ≤ fs # equivalent to abs(p)/(2pi) ≤ fs/2, i.e., below the Nyquist frequency
k[i] = false
end
end
deleteat!(P, k)
# Most accelerometers have two complex zeros at the origin, but many XML resp files don't show it.
if isempty(resp_old.z)
resp_old.z = map(eltype(resp_old.z), [0.0 + 0.0im, 0.0 - 0.0im])
end
end
# get views to segments from each target channel
(L,X) = get_views(S, grp)
# initialize Nx, N2, xre
Nx = first(L)
N2 = nextpow(2, Nx)
xfl, xre = update_resp_vecs!(Xw, f, ff_old, ff_new, N2)
update_resp!(f, ff_old, ff_new, N2, fs, resp_old, resp_new, wl)
j = 0
while j < length(L)
j = j + 1
if L[j] != Nx
Nx = L[j]
N2 = nextpow(2, Nx)
xfl, xre = update_resp_vecs!(Xw, f, ff_old, ff_new, N2)
update_resp!(f, ff_old, ff_new, N2, fs, resp_old, resp_new, wl)
end
# copy X[j] to Xw and compute FFT
fill!(Xw, zero(Complex{Float32}))
copyto!(Xw, 1, X[j], 1, Nx)
fft!(Xw)
broadcast!(*, Xw, Xw, ff_new)
ifft!(Xw)
copyto!(X[j], 1, xre, 1, Nx)
end
# post-processing: set resp and log to :notes
for k in grp
if RT == MultiStageResp
S.resp[k].stage[1] = deepcopy(resp_new)
else
setindex!(S.resp, deepcopy(resp_new), k)
end
note!(S, k, resp_str)
end
end
end
return nothing
end
@doc (@doc translate_resp!)
function translate_resp( S::GphysData,
resp_new::Union{PZResp, PZResp64};
chans::ChanSpec=Int64[],
wl::Float32=eps(Float32))
U = deepcopy(S)
translate_resp!(U, resp_new, chans=chans, wl=wl)
return U
end
function translate_resp!(C::GphysChannel,
resp_new::Union{PZResp, PZResp64};
wl::Float32=eps(Float32))
# first ensure that there is something to do
uu = lowercase(C.units)
fs = Float32(C.fs)
if any([C.resp == resp_new,
(uu in ("m/s", "m/s2", "m")) == false,
fs ≤ 0.0f0,
(inst_code(C) in seis_inst_codes) == false])
@info(string(timestamp(), ": nothing done (no valid responses to translate)."))
return nothing
end
# initialize complex "work" vectors to the largest size we need
Nx = nx_max(C)
(Nx == 0) && return
N2 = nextpow(2, Nx)
Xw = zeros(Complex{Float32}, N2)
ff_old = Array{Complex{Float32},1}(undef, N2)
ff_new = Array{Complex{Float32},1}(undef, N2)
f = Array{Float32,1}(undef, N2)
# for accelerometers, working *in* acceleration units, we check: are there any poles below the nyquist?
if uu == "m/s2"
P = C.resp.p
k = trues(length(P))
for (i,p) in enumerate(P)
if abs(p)/pi ≤ fs # equivalent to abs(p)/(2pi) ≤ fs/2, i.e., below the Nyquist frequency
k[i] = false
end
end
deleteat!(P, k)
# Most accelerometers have two complex zeros at the origin, but many XML resp files don't show it.
if isempty(C.resp.z)
C.resp.z = map(eltype(C.resp.z), [0.0 + 0.0im, 0.0 - 0.0im])
end
end
# Get views
if size(C.t,1) == 2
xfl, xre = update_resp_vecs!(Xw, f, ff_old, ff_new, N2)
update_resp!(f, ff_old, ff_new, N2, fs, C.resp, resp_new, wl)
# copy X[j] to Xw and compute FFT
copyto!(Xw, 1, C.x, 1, Nx)
fft!(Xw)
broadcast!(*, Xw, Xw, ff_new)
ifft!(Xw)
copyto!(C.x, 1, xre, 1, Nx)
else
(L,X) = get_views(C)
xfl = reinterpret(Float32, Xw)
xre = view(xfl, 1:2:2*N2-1)
update_resp!(f, ff_old, ff_new, N2, fs, C.resp, resp_new, wl)
j = 0
while j < length(L)
j = j + 1
if L[j] != Nx
Nx = L[j]
N2 = nextpow(2, Nx)
xfl, xre = update_resp_vecs!(Xw, f, ff_old, ff_new, N2)
update_resp!(f, ff_old, ff_new, N2, fs, C.resp, resp_new, wl)
end
if j > 1
fill!(Xw, zero(Complex{Float32}))
end
copyto!(Xw, 1, X[j], 1, Nx)
fft!(Xw)
broadcast!(*, Xw, Xw, ff_new)
ifft!(Xw)
copyto!(X[j], 1, xre, 1, Nx)
end
end
resp_str = string("processing ¦ translate_resp!(S, wl = ", wl, ", resp_old = ", typeof(C.resp),
"(a0=", C.resp.a0, ", f0=", C.resp.f0, ", p=", C.resp.p, ", z=", C.resp.z,
") ¦ instrument response translation")
C.resp = deepcopy(resp_new)
note!(C, resp_str)
return nothing
end
function translate_resp(C::GphysChannel,
resp_new::Union{PZResp, PZResp64};
wl::Float32=eps(Float32))
U = deepcopy(C)
translate_resp!(U, resp_new, wl=wl)
return U
end
@doc (@doc translate_resp!)
remove_resp!(S::GphysData;
chans::ChanSpec=Int64[],
wl::Float32=eps(Float32)) = translate_resp!(S, flat_resp, chans=chans, wl=wl)
@doc (@doc translate_resp!)
remove_resp(S::GphysData;
chans::ChanSpec=Int64[],
wl::Float32=eps(Float32)) = translate_resp(S, flat_resp, chans=chans, wl=wl)
remove_resp!(Ch::GphysChannel;
wl::Float32=eps(Float32)) = translate_resp!(Ch, flat_resp, wl=wl)
remove_resp(Ch::GphysChannel;
wl::Float32=eps(Float32)) = translate_resp(Ch, flat_resp, wl=wl)
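# Usage sketch (illustrative; `S` comes from elsewhere):
#   remove_resp!(S)                          # flatten seismic channels to DC
#   translate_resp!(S, fctoresp(0.02f0))     # e.g., simulate a 50 s instrument
#   remove_resp!(S, chans=[1,2], wl=1.0f-5)  # restrict channels; custom water level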
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 306 | module ASCII
using Dates, Mmap, Printf, SeisIO, SeisIO.FastIO, SeisIO.Formats
# imports
include("ASCII/imports.jl")
include("ASCII/GeoCSV.jl")
include("ASCII/SLIST.jl")
# exports
export formats,
read_geocsv_file!,
read_geocsv_slist!,
read_geocsv_tspair!,
read_slist!
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4203 | module Formats
using Dates:Date
import Base:show
export formats
mutable struct FmtVer
v::Union{Number, String}
d::Union{Date, String}
s::Union{Bool, Nothing}
FmtVer() = new(0, Date("1970-01-01"), nothing)
FmtVer(v::Union{Number, String},
d::Union{Date, String},
s::Union{Bool, Nothing}) = new(v,d,s)
end
const HistVec = Array{FmtVer,1}
const FmtStatus = Dict{UInt8, String}(
0x00 => "unknown",
0x01 => "in use; maintained",
0x20 => "legacy; maintained but no longer in use",
0xfd => "ostensibly maintained, but they don't answer our emails",
0xfe => "suspected abandoned, can't find contact info",
0xff => "abandoned"
)
mutable struct FormatDesc
name::String
str::String
origin::String
source::String
contact::String
ver::HistVec
desc::Array{String,1}
used::Array{String,1}
docs::Array{String,1}
status::UInt8
function FormatDesc()
return new( "",
"",
"",
"",
"",
HistVec(undef, 0),
String[],
String[],
String[],
0x00,
)
end
function FormatDesc( name::String,
str::String,
origin::String,
source::String,
contact::String,
ver::HistVec,
desc::Array{String,1},
used::Array{String,1},
docs::Array{String,1},
status::UInt8
)
return new(name, str, origin, source, contact, ver, desc, used, docs, status)
end
end
@doc """
formats[fmt]
Show resources for working with data format `fmt`. Returned are:
* name: Data format name
* read_data: String(s) to pass to `read_data` for this format, separated by commas
* origin: Where the data format was created
* source: Where to download external source code for working with data in this format
* contact: Whom to contact with questions
* description: Description of the data format's purpose and any notable issues
* used by: Where the data format is typically encountered
* status: Whether the data format is still in use
* versions: Notable versions or revisions to the standard
* documentation: Any useful documentation related to understanding a format
formats["list"]
List formats with entries.
### How to read "status:"
* "in use" means that a data format is currently used to read, transmit, or archive new data from modern geophysical instruments.
* "legacy" means that a data format is maintained by an official authority but no longer being actively developed or used by new equipment.
* "abandoned" means that a data format is no longer maintained.
### Notes
* `:used` lists locations, programs, and research areas where the format is (or was) used.
* `:ver` format is `number`, `date`, `read/write`. In the third field, "r" means read support, "rw" means read and write support, "-" means no support.
""" formats
const formats = Dict{String, Union{Array{String,1}, FormatDesc}}()
function show(io::IO, F::FormatDesc)
p = 17
println("")
printstyled(lpad("name:", p), color=:cyan, bold=true)
printstyled(" "*getfield(F, :name)*"\n", bold=true)
printstyled(lpad("string:", p), color=:cyan)
printstyled(" "*getfield(F, :str)*"\n")
for i in (:origin, :source, :contact)
printstyled(lpad(string(i)*":", p), color=:cyan)
println(" ", getfield(F, i))
end
printstyled(lpad("description:\n", p+1), color=:cyan)
for i in F.desc
println(" "^(p+1), i)
end
printstyled(lpad("used by:\n", p+1), color=:cyan)
for i in F.used
println(" "^(p+1), i)
end
printstyled(lpad("status:\n", p+1), color=:cyan)
println(" "^(p+1), FmtStatus[F.status])
printstyled(lpad("versions:\n", p+1), color=:cyan)
for i in F.ver
println(" "^(p+1), i.v, ", ", i.d, ", ", i.s == nothing ? "-" : i.s == true ? "rw" : "r")
end
printstyled(lpad("documentation:", p), color=214, bold=true)
println("")
for i in F.docs
println(" "^(p+1), i)
end
end
show(F::FormatDesc) = show(stdout, F)
include("FormatGuide/formats_list.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 578 | module Nodal
using Dates, LinearAlgebra, Mmap, SeisIO, SeisIO.FastIO
path = Base.source_dir()
# Imports
include("Nodal/imports.jl")
# Constants
include("Nodal/Types/TDMSbuf.jl")
include("Nodal/constants.jl")
# Types
include("Nodal/Types/NodalData.jl")
include("Nodal/Types/NodalChannel.jl")
# Formats
for i in ls(path*"/Nodal/Formats/")
if endswith(i, ".jl")
include(i)
end
end
# Utils
for i in ls(path*"/Nodal/Utils/")
if endswith(i, ".jl")
include(i)
end
end
# Wrappers
include("Nodal/Wrappers/read_nodal.jl")
# Exports
include("Nodal/exports.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1348 | module Quake
using Blosc, Dates, DSP, LightXML, LinearAlgebra, Printf, SeisIO, SeisIO.FastIO, Sockets
using HTTP: request, Messages.statustext
Blosc.set_compressor("lz4")
Blosc.set_num_threads(Sys.CPU_THREADS)
path = Base.source_dir()
const tracefields = (:az, :baz, :dist, :id, :loc, :fs, :gain, :misc, :name, :notes, :pha, :resp, :src, :t, :units, :x)
const loc_qual_fields = (:se, :gap, :dmin, :dmax)
const loc_qual_names = ("standardError", "azimuthalGap", "minimumDistance", "maximumDistance")
# imports
include("Quake/imports.jl")
# types for earthquake data
include("Quake/Types/EQLoc.jl")
include("Quake/Types/EQMag.jl")
include("Quake/Types/SourceTime.jl")
include("Quake/Types/SeisSrc.jl")
include("Quake/Types/SeisPha.jl")
include("Quake/Types/PhaseCat.jl")
include("Quake/Types/SeisHdr.jl")
include("Quake/Types/EventTraceData.jl")
include("Quake/Types/EventChannel.jl")
include("Quake/Types/SeisEvent.jl")
# formats
for i in ls(path*"/Quake/Formats/")
if endswith(i, ".jl")
include(i)
end
end
# processing
for i in ls(path*"/Quake/Processing/")
if endswith(i, ".jl")
include(i)
end
end
# utilities
for i in ls(path*"/Quake/Utils/")
if endswith(i, ".jl")
include(i)
end
end
# web
for i in ls(path*"/Quake/Web/")
if endswith(i, ".jl")
include(i)
end
end
# exports
include("Quake/exports.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 405 | module RandSeis
using Random, SeisIO, SeisIO.Quake
include("RandSeis/imports.jl")
if VERSION <= v"1.1.0"
include("RandSeis/constants_1.jl")
else
include("RandSeis/constants.jl")
end
include("RandSeis/iccodes_and_units.jl")
include("RandSeis/utils.jl")
include("RandSeis/randSeisChannel.jl")
include("RandSeis/randSeisData.jl")
include("RandSeis/randSeisEvent.jl")
include("RandSeis/exports.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 960 | module SEED
using Dates, Markdown, Mmap, Printf, SeisIO, SeisIO.FastIO, SeisIO.Formats
path = Base.source_dir()
const id_positions = Int8[11, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
const id_spacer = 0x2e
const steim = reverse(collect(0x00000000:0x00000002:0x0000001e), dims=1)
const responses = Dict{Int64, Any}()
const units_lookup = Dict{Int64, String}()
const comments = Dict{Int64, String}()
const abbrev = Dict{Int64, String}()
# imports
include("SEED/imports.jl")
# files that should be loaded in order
include("SEED/0_seed_read_utils.jl")
include("SEED/1_mSEEDblk.jl")
include("SEED/1_mSEEDdec.jl")
include("SEED/1_dataless_blk.jl")
include("SEED/2_parserec.jl")
# other
include("SEED/dataless.jl")
include("SEED/readmseed.jl")
include("SEED/seed_resp.jl")
include("SEED/seed_support.jl")
# Utils
for i in ls(path*"/SEED/Utils/")
if endswith(i, ".jl")
include(i)
end
end
# exports
include("SEED/exports.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 399 | module SUDS
using Mmap, SeisIO, SeisIO.FastIO, SeisIO.Quake
#=
Submodule for SUDS data format accessories.
=#
include("SUDS/imports.jl")
include("SUDS/SUDSbuf.jl")
include("SUDS/suds_const.jl")
include("SUDS/suds_structs.jl")
include("SUDS/suds_decode.jl")
include("SUDS/suds_aux.jl")
include("SUDS/read_suds.jl")
include("SUDS/desc.jl")
# exports
export formats, readsudsevt, suds_support
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 664 | module SeisHDF
using Dates, HDF5, SeisIO, SeisIO.FastIO, SeisIO.Quake
# imports
include("SeisHDF/imports.jl")
# constants
include("SeisHDF/constants.jl")
# auxiliary functions
include("SeisHDF/load_data.jl")
include("SeisHDF/save_data.jl")
include("SeisHDF/id_match.jl")
include("SeisHDF/get_trace_bounds.jl")
include("SeisHDF/asdf_aux.jl")
# readers
include("SeisHDF/read_asdf.jl")
include("SeisHDF/read_asdf_evt.jl")
# writers
include("SeisHDF/write_asdf.jl")
# wrappers
include("SeisHDF/read_hdf5.jl")
include("SeisHDF/write_hdf5.jl")
# scanners
include("SeisHDF/scan_hdf5.jl")
include("SeisHDF/asdf_qml.jl")
# exports
include("SeisHDF/exports.jl")
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 263 | module UW
using Mmap, SeisIO, SeisIO.FastIO, SeisIO.Quake
using Dates: DateTime
include("UW/imports.jl")
include("UW/uwdf.jl")
include("UW/uwpf.jl")
include("UW/uwevt.jl")
include("UW/desc.jl")
# exports
export formats, readuwevt, uwdf, uwdf!, uwpf, uwpf!
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 9697 | function get_sep(v_buf::Array{UInt8,1}, vi::Int8)
z = zero(Int8)
o = one(Int8)
i = z
while i < vi
i += o
y = getindex(v_buf, i)
if y != 0x20
return y
end
end
end
function geocsv_mkid(v_buf::Array{UInt8,1}, vi::Int8)
# SID
i = 0x00
while i < vi
i += 0x01
if getindex(v_buf, i) == 0x5f
setindex!(v_buf, 0x2e, i)
end
end
return String(v_buf[0x01:vi])
end
function geocsv_assign!(C::SeisChannel,
k_buf::Array{UInt8,1},
v_buf::Array{UInt8,1},
ki::Int8,
vi::Int8)
o = one(Int8)
k = String(k_buf[o:ki])
ptr = pointer(v_buf[o:vi])
if k == "sample_rate_hz"
setfield!(C, :fs, parse(Float64, unsafe_string(ptr)))
elseif k == "latitude_deg"
setfield!(C.loc, :lat, parse(Float64, unsafe_string(ptr)))
elseif k == "longitude_deg"
setfield!(C.loc, :lon, parse(Float64, unsafe_string(ptr)))
elseif k == "elevation_m"
setfield!(C.loc, :el, parse(Float64, unsafe_string(ptr)))
elseif k == "azimuth_deg"
setfield!(C.loc, :az, parse(Float64, unsafe_string(ptr)))
elseif k == "dip_deg"
setfield!(C.loc, :inc, 90.0-parse(Float64, unsafe_string(ptr)))
elseif k == "depth_m"
setfield!(C.loc, :dep, parse(Float64, unsafe_string(ptr)))
elseif k == "scale_factor"
setfield!(C, :gain, parse(Float64, unsafe_string(ptr)))
elseif k == "scale_frequency_hz"
C.misc[k] = parse(Float64, unsafe_string(ptr))
elseif k == "scale_units"
setfield!(C, :units, lowercase(unsafe_string(ptr)))
else
C.misc[k] = unsafe_string(ptr)
end
return nothing
end
function mkhdr(io::IO, c::UInt8, k_buf::Array{UInt8,1}, v_buf::Array{UInt8,1})
k = true
o = one(Int8)
i = zero(Int8)
j = zero(Int8)
# skip space after a new line
while c == 0x20; c = fastread(io); end
while c != 0x0a
if c == 0x23
c = fastread(io)
while c == 0x20
c = fastread(io)
end
# transition at 0x3a
elseif c == 0x3a && k == true
k = false
c = fastread(io)
while c == 0x20
c = fastread(io)
end
elseif k
i += o
setindex!(k_buf, c, i)
c = fastread(io)
else
j += o
setindex!(v_buf, c, j)
c = fastread(io)
end
end
return i,j
end
function read_geocsv_slist!(S::GphysData, io::IO)
o = one(Int16)
oo = one(Int64)
z = zero(Int16)
c = 0x00
j = z
sep = 0x2c
t = oo
t_old = oo
t_exp = 5
Δ = oo
Δ_gap = oo
i = oo
x = zero(Float32)
k = ""
v = ""
nx = zero(UInt64)
C = SeisChannel()
C.loc = GeoLoc()
X = Array{Float32,1}(undef, 0)
k_buf = Array{UInt8,1}(undef, 80)
v_buf = Array{UInt8,1}(undef, 80)
t_buf = Array{UInt8,1}(undef, 32)
t_ptr = pointer(t_buf)
reading_data = false
is_float = false
# Time structure
tm = TmStruct()
while !fasteof(io)
c = fastread(io)
# new line ----------------------------------
(c == 0x0a) && continue
# parse header ------------------------------
if c == 0x23
if reading_data == true
# '#' after newline
C.t = vcat(C.t, [i-1 zero(Int64)])
append!(C.x, X)
push!(S, C)
C = SeisChannel()
C.loc = GeoLoc()
i = oo
reading_data = false
end
(ki, vi) = mkhdr(io, c, k_buf, v_buf)
if ki == Int8(9)
sep = get_sep(v_buf, vi)
elseif ki == Int8(12)
if k_buf[1] == 0x73 && k_buf[2] == 0x61
nx = buf_to_uint(v_buf, vi)
else
geocsv_assign!(C, k_buf, v_buf, ki, vi)
end
elseif ki == Int8(3)
id = geocsv_mkid(v_buf, vi)
ii = findid(S, id)
if ii > 0
C = pull(S, ii)
else
C.id = id
end
Nt = size(C.t, 1)
if Nt > 0
t_old = endtime(C.t, round(Int64, sμ/C.fs))
i = C.t[Nt, 1] + 1
if C.t[Nt, 2] == 0
C.t = C.t[1:Nt-1,:]
end
else
t_old = oo
end
else
geocsv_assign!(C, k_buf, v_buf, ki, vi)
end
# any other character after newline
else
if reading_data == false
# transition from header to data
t_str = get(C.misc, "start_time", "1970-01-01T000000.000000Z")
t = String.(split(t_str, ('T', '.')))
ts = (Date(t[1]).instant.periods.value)*86400000000 +
div(Time(t[2]).instant.value, 1000) -
dtconst
#= weirdly, *much* more efficient than *either*
Array{Float32,1}(undef, 0) with push! or
Array{Float32,1}(undef, x) with setindex!
=#
X = Float32[]; sizehint!(X, nx)
Δ = round(Int64, sμ/getfield(C, :fs))
if isempty(C.t)
C.t = [1 ts]
else
C.t = vcat(C.t, [i ts-t_old-Δ])
end
reading_data = true
while is_u8_digit(c) == false
c = fastread(io)
end
end
x = stream_float(io, c)
push!(X, x)
i += 1
end
end
append!(C.x, X)
push!(S, C)
return nothing
end
function read_geocsv_tspair!(S::GphysData, io::IO)
o = one(Int16)
oo = one(Int64)
z = zero(Int16)
c = 0x00
j = z
sep = 0x2c
t = oo
t_old = oo
t_exp = 5
Δ = oo
Δ_gap = oo
i = oo
x = zero(Float32)
k = ""
v = ""
nx = zero(UInt64)
C = SeisChannel()
C.loc = GeoLoc()
X = Array{Float32,1}(undef, 0)
k_buf = Array{UInt8,1}(undef, 80)
v_buf = Array{UInt8,1}(undef, 80)
t_buf = Array{UInt8,1}(undef, 32)
t_ptr = pointer(t_buf)
read_state = 0x00
reading_data = false
is_float = false
# Time structure
tm = TmStruct()
# read_state:
# 0x00 new line
# 0x01 hdr (subroutine)
# 0x02 time
# 0x03 fractional-second
# 0x04 data
while !fasteof(io)
c = fastread(io)
# new line ----------------------------------
if c == 0x0a
read_state = 0x00
# No parsing of c
continue
end
# determine next read state -----------------
if read_state == 0x00
# '#' after newline
if c == 0x23
# transition from data to header
if reading_data == true
# finish current SeisChannel
C.t = vcat(C.t, [i-1 zero(Int64)])
append!(C.x, X)
push!(S, C)
C = SeisChannel()
C.loc = GeoLoc()
i = oo
reading_data = false
end
(ki, vi) = mkhdr(io, c, k_buf, v_buf)
if ki == Int8(9)
sep = get_sep(v_buf, vi)
elseif ki == Int8(12)
if k_buf[1] == 0x73 && k_buf[2] == 0x61
nx = buf_to_uint(v_buf, vi)
else
geocsv_assign!(C, k_buf, v_buf, ki, vi)
end
elseif ki == Int8(3)
id = geocsv_mkid(v_buf, vi)
ii = findid(S, id)
if ii > 0
C = pull(S, ii)
else
C.id = id
end
Nt = size(C.t, 1)
if Nt > 0
t_old = endtime(C.t, round(Int64, sμ/C.fs))
i = C.t[Nt, 1] + 1
if C.t[Nt, 2] == 0
C.t = C.t[1:Nt-1,:]
end
else
t_old = oo
end
else
geocsv_assign!(C, k_buf, v_buf, ki, vi)
end
read_state = 0x00
continue
# any other character after newline
else
read_state = 0x02
fill!(t_buf, 0x20)
j = z
# Requires transition from header to time
if reading_data == false
#= weirdly, *much* more efficient than *either*
Array{Float32,1}(undef, 0) with push! or
Array{Float32,1}(undef, x) with setindex!
=#
X = Float32[]; sizehint!(X, nx)
Δ = round(Int64, sμ/getfield(C, :fs))
Δ_gap = div(3*Δ,2)
# Flag that we're now reading data
reading_data = true
end
end
end
# 0x02 = time state -------------------------
# elseif read_state == 0x02
if read_state == 0x02
# '.'
if c == 0x2e
# only happens at a decimal in a time block
ccall(:strptime, Cstring, (Cstring, Cstring, Ref{TmStruct}), t_ptr, "%FT%T", tm)
t = (Sys.iswindows() ? ccall(:_mkgmtime, Int64, (Ref{TmStruct},), tm) : ccall(:timegm, Int64, (Ref{TmStruct},), tm))* 1000000
read_state = 0x03
continue
else
j += o
setindex!(t_buf, c, j)
end
# 0x03 = fractional-second state ------------
elseif read_state == 0x03
# ',', typically
if c == sep
t_exp = 5
j = z
if t-t_old > Δ_gap
if t_old == oo
C.t = vcat(C.t, [i t])
else
C.t = vcat(C.t, [i t-t_old-Δ])
end
end
t_old = t
read_state = 0x04
continue
# intent: ignore the time zone, users can fix manually
# zero out t_exp when we reach timezone crap
elseif c == 0x02b || c == 0x02d || c == 0x05a
t_exp = 0
elseif t_exp > oo
t += Int64(c-0x30)*10^t_exp
t_exp -= oo
else
continue
end
# 0x04 = data state -------------------------
elseif read_state == 0x04
x = stream_float(io, c)
push!(X, x)
read_state = 0x00
i += 1
else
error("indeterminate read state")
end
end
C.t = vcat(C.t, [i-1 zero(Int64)])
append!(C.x, X)
push!(S, C)
return nothing
end
function read_geocsv_file!(S::GphysData, fname::String, tspair::Bool, memmap::Bool)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
if tspair == true
read_geocsv_tspair!(S, io)
else
read_geocsv_slist!(S, io)
end
close(io)
return nothing
end
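# Usage sketch (illustrative): these readers are normally reached through
# read_data with the "geocsv" (time-sample pairs) or "geocsv.slist"
# (sample list) format strings, e.g.
#   S = read_data("geocsv", "data.csv")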
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1237 | function read_slist!(S::GphysData, fname::String, lennartz::Bool, memmap::Bool, strict::Bool, v::Integer)
# file read
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
hdr = readline(io)
mark(io)
nx = countlines(io)
reset(io)
X = Array{Float32,1}(undef, nx)
i = 0
while i < nx
i += 1
y = stream_float(io, 0x00)
setindex!(X, y, i)
end
close(io)
# header
if lennartz
id_sep = "."
h = split(hdr)
sta = replace(h[3], "\'" => "")
cmp = last(split(fname, id_sep))
id = *(id_sep, sta, id_sep, id_sep, cmp)
ts = (Date(h[8]).instant.periods.value)*86400000000 +
div(Time(h[9]).instant.value, 1000) -
dtconst
fs = 1000.0 / parse(Float64, h[5])
else
h = split(hdr, ',')
id = join(split_id(split(h[1])[2], c="_"), ".")
ts = 1000*DateTime(lstrip(h[4])).instant.periods.value - dtconst
fs = parse(Float64, split(h[3])[1])
end
# Check for existing channel with same fs
i = findid(S, id)
if strict
i = channel_match(S, i, fs)
end
if (i > 0)
check_for_gap!(S, i, ts, nx, v)
append!(S.x[i], X)
else
# New channel
push!(S, SeisChannel(id = id, fs = fs, t = mk_t(nx, ts), x = X))
end
return nothing
end
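# Usage sketch (illustrative): normally reached through read_data with the
# "slist" format string (or its Lennartz ASCII variant), e.g.
#   S = read_data("slist", "channel.ascii")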
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 157 | import Base.Libc: TmStruct
import SeisIO: buf_to_uint,
check_for_gap!,
dtconst,
endtime,
is_u8_digit,
mk_t,
split_id,
stream_float,
sμ,
μs
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 9572 | AH_fmt = FormatDesc(
"AH (Ad Hoc)",
"\"ah1\" (AH-1), \"ah2\" (AH-2)",
"Columbia University, New York, United States of America",
"ftp://www.orfeus-eu.org/pub/software/mirror/ldeo.columbia/ (defunct)",
"unknown",
HistVec(),
["machine-independent file format using External Data Representation (XDR)"],
["Comprehensive Nuclear Test Ban Tready (CTBT) monitoring",
"earthquake seismology",
"Lamont-Doherty Earth Observatory (Columbia University, NY, USA)",
"Borovoye Geophysical Observatory, Kazakhstan",
"native output format of CORAL by K. Creager (U. Washington, Seattle, WA, USA)"],
["none known"],
0x00
)
AH_fmt.ver = [ FmtVer(2.0, Date("1994-02-20"), false),
FmtVer(1.0, Date("1985-06-11"), false)
]
formats["ah1"] = AH_fmt
formats["ah2"] = AH_fmt
Bottle_fmt = FormatDesc(
"Bottle",
"\"bottle\"",
"UNAVCO, Boulder, Colorado, United States",
"(none)",
"[email protected]",
HistVec(),
["a portable, simple data format designed for short sequences of",
" single-channel time series data"],
["geodesy; raw format of PBO strain meter stations"],
["https://www.unavco.org/data/strain-seismic/bsm-data/lib/docs/bottle_format.pdf"],
0x00
)
Bottle_fmt.ver = [ FmtVer("1", "Unknown", false) ]
formats["bottle"] = Bottle_fmt
DATALESS_fmt = FormatDesc(
"Dataless SEED (instrument metadata) file",
"\"dataless\"",
"International Federation of Digital Seismograph Networks (FDSN)",
"(none)",
"[email protected]",
HistVec(),
["contains Volume, Abbreviation, and Station Control Headers.",
"no Time Span Control Headers or data records.",
"poorly documented; many critical caveats in notes or margins."],
["FDSN data standard; used worldwide"],
["http://www.fdsn.org/pdf/SEEDManual_V2.4.pdf"],
0x01
)
DATALESS_fmt.ver = [ FmtVer("2.4", Date("2012-08-01"), false),
FmtVer("2.3", Date("1992-12-31"), false),
FmtVer("2.2", Date("1991-08-31"), false),
]
formats["dataless"] = DATALESS_fmt
GeoCSV_fmt = FormatDesc(
"GeoCSV",
"\"geocsv\", \"geocsv.slist\"",
"Incorporated Research Institutions for Seismology (IRIS), Washington, DC, United States of America",
"(none)",
"[email protected]",
HistVec(),
["ASCII format intended for both human and machine readability.",
"single-column (geocsv.slist) and two-column (geoscv) formats."],
[ "(unknown)"],
["http://geows.ds.iris.edu/documents/GeoCSV.pdf",
"https://giswiki.hsr.ch/GeoCSV"],
0x01
)
GeoCSV_fmt.ver = [ FmtVer("2.0.4", "2015-07-21", false) ]
formats["geocsv"] = GeoCSV_fmt
formats["geocsv.slist"] = GeoCSV_fmt
formats["geocsv.tspair"] = GeoCSV_fmt
Lennartz_fmt = FormatDesc(
"Lennartz MarsLite ASCII",
"\"lenartzascii\"",
"Lennartz electronic GmbH, Tübingen, Germany",
"(none)",
"[email protected]",
HistVec(),
["SLIST (single-column ASCII) variant recorded by Lennartz MarsLite digitizers."],
[ "Lennartz"],
["(none)"],
0x01
)
Lennartz_fmt.ver = [ FmtVer("", "", false) ]
formats["lenartzascii"] = Lennartz_fmt
mSEED_fmt = FormatDesc(
"SEED (Standard for the Exchange of Earthquake Data)",
"\"mseed\"",
"International Federation of Digital Seismograph Networks (FDSN)",
"(no source code)",
"[email protected]",
HistVec(),
["an omnibus seismic data standard for data archival and detailed network",
" and instrument descriptions",
"mini-SEED is a data-only variant that uses only data blockettes",
"official manual is poorly organized and incomplete at 224 pages length,",
" 7 years since last update",
],
["FDSN data standard; used worldwide"],
["http://www.fdsn.org/pdf/SEEDManual_V2.4.pdf"],
0x01
)
mSEED_fmt.ver = [ FmtVer("2.4", Date("2012-08-01"), false) ]
formats["mseed"] = mSEED_fmt
RESP_fmt = FormatDesc(
"SEED RESP (instrument response) file",
"\"resp\"",
"Incorporated Research Institutions for Seismology (IRIS), Washington, DC, United States of America",
"(no known source code)",
"[email protected]",
HistVec(),
["ASCII instrument responses in a format compatible with SEED blockettes",
"extremely self-incompatible and easy to break, even compared to SEED",
"no low-level format description or API is known to exist"
],
["people who like making extra work for themselves"],
["https://ds.iris.edu/ds/nodes/dmc/data/formats/resp/"],
0x01
)
formats["resp"] = RESP_fmt
SAC_fmt = FormatDesc(
"SAC (Seismic Analysis Code)",
"\"sac\"",
"Lawrence Livermore National Laboratory (LLNL), Livermore, California, United States of America",
"https://ds.iris.edu/ds/nodes/dmc/software/downloads/sac/101-6a/",
"Brian Savage, University of Rhode Island (URI) / Arthur Snoke, Department of Geosciences at Virginia Tech (VT)",
HistVec(),
["machine-independent format for storing geophysical data at 32-bit precision",
"SAC software has distribution restrictions; see https://www.ecfr.gov/cgi-bin/retrieveECFR?n=15y2.1.3.4.30",
],
[ "US Geological Survey (USGS), United States of America",
"Incorporated Research Institutions for Seismology (IRIS), Washington, DC, United States of America",
"widely used in North America, South America, and Japan"
],
["http://ds.iris.edu/files/sac-manual/manual/file_format.html (complete and verified)" ,
],
0x01
)
SAC_fmt.ver = [ FmtVer("101.6a", Date("2012-01-01"), true) ]
formats["sac"] = SAC_fmt
SACPZ_fmt = FormatDesc(
"SACPZ (Seismic Analysis Code Poles and Zeros file)",
"\"sacpz\"",
"Lawrence Livermore National Laboratory (LLNL), Livermore, California, United States of America",
"(no source code)",
"Brian Savage, University of Rhode Island (URI) / Arthur Snoke, Department of Geosciences at Virginia Tech (VT)",
HistVec(),
["ASCII pole-zero file format intended to describe seismic instrument response",
],
[ "US Geological Survey (USGS), United States of America",
"Incorporated Research Institutions for Seismology (IRIS), Washington, DC, United States of America",
"widely used in North America, South America, and Japan"
],
["https://service.iris.edu/irisws/sacpz/docs/1/help/" ,
],
0x01
)
SACPZ_fmt.ver = [ FmtVer("101.6a", Date("2012-01-01"), true) ]
formats["sacpz"] = SACPZ_fmt
SEGY_fmt = FormatDesc(
"SEG Y",
"\"segy\" (SEG Y 1.0 or SEG Y rev 1), \"passcal\" (PASSCAL SEG Y)",
"Society of Exploration Geophysicists, Tulsa, Oklahoma, United States",
"(no source code)",
"https://seg.org/Publications/SEG-Technical-Standards",
HistVec(),
["machine-independent open-standard format for storing geophysical data",
"SEG Y rev 1 and earlier have very few required header variables,",
" creating self-incompatibility issues.",
"PASSCAL is a SEG Y variant with no file header, developed by PASSCAL",
" and New Mexico Tech (USA), used with their equipment through late 2000s."
],
[ "widely used in exploration geophysics",
"petroleum and gas industry",
"some nodal array data",
"Portable Array Seismic Studies of the Continental Lithosphere (PASSCAL) Instrument Center, Socorro, New Mexico, USA",
"New Mexico Institute of Mining and Technology, Socorro, New Mexico, USA"
],
["https://en.wikipedia.org/wiki/SEG-Y",
"https://seg.org/Portals/0/SEG/News%20and%20Resources/Technical%20Standards/seg_y_rev2_0-mar2017.pdf",
"https://www.passcal.nmt.edu/content/passcal-seg-y-trace-header (PASSCAL SEG Y)"
],
0x01
)
SEGY_fmt.ver = [ FmtVer("rev 2", Date("2017-03-01"), nothing),
FmtVer("rev 1", Date("2002-05-01"), false),
FmtVer("PASSCAL", "199?-??-??", false),
FmtVer(1.0, Date("1974-04-01"), false),
]
formats["segy"] = SEGY_fmt
formats["passcal"] = SEGY_fmt
SLIST_fmt = FormatDesc(
"SLIST (ASCII sample list)",
"\"slist\"",
"unknown",
"(no source code)",
"unknown",
HistVec(),
["A one-line ASCII header followed by numbers stored as ASCII strings"],
["unknown"],
["unknown"],
0x00
)
formats["slist"] = SLIST_fmt
SXML_fmt = FormatDesc(
"FDSN Station XML",
"\"sxml\"",
"International Federation of Digital Seismograph Networks (FDSN)",
"(no source code)",
"[email protected]",
HistVec(),
["XML representation of important common structures in SEED 2.4 metadata."],
["FDSN data standard; used worldwide"],
["http://www.fdsn.org/xml/station/",
"http://www.fdsn.org/xml/station/fdsn-station-1.1.xsd",
"http://www.fdsn.org/pdf/SEEDManual_V2.4.pdf"],
0x01
)
formats["FDSN Station XML"] = SXML_fmt
WIN_fmt = FormatDesc(
"WIN",
"\"win32\"",
"Earthquake Research Institute, University of Tokyo, Japan",
"http://wwweic.eri.u-tokyo.ac.jp/WIN/pub/win/ (in Japanese)",
"(unknown)",
HistVec(),
["format for storing multiplexed seismic data in one-minute chunks",
"each data file divides data into one-second segments by channel",
" stored as variable-precision delta-encoded integers",
"channel information must be retrieved from an external file",
"channel files are not strictly controlled by a central authority and",
" inconsistencies in channel parameters have been found by SeisIO developers."
],
[ "used throughout Japan",
"Earthquake Research Institute, University of Tokyo, Japan",
"National Research Institute for Earth Science and Disaster Resilience (NIED), Japan"
],
["http://wwweic.eri.u-tokyo.ac.jp/WIN/ (in Japanese)",
"https://hinetwww11.bosai.go.jp/auth/?LANG=en (login required)"],
0x01
)
WIN_fmt.ver = [ FmtVer("3.0.2", Date("2017-11-20"), false) ]
formats["Win32"] = WIN_fmt
const LEAD_IN_LENGTH = 0x1c
const DECIMATE_MASK = 0b00100000
const convertible_fields = (:id, :name, :loc, :fs, :gain, :resp, :units, :src, :notes, :misc)
const nodalfields = (:id, :loc, :fs, :gain, :misc, :name, :notes, :resp, :src, :units, :t)
const tdms_dtos = round(Int64, d2u(DateTime("1904-01-01T00:00:00")))
const tdms_codes = Dict{UInt32, Type}(
0x00000000 => UInt8,
0x00000001 => Int8,
0x00000002 => Int16,
0x00000003 => Int32,
0x00000004 => Int64,
0x00000005 => UInt8,
0x00000006 => UInt16,
0x00000007 => UInt32,
0x00000008 => UInt64,
0x00000009 => Float32,
0x0000000a => Float64,
0x00000020 => Char,
0x00000021 => Bool,
0x00000044 => UInt64, # *** convert to date string
)
const unindexed_fields = (:n, :ox, :oy, :oz, :data, :info, :x)
const TDMS = TDMSbuf(
zero(UInt32),
zero(UInt64),
zero(UInt64),
zero(UInt32),
zero(Int64),
zero(Float64),
zero(Float64),
zero(Float64),
zero(Float64),
"",
Dict{String, Any}()
)
export NodalChannel,
NodalData,
info_dump,
read_nodal,
resample!,
resample
import Base: ==, -, *, +, append!, convert, copy, delete!, deleteat!,
firstindex, getindex, hash, in, isempty, isequal, lastindex, length,
push!, merge, merge!, read, setindex!, show, size, sizeof, sort!,
sort, summary, write
import SeisIO.Formats: formats, FmtVer, FormatDesc, HistVec
import SeisIO: BUF,
ChanSpec,
FloatArray,
KW,
NodalLoc,
TimeSpec,
checkbuf!,
checkbuf_strict!,
code2loctyp,
code2resptyp,
code2typ,
datafields,
default_fs,
default_gain,
do_trace,
dtchars,
dtconst,
fillx_i16_be!,
flat_resp,
getbandcode,
loctyp2code,
merge_ext!,
mk_t,
mkxstr,
parsetimewin,
proc_note!,
prune!,
read_misc,
read_string_vec,
resample!,
resample,
resptyp2code,
showloc_full,
show_os,
show_str,
show_t,
show_x,
sμ,
typ2code,
write_misc,
write_string_vec,
μs
Silixa_fmt = FormatDesc(
"Silixa TDMS",
"\"silixa\"",
"Silixa, Hertfordshire, UK",
"adapted from https://silixa.com/resources/software-downloads/",
"https://silixa.com/about-us/contact/",
HistVec(),
["Silixa variant on NI LabVIEW TDMS file format"],
["Silixa (nodal array data)"],
[""],
0xfd
)
Silixa_fmt.ver = [ FmtVer(1, "2018-06-28", false) ]
formats["Silixa TDMS"] = Silixa_fmt
function read_nodal_segy(fname::String,
nn::String,
s::TimeSpec,
t::TimeSpec,
chans::ChanSpec,
memmap::Bool)
# Preprocessing
(d0, d1) = parsetimewin(s, t)
t0 = DateTime(d0).instant.periods.value*1000 - dtconst
f = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
trace_fh = Array{Int16, 1}(undef, 3)
shorts = getfield(BUF, :int16_buf)
fhd = Dict{String,Any}()
# ww = Array{String, 1}(undef, 0)
# ==========================================================================
# Read file header
fhd["filehdr"] = fastread(f, 3200)
fhd["jobid"] = bswap(fastread(f, Int32))
fhd["lineid"] = bswap(fastread(f, Int32))
fhd["reelid"] = bswap(fastread(f, Int32))
fast_readbytes!(f, BUF.buf, 48)
fillx_i16_be!(shorts, BUF.buf, 24, 0)
fastskip(f, 240)
for i in 25:27
shorts[i] = read(f, Int16)
end
fastskip(f, 94)
# check endianness; can be inconsistent; 0x0400 is a kludge
# (stands for SEG Y rev 10.24, current is rev 2.0)
# if (unsigned(shorts[25]) > 0x0400) || (shorts[26] < zero(Int16)) || (shorts[26] > one(Int16)) || (shorts[27] < zero(Int16))
# shorts[25:27] .= bswap.(shorts[25:27])
# push!(ww, "Inconsistent file header endianness")
# end
# fastskip(f, 94)
# Process
nh = max(zero(Int16), getindex(shorts, 27))
fhd["exthdr"] = Array{String,1}(undef, nh)
[fhd["exthdr"][i] = fastread(f, 3200) for i in 1:nh]
for (j,i) in enumerate(String[ "ntr", "naux", "filedt", "origdt", "filenx",
"orignx", "fmt", "cdpfold", "trasort", "vsum",
"swst", "swen0", "swlen", "swtyp", "tapnum",
"swtapst", "swtapen", "taptyp", "corrtra", "bgainrec",
"amprec", "msys", "zupdn", "vibpol", "segyver",
"isfixed", "n_exthdr" ])
fhd[i] = shorts[j]
end
nt = getindex(shorts, 1)
trace_fh[1] = getindex(shorts,3)
trace_fh[2] = getindex(shorts,5)
trace_fh[3] = getindex(shorts,7)
# ==========================================================================
# initialize NodalData container; set variables
if isempty(chans)
chans = 1:nt
end
fs = sμ / Float64(trace_fh[1])
data = Array{Float32, 2}(undef, trace_fh[2], nt)
S = NodalData(data, fhd, chans, t0)
net = nn * "."
cha = string("..O", getbandcode(fs), "0")
# ==========================================================================
# Read traces
j = 0
for i = 1:nt
C = do_trace(f, false, true, 0x00, true, trace_fh)
if i in chans
j += 1
S.id[j] = string(net, lpad(i, 5, '0'), cha)
S.name[j] = string(C.misc["rec_no"], "_", i)
S.fs[j] = C.fs
S.gain[j] = C.gain
S.misc[j] = C.misc
S.t[j] = C.t
S.data[:, j] .= C.x
end
end
# TO DO: actually use s, t here
# ==========================================================================
# Cleanup
close(f)
resize!(BUF.buf, 65535)
fill!(S.fs, fs)
fill!(S.src, realpath(fname))
fill!(S.units, "m/m")
# Output warnings to STDOUT
# if !isempty(ww)
# for i = 1:length(ww)
# @warn(ww[i])
# end
# end
# S.info["Warnings"] = ww
return S
end
function tdms_header!(TDMS::TDMSbuf, io::IO, v::Integer)
fastskip(io, 4)
TDMS.flags = fastread(io, UInt32)
fastskip(io, 4)
TDMS.nsos = fastread(io, UInt64) + LEAD_IN_LENGTH
TDMS.rdos = fastread(io, UInt64) + LEAD_IN_LENGTH
TDMS.n_ch = fastread(io, UInt32) - 0x00000002
l_path = fastread(io, UInt32)
fastskip(io, 4 + l_path)
nprops = fastread(io, UInt32)
for i = 1:nprops
# Get variable name and code
L = fastread(io, UInt32)
name = String(fastread(io, L))
# Add dictionaries as needed
D = TDMS.hdr
if occursin(".", name)
k = String.(split(name, "."))
L = length(k)
for j in 1:L-1
if haskey(D, k[j])
D = D[k[j]]
else
D[k[j]] = Dict{String, Any}()
D = D[k[j]]
end
end
name = k[L]
end
pcode = fastread(io, UInt32)
(pcode == 0x00000000) && continue
# Undefined/bad codes
T = get(tdms_codes, pcode, nothing)
(T == nothing) && error("Undefined property type!")
if pcode == 0x00000044
# TDMS documentation has the order of these two variables reversed
nf = fastread(io, UInt64)
ns = fastread(io, Int64)
dt = tdms_dtos + ns + nf*2^(-64)
if name == "GPSTimeStamp"
TDMS.ts = round(Int64, sμ*dt)
else
D[name] = u2d(dt)
(v > 1) && println(name, " = ", D[name])
end
# Will be corrected to UTC by adding SystemInfomation.GPS.UTCOffset
elseif pcode == 0x00000020
# No documentation here either
nn = fastread(io, UInt32)
val = fastread(io, nn)
if name == "name"
TDMS.name = String(val)
else
D[name] = String(val)
(v > 1) && println(name, " = ", D[name])
end
else
if name == "SamplingFrequency[Hz]"
TDMS.fs = fastread(io, T)
elseif name == "Latitude"
TDMS.oy = fastread(io, T)
elseif name == "Longitude"
TDMS.ox = fastread(io, T)
elseif name == "Altitude"
TDMS.oz = fastread(io, T)
else
D[name] = fastread(io, T)
(v > 1) && println(name, " = ", D[name])
end
#=
In which direction is StartPosition[m]?
what about Start Distance (m)?
=#
end
end
return nothing
end
function read_silixa_tdms(file::String, nn::String, s::TimeSpec, t::TimeSpec, chans::ChanSpec, memmap::Bool, v::Integer)
io = memmap ? IOBuffer(Mmap.mmap(file)) : open(file, "r")
tdms_header!(TDMS, io, v)
# not documented; inferred from manufacturer's Matlab script
# uses general formula: mask = 2^(n-1); N & mask != mask
# N = UInt8(data_flags & 0b00100000)
# n = 6 (6th bit = 1 if data are decimated)
decimated = Bool(UInt8(TDMS.flags & DECIMATE_MASK) != 2^5)
# I don't think this is actually part of the standard TDMS data spec
# data header info
fastskip(io, fastread(io, UInt32)+8) # Group information
fastskip(io, fastread(io, UInt32)+4) # first channel path and length
DataType = fastread(io, UInt32)
fastskip(io, 4)
chunk_size = Int64(fastread(io, UInt32))
T = get(tdms_codes, DataType, nothing)
(T == nothing) && error("Unsupported data type!")
seg1_length = Int64(div(div(TDMS.nsos - TDMS.rdos, TDMS.n_ch), sizeof(T)))
if v > 0
println("data type = ", T,
", chunk size = ", chunk_size,
", channel length = ", seg1_length,
", decimated = ", decimated)
end
# parse start time to get zero-indexed si, ei in each channel
if (typeof(s) <: Real) && (typeof(t) <: Real)
si = max(round(Int64, TDMS.fs*s), 0)
ei = min(round(Int64, TDMS.fs*t), seg1_length) - 1
else
Δ = round(Int64, SeisIO.sμ / TDMS.fs)
if s == "0001-01-01T00:00:00"
s = u2d(TDMS.ts*μs)
elseif isa(s, Real)
s = u2d(TDMS.ts*μs + s)
end
if t == "9999-12-31T12:59:59"
t = u2d(TDMS.ts*μs + seg1_length/TDMS.fs)
elseif isa(t, Real)
t = u2d(TDMS.ts*μs + t)
end
(d0, d1) = parsetimewin(s, t)
t0 = DateTime(d0).instant.periods.value*1000 - dtconst
t1 = DateTime(d1).instant.periods.value*1000 - dtconst
(v > 2) && println("t0 = ", t0, ", t1 = ", t1)
si = max(div(t0-TDMS.ts, Δ), 0)
ei = min(div(t1-TDMS.ts, Δ), seg1_length) - 1
end
si = Int64(si)
ei = Int64(ei)
# this prints one-indexed forms
(v > 1) && println("reading from si = ", si+1, " to ei = ", ei+1)
# Chunk bounds
first_chunk = div(si, chunk_size) + 1
last_chunk = div(ei, chunk_size) + 1
n_chunks = last_chunk - first_chunk + 1
# Skip to start of data
nskip = TDMS.rdos - position(io)
if first_chunk > 1
chunk_os = (first_chunk-1)*chunk_size*sizeof(T)*TDMS.n_ch
nskip += chunk_os
(v > 2) && println("To skip: ", chunk_os, " bytes of chunks.")
end
if nskip != 0
(v > 0) && println("Skipping ", nskip, " bytes total.")
fastskip(io, nskip)
end
# Read Data
buf = Array{T, 2}(undef, TDMS.n_ch, chunk_size)
data = Array{Float32, 2}(undef, ei-si+1, TDMS.n_ch)
# ===================================================================
# Read chunks
# sj = starting index within chunk
# ej = ending index within chunk
j = 1
jmax = div(seg1_length, chunk_size)*chunk_size - si
for i in first_chunk:last_chunk
if j > jmax
if VERSION < v"1.4"
buf = Array{T, 2}(undef, TDMS.n_ch, rem(seg1_length, chunk_size))
read!(io, buf)
else
vbuf = view(buf, :, 1:rem(seg1_length, chunk_size))
read!(io, vbuf)
end
else
read!(io, buf)
end
sj = (i == first_chunk ? rem(si+1, chunk_size) : 1)
ej = (i == last_chunk ? rem(ei, chunk_size) + 1 : chunk_size)
nj = ej-sj+1
vx = view(data, j:j+nj-1, :)
vb = view(buf, :, sj:ej)
(v > 2) && println("chunk #", i, "/", n_chunks, ", sj = ", sj, ", ej = ", ej, ", j = ", j, ":", j+nj-1)
transpose!(vx, vb) # converts to Float32 if needed
j += nj
end
# Done with file
close(io)
# ----------------------------------------------------------------
# String values for :name, :id
name = (try
string(TDMS.hdr["SystemInfomation"]["OS"]["HostName"], "_",
TDMS.hdr["SystemInfomation"]["Devices0"]["Model"], "_",
TDMS.hdr["SystemInfomation"]["Devices1"]["Model"])
catch
""
end)
net = nn * "."
cha = string("..O", getbandcode(TDMS.fs), "0")
# -----------------------------------------------------------------
# Time values
utc_os = get(TDMS.hdr, "SystemInfomation.GPS.UTCOffset", zero(Float64))
ts = TDMS.ts + round(Int64, sμ*utc_os)
# =================================================================
# Parse to NodalData
if isempty(chans)
chans = 1:TDMS.n_ch
end
S = NodalData(data, TDMS.hdr, chans, ts)
fill!(S.fs, TDMS.fs)
fill!(S.src, realpath(file))
fill!(S.units, "m/m")
S.ox = TDMS.ox
S.oy = TDMS.oy
S.oz = TDMS.oz
for (i,j) in enumerate(chans)
S.id[i] = string(net, lpad(j, 5, '0'), cha)
S.name[i] = string(name, "_", j)
end
# =================================================================
return S
end
@doc (@doc NodalData)
mutable struct NodalChannel <: GphysChannel
ox::Float64 # origin x
oy::Float64 # origin y
oz::Float64 # origin z
id::String # id
name::String # name
loc::InstrumentPosition # loc
fs::Float64 # fs
gain::Float64 # gain
resp::InstrumentResponse # resp
units::String # units
src::String # src
misc::Dict{String,Any} # misc
notes::Array{String,1} # notes
t::Array{Int64,2} # time
x::FloatArray # data
function NodalChannel()
return new(
default_fs, default_fs, default_fs,
"",
"",
NodalLoc(),
default_fs,
default_gain,
PZResp(),
"",
"",
Dict{String,Any}(),
Array{String,1}(undef,0),
Array{Int64,2}(undef,0,2),
Array{Float32,1}(undef,0)
)
end
function NodalChannel(
ox::Float64,
oy::Float64,
oz::Float64,
id::String , # id
name::String , # name
loc::InstrumentPosition , # loc
fs::Float64 , # fs
gain::Float64 , # gain
resp::InstrumentResponse , # resp
units::String , # units
src::String , # src
misc::Dict{String,Any} , # misc
notes::Array{String,1} , # notes
t::Array{Int64,2} , # time
x::FloatArray
)
return new(ox, oy, oz, id, name, loc, fs, gain, resp, units, src, misc, notes, t, x)
end
end
function sizeof(S::NodalChannel)
s = 120
for f in nodalfields
v = getfield(S, f)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc
k = collect(keys(v))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(v)
s += sizeof(p)
if typeof(p) == Array{String,1}
s += sum([sizeof(j) for j in p])
end
end
end
end
return s
end
function getindex(S::NodalData, j::Int)
C = NodalChannel()
[setfield!(C, f, getfield(S,f)[j]) for f in nodalfields]
C.x = copy(S.data[:,j])
return C
end
function setindex!(S::NodalData, C::NodalChannel, j::Int)
[(getfield(S, f))[j] = getfield(C, f) for f in nodalfields]
S.data[:,j] .= C.x
S.x[j] = view(S.data, :, j)
return nothing
end
function isempty(C::NodalChannel)
q::Bool = C.gain == default_gain
for f in (:ox, :oy, :oz, :fs)
q = min(q, getfield(C, f) == default_fs)
(q == false) && return q
end
for f in (:id, :loc, :misc, :name, :notes, :resp, :src, :units, :t, :x)
q = min(q, isempty(getfield(C, f)))
(q == false) && return q
end
return q
end
function push!(S::NodalData, C::NodalChannel)
for f in nodalfields
push!(getfield(S, f), getfield(C, f))
end
S.data = hcat(S.data, C.x)
S.n += 1
resize!(S.x, S.n)
S.x[S.n] = view(S.data, :, S.n)
return nothing
end
function write(io::IO, S::NodalChannel)
write(io, S.ox) # ox
write(io, S.oy) # oy
write(io, S.oz) # oz
write(io, Int64(sizeof(S.id)))
write(io, S.id) # id
write(io, Int64(sizeof(S.name)))
write(io, S.name) # name
write(io, loctyp2code(S.loc))
write(io, S.loc) # loc
write(io, S.fs) # fs
write(io, S.gain) # gain
write(io, resptyp2code(S.resp))
write(io, S.resp) # resp
write(io, Int64(sizeof(S.units)))
write(io, S.units) # units
write(io, Int64(sizeof(S.src)))
write(io, S.src) # src
write_misc(io, S.misc) # misc
write_string_vec(io, S.notes) # notes
write(io, Int64(size(S.t,1)))
write(io, S.t) # t
write(io, typ2code(eltype(S.x)))
write(io, Int64(length(S.x)))
write(io, S.x) # x
return nothing
end
read(io::IO, ::Type{NodalChannel}) = NodalChannel(
fastread(io, Float64), # ox
fastread(io, Float64), # oy
fastread(io, Float64), # oz
String(fastread(io, fastread(io, Int64))), # id
String(fastread(io, fastread(io, Int64))), # name
read(io, code2loctyp(fastread(io))), # loc
fastread(io, Float64), # fs
fastread(io, Float64), # gain
read(io, code2resptyp(fastread(io))), # resp
String(fastread(io, fastread(io, Int64))), # units
String(fastread(io, fastread(io, Int64))), # src
read_misc(io, getfield(BUF, :buf)), # misc
read_string_vec(io, getfield(BUF, :buf)), # notes
read!(io, Array{Int64, 2}(undef, fastread(io, Int64), 2)), # t
read!(io, Array{code2typ(read(io, UInt8)), 1}(undef, read(io, Int64))), # x
)
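#=
Round-trip sketch for the writer/reader above; a minimal example assuming a
NodalChannel C obtained elsewhere (e.g. C = S[1] for some NodalData S), with an
IOBuffer standing in for a file:
io = IOBuffer()
write(io, C)
seekstart(io)
C2 = read(io, NodalChannel)
# C2 should reproduce C field-by-field
=#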
# This is type-stable for S = NodalData() but not for keyword args
@doc """
NodalData
SeisData variant for multichannel nodal array data.
NodalChannel
SeisChannel variant for a channel from a nodal array.
## Fields
| **Field** | **Description** |
|:-------|:------ |
| :n | Number of sensors |
| :ox | Origin longitude |
| :oy | Origin latitude |
| :oz | Origin elevation |
| :info | Critical array info, shared by all sensors. [^1] |
| :id | Channel id. Uses NET.STA.LOC.CHA format when possible |
| :name | Freeform channel name |
| :loc | Location (position) vector; only accepts NodalLoc |
| :fs | Sampling frequency in Hz |
| :gain | Scalar gain |
| :resp | Instrument response |
| :units | String describing data units. UCUM standards are assumed. |
| :src | Freeform string describing data source. |
| :misc | Dictionary for non-critical information. |
| :notes | Timestamped notes; includes automatically-logged information. |
| :t | Matrix of time gaps in integer μs, formatted [Sample# Length] |
| :data | Matrix underlying time-series data |
| :x | Views into :data corresponding to each channel |
[^1] Not present in, or retained by, NodalChannel objects.
See also: `SeisData`, `InstrumentPosition`, `InstrumentResponse`
""" NodalData
mutable struct NodalData <: GphysData
n::Int64
ox::Float64 # origin x
oy::Float64 # origin y
oz::Float64 # origin z
info::Dict{String,Any} # info
id::Array{String,1} # id
name::Array{String,1} # name
loc::Array{InstrumentPosition,1} # loc
fs::Array{Float64,1} # fs
gain::Array{Float64,1} # gain
resp::Array{InstrumentResponse,1} # resp
units::Array{String,1} # units
src::Array{String,1} # src
misc::Array{Dict{String,Any},1} # misc
notes::Array{Array{String,1},1} # notes
t::Array{Array{Int64,2},1} # time
data::AbstractArray{Float32, 2} # actual data
x::Array{FloatArray,1} # views to data
function NodalData()
return new(zero(Int64),
0.0, 0.0, 0.0,
Dict{String,Any}(),
Array{String,1}(undef,0),
Array{String,1}(undef,0),
Array{InstrumentPosition,1}(undef,0),
Array{Float64,1}(undef,0),
Array{Float64,1}(undef,0),
Array{InstrumentResponse,1}(undef,0),
Array{String,1}(undef,0),
Array{String,1}(undef,0),
Array{Dict{String,Any},1}(undef,0),
Array{Array{String,1},1}(undef,0),
Array{Array{Int64,2},1}(undef,0),
Array{Float32,2}(undef, 0, 0),
Array{FloatArray,1}(undef,0)
)
end
function NodalData( n::Int64,
ox::Float64,
oy::Float64,
oz::Float64,
info::Dict{String,Any} , # info
id::Array{String,1} , # id
name::Array{String,1} , # name
loc::Array{InstrumentPosition,1} , # loc
fs::Array{Float64,1} , # fs
gain::Array{Float64,1} , # gain
resp::Array{InstrumentResponse,1} , # resp
units::Array{String,1} , # units
src::Array{String,1} , # src
misc::Array{Dict{String,Any},1} , # misc
notes::Array{Array{String,1},1} , # notes
t::Array{Array{Int64,2},1} , # time
data::AbstractArray{Float32, 2} , # data
)
S = new(n, ox, oy, oz, info, id, name, loc, fs, gain, resp, units, src, misc, notes, t, data, Array{FloatArray, 1}(undef, n))
for i in 1:n
S.x[i] = view(S.data, :, i)
end
return S
end
function NodalData(data::AbstractArray{Float32, 2}, info::Dict{String, Any}, chans::ChanSpec, ts::Int64)
dims = size(data)
m = dims[1]
n₀ = dims[2]
if isempty(chans)
chans = 1:n₀
elseif isa(chans, Integer)
chans = [chans]
end
n = length(chans)
S = new(n,
zero(Float64),
zero(Float64),
zero(Float64),
deepcopy(info),
Array{String, 1}(undef, n),
Array{String, 1}(undef, n),
Array{InstrumentPosition, 1}(undef, n),
Array{Float64, 1}(undef, n),
Array{Float64, 1}(undef, n),
Array{InstrumentResponse, 1}(undef, n),
Array{String, 1}(undef, n),
Array{String, 1}(undef, n),
Array{Dict{String, Any}, 1}(undef, n),
Array{Array{String, 1}, 1}(undef, n),
Array{Array{Int64, 2}, 1}(undef, n),
data[:, chans],
Array{FloatArray, 1}(undef, n)
)
# Fill these fields with something to prevent undefined reference errors
fill!(S.id, "") # id
fill!(S.name, "") # name
fill!(S.src, "") # src
fill!(S.units, "") # units
fill!(S.fs, 0.0) # fs
fill!(S.gain, 1.0) # gain
t = mk_t(m, ts)
for i = 1:n
S.notes[i] = Array{String,1}(undef, 0) # notes
S.misc[i] = Dict{String,Any}() # misc
S.t[i] = copy(t) # t
S.x[i] = view(S.data, :, i) # x
S.loc[i] = NodalLoc() # loc
S.resp[i] = deepcopy(flat_resp) # resp
end
return S
end
end
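#=
Construction sketch using the (data, info, chans, ts) constructor above; the
matrix size, channel range, and start time are placeholders:
data = randn(Float32, 6000, 8)                   # 6000 samples, 8 channels
S = NodalData(data, Dict{String,Any}(), 1:8, 0)  # ts = 0 μs from the Unix epoch
fill!(S.fs, 1000.0)                              # fs is not set by the constructor
# S.x[i] is a view into column i of S.data; S.t[i] is built by mk_t(6000, 0)
=#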
function sizeof(S::NodalData)
s = 168 + sizeof(getfield(S, :data))
# The :info Dictionary uses only simple objects, no string arrays
k = collect(keys(S.info))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(S.info)
s += sizeof(p)
end
for f in nodalfields
V = getfield(S, f)
s += sizeof(V)
(f in unindexed_fields) && continue
for i = 1:S.n
v = getindex(V, i)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc
k = collect(keys(v))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(v)
s += sizeof(p)
if typeof(p) == Array{String,1}
s += sum([sizeof(j) for j in p])
end
end
end
end
end
return s
end
function write(io::IO, S::NodalData)
N = getfield(S, :n)
LOC = getfield(S, :loc)
RESP = getfield(S, :resp)
T = getfield(S, :t)
X = getfield(S, :data)
MISC = getfield(S, :misc)
NOTES = getfield(S, :notes)
codes = Array{UInt8,1}(undef, 2N+1) # sizeof(c) = 2N+1
L = Array{Int64,1}(undef, N) # sizeof(L) = 8N
# write begins ------------------------------------------------------
write(io, N)
p = fastpos(io)
fastskip(io, 10N+1)
write(io, S.ox, S.oy, S.oz) # ox, oy, oz
write_misc(io, S.info) # info
write_string_vec(io, S.id) # id
write_string_vec(io, S.name) # name
i = 0 # loc
while i < N
i = i + 1
loc = getindex(LOC, i)
setindex!(codes, loctyp2code(loc), i)
write(io, loc)
end
write(io, S.fs) # fs
write(io, S.gain) # gain
i = 0 # resp
while i < N
i = i + 1
resp = getindex(RESP, i)
setindex!(codes, resptyp2code(resp), N+i)
write(io, resp)
end
write_string_vec(io, S.units) # units
write_string_vec(io, S.src) # src
for i = 1:N; write_misc(io, getindex(MISC, i)); end # misc
for i = 1:N; write_string_vec(io, getindex(NOTES, i)); end # notes
i = 0 # t
while i < N
i = i + 1
t = getindex(T, i)
setindex!(L, size(t,1), i)
write(io, t)
end
sz = size(X)
write(io, Int64(sz[1]), Int64(sz[2])) # data
write(io, X)
setindex!(codes, typ2code(eltype(X)), 2N+1)
q = fastpos(io)
fastseek(io, p)
write(io, codes)
write(io, L)
fastseek(io, q)
# write ends ------------------------------------------------------
return nothing
end
function read(io::IO, ::Type{NodalData})
Z = getfield(BUF, :buf)
L = getfield(BUF, :int64_buf)
# read begins ------------------------------------------------------
N = fastread(io, Int64)
checkbuf_strict!(L, N)
fast_readbytes!(io, Z, 2N+1)
read!(io, L)
c1 = copy(Z[1:N])
c2 = copy(Z[N+1:2N])
y = code2typ(getindex(Z, 2N+1))
return NodalData(N,
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
read_misc(io, Z),
read_string_vec(io, Z),
read_string_vec(io, Z),
InstrumentPosition[read(io, code2loctyp(getindex(c1, i))) for i = 1:N],
fastread(io, Float64, N),
fastread(io, Float64, N),
InstrumentResponse[read(io, code2resptyp(getindex(c2, i))) for i = 1:N],
read_string_vec(io, Z),
read_string_vec(io, Z),
[read_misc(io, Z) for i = 1:N],
[read_string_vec(io, Z) for i = 1:N],
[read!(io, Array{Int64, 2}(undef, getindex(L, i), 2)) for i = 1:N],
read!(io, Array{y, 2}(undef, fastread(io, Int64), fastread(io, Int64)))
)
end
function show(io::IO, S::NodalData)
W = max(80, displaysize(io)[2]) - show_os
nc = getfield(S, :n)
w = min(W, 35)
N = min(nc, div(W-1, w))
M = min(N+1, nc)
println(io, "NodalData with ", nc, " channels (", N, " shown)")
F = fieldnames(NodalData)
for f in F
if ((f in unindexed_fields) == false) || (f == :x)
targ = getfield(S, f)
t = typeof(targ)
fstr = uppercase(String(f))
print(io, lpad(fstr, show_os-2), ": ")
if t == Array{String,1}
show_str(io, targ, w, N)
elseif f == :notes || f == :misc
show_str(io, String[string(length(getindex(targ, i)), " entries") for i = 1:M], w, N)
elseif f == :t
show_t(io, targ, w, N, S.fs)
elseif f == :x
x_str = mkxstr(N, getfield(S, :x))
show_x(io, x_str, w, N<nc)
else
show_str(io, String[repr("text/plain", targ[i], context=:compact=>true) for i = 1:M], w, N)
end
elseif f == :ox
print(io, "COORDS: X = ", repr("text/plain", getfield(S, f), context=:compact=>true), ", ")
elseif f == :oy
print(io, "Y = ", repr("text/plain", getfield(S, f), context=:compact=>true), ", ")
elseif f == :oz
print(io, "Z = ", repr("text/plain", getfield(S, f), context=:compact=>true), "\n")
elseif f == :info
print(io, " INFO: ", length(S.info), " entries\n")
end
end
return nothing
end
show(S::NodalData) = show(stdout, S)
function getindex(S::NodalData, J::Array{Int,1})
n = getfield(S, :n)
U = NodalData()
U.n = length(J)
# indexed fields
for f in nodalfields
setfield!(U, f, getindex(getfield(S, f), J))
end
# :data
U.data = S.data[:, J]
U.x = Array{FloatArray, 1}(undef, U.n)
for i in 1:U.n
U.x[i] = view(U.data, :, i)
end
# origin
for f in (:ox, :oy, :oz)
setfield!(U, f, getindex(getfield(S, f)))
end
U.info = copy(S.info)
return U
end
refresh_x!(S::NodalData) = ([S.x[i] = view(S.data, :, i) for i in 1:S.n]);
function setindex!(S::NodalData, U::NodalData, J::Array{Int,1})
typeof(S) == typeof(U) || throw(MethodError)
length(J) == U.n || throw(BoundsError)
# set indexed fields
for f in nodalfields
if (f in unindexed_fields) == false
setindex!(getfield(S, f), getfield(U, f), J)
end
end
# set :data
for (i,j) in enumerate(J)
S.data[:,j] .= U.data[:,i]
S.x[j] = view(S.data, :, j)
end
return nothing
end
setindex!(S::NodalData, U::NodalData, J::UnitRange) = setindex!(S, U, collect(J))
function sort!(S::NodalData; rev=false::Bool)
j = sortperm(getfield(S, :id), rev=rev)
for f in nodalfields
setfield!(S, f, getfield(S,f)[j])
end
# computationally expensive
S.data = S.data[:, j]
refresh_x!(S)
return nothing
end
# Append, add, delete, sort
function append!(S::NodalData, U::NodalData)
F = fieldnames(NodalData)
S.data = hcat(S.data, U.data)
for f in F
if (f in unindexed_fields) == false
append!(getfield(S, f), getfield(U, f))
end
end
# append views to S.x
resize!(S.x, S.n+U.n)
for i = S.n+1:S.n+U.n
S.x[i] = view(S.data, :, i)
end
# merge :info
merge!(S.info, U.info)
# increment S.n
S.n += U.n
return nothing
end
# ============================================================================
# deleteat!
function deleteat!(S::NodalData, j::Int)
for f in nodalfields
deleteat!(getfield(S, f), j)
end
S.data = S.data[:, setdiff(collect(1:S.n), j)]
S.n -= 1
refresh_x!(S)
return nothing
end
function deleteat!(S::NodalData, J::Array{Int,1})
sort!(J)
for f in nodalfields
deleteat!(getfield(S, f), J)
end
S.data = S.data[:, setdiff(collect(1:S.n), J)]
S.n -= length(J)
refresh_x!(S)
return nothing
end
mutable struct TDMSbuf
flags::UInt32
nsos::UInt64
rdos::UInt64
n_ch::UInt32
ts::Int64
fs::Float64
ox::Float64
oy::Float64
oz::Float64
name::String
hdr::Dict{String, Any}
TDMSbuf(
flags::UInt32,
nsos::UInt64,
rdos::UInt64,
n_ch::UInt32,
ts::Int64,
fs::Float64,
ox::Float64,
oy::Float64,
oz::Float64,
name::String,
hdr::Dict{String, Any}
) = new(flags, nsos, rdos, n_ch, ts, fs, ox, oy, oz, name, hdr)
end
function convert(::Type{NodalData}, S::T) where T <: GphysData
(T == NodalData) && (return deepcopy(S))
@assert minimum(S.fs) == maximum(S.fs)
if T != SeisData
S = convert(SeisData, S)
end
sync!(S, s="last", t="first")
L = [length(i) for i in S.x]
L0 = minimum(L)
@assert L0 == maximum(L)
# clear everything from buffer TDMS
reset_tdms()
ts = S.t[1][1,2]
data = Array{Float32, 2}(undef, L0, S.n)
for i in 1:S.n
data[:, i] .= S.x[i]
end
TD = NodalData(data, TDMS.hdr, 1:S.n, ts)
for f in convertible_fields
setfield!(TD, f, deepcopy(getfield(S, f)))
end
return TD
end
NodalData(S::T) where T <: GphysData = convert(NodalData, S)
function convert(::Type{SeisData}, TD::NodalData)
S = SeisData(getfield(TD, :n))
nx = size(TD.data, 1)
# convertible_fields plus :t are directly copied
for f in convertible_fields
setfield!(S, f, deepcopy(getfield(TD, f)))
end
setfield!(S, :t, deepcopy(getfield(TD, :t)))
# :x is set by copying from :data, to prevent GC problems if TD is cleared
for i in 1:S.n
S.x[i] = copy(TD.data[:, i])
end
return S
end
function convert(::Type{SeisChannel}, D::NodalChannel)
C = SeisChannel()
for f in datafields
setfield!(C, f, deepcopy(getfield(D, f)))
end
return C
end
function convert(::Type{NodalChannel}, C::T) where T<:GphysChannel
(T == NodalChannel) && (return deepcopy(C))
if T != SeisChannel
C = convert(SeisChannel, C)
end
D = NodalChannel()
for f in datafields
setfield!(D, f, deepcopy(getfield(C, f)))
end
return D
end
NodalChannel(C::T) where T <: GphysChannel = convert(NodalChannel, C)
push!(TD::NodalData, C::SeisChannel) = push!(TD, convert(NodalChannel, C))
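#=
Conversion sketch, assuming S is a SeisData object whose channels share one
sampling rate (convert sync!s the data and requires equal channel lengths):
N  = NodalData(S)           # SeisData -> NodalData; channels become columns of :data
S2 = convert(SeisData, N)   # NodalData -> SeisData; column data copied back to :x
=#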
function info_dump(D::Dict{String, Any}, level::Int)
p = 38-level
w = max(40, displaysize(stdout)[2]-40)
K = sort(collect(keys(D)))
subdicts = String[]
for k in K
if isa(D[k], Dict{String, Any})
push!(subdicts, k)
else
println(lpad(k, p), ": ", strip(string(D[k])))
end
end
for k in subdicts
(level > 0) && print(" "^(2*level))
printstyled(k * "\n", color=level+1, bold=true)
info_dump(D[k], level+1)
end
return nothing
end
info_dump(S::NodalData) = (printstyled(":info\n", color=14, bold=true); info_dump(S.info, 1))
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 190 | merge!(S::NodalData) = error("NodalData cannot be merged!")
# This is probably unreachable and perhaps should be deleted
merge_ext!(S::NodalData, Ω::Int64, rest::Array{Int64, 1}) = nothing
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1255 | function nodal_resample(x::AbstractArray{T,2}, fs_new::Float64, fs_old::Float64) where T <: AbstractFloat
r = fs_new/fs_old
Nrows, Ncols = size(x)
ei = ceil(Int64, Nrows * r)
# resize x if we're upsampling
if (r > 1.0)
x = vcat(x,zeros(eltype(x),ei-Nrows,Ncols))
end
# this op copies each column because of DSP type-instability
for i = 1:Ncols
x[1:ei,i] = resample(x[1:Nrows,i], r)
end
# resize S.x if we downsampled
if fs_new < fs_old
x = x[1:ei,:]
end
return x
end
function resample!(N::NodalData, f0::Float64)
all(N.fs .> 0.0) || error("Can't resample non-timeseries data!")
all(N.fs .== f0) && return nothing
@assert f0 > 0.0
proc_str = string("resample!(N, fs=",
repr("text/plain", f0, context=:compact=>true), ")")
N.data = nodal_resample(N.data,f0,N.fs[1])
nx_out = size(N.data,1)
refresh_x!(N)
desc_str = string("resampled from ", N.fs[1], " to ", f0, "Hz")
for i = 1:N.n
proc_note!(N, i, proc_str, desc_str)
N.fs[i] = f0
N.t[i][2] = nx_out
end
return nothing
end
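#=
Usage sketch, assuming N is a NodalData object sampled at 1000.0 Hz:
resample!(N, 500.0)        # downsample every channel of N.data in place to 500 Hz
N2 = resample(N, 2000.0)   # upsampled copy; N is unchanged
=#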
function resample(N::NodalData, f0::Float64)
U = deepcopy(N)
resample!(U, f0)
return U
end
function reset_tdms()
TDMS.flags = zero(UInt32)
TDMS.nsos = zero(UInt64)
TDMS.rdos = zero(UInt64)
TDMS.n_ch = zero(UInt32)
TDMS.ts = zero(Int64)
TDMS.fs = zero(Float64)
TDMS.ox = zero(Float64)
TDMS.oy = zero(Float64)
TDMS.oz = zero(Float64)
TDMS.name = ""
TDMS.hdr = Dict{String, Any}()
return nothing
end
@doc """
S = read_nodal(fmt, filestr [, keywords])
Read nodal data from file `filestr` into new NodalData object `S`.
## Keywords
|KW | Type | Default | Used By | Meaning |
|:--- |:--- |:--- |:--- |:--- |
| chans | ChanSpec | Int64[] | all | channel numbers to read in |
| nn | String | "N0" | all | network name in `:id` |
| s | TimeSpec | | silixa | start time [^1] |
| t | TimeSpec | | silixa | end time |
| v | Integer | 0 | silixa | verbosity |
[^1] Special behavior: Real values supplied to `s=` and `t=` are treated as seconds *from file begin*; most SeisIO functions treat Real as seconds relative to current time.
See also: `TimeSpec`, `parsetimewin`, `read_data`
""" read_nodal
function read_nodal(fmt::String, fstr::String;
chans ::ChanSpec = Int64[] , # channels to proess
memmap ::Bool = false , # use mmap? (DANGEROUS)
nn ::String = "N0" , # network name
s ::TimeSpec = "0001-01-01T00:00:00" , # Start
t ::TimeSpec = "9999-12-31T12:59:59" , # End or Length (s)
v ::Integer = KW.v , # verbosity
)
if fmt == "silixa"
S = read_silixa_tdms(fstr, nn, s, t, chans, memmap, v)
elseif fmt == "segy"
S = read_nodal_segy(fstr, nn, s, t, chans, memmap)
else
error("Unrecognized format String!")
end
return S
end
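#=
Usage sketch; the file names and network code are placeholders:
S1 = read_nodal("silixa", "DAS_test.tdms", nn="XX", s=0.0, t=60.0, chans=1:100)
S2 = read_nodal("segy", "nodal.segy", nn="XX")
=#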
export EQLoc,
EQMag,
EventChannel,
EventTraceData,
FDSNevq,
FDSNevt,
PhaseCat,
SeisEvent,
SeisHdr,
SeisPha,
SeisSrc,
SourceTime,
distaz!,
gcdist,
get_pha!,
read_qml,
show_phases,
write_qml
import Base: ==, -, *, +, append!, convert, copy, delete!, deleteat!,
firstindex, getindex, hash, in, isempty, isequal, lastindex, length, merge, merge!,
push!, read, setindex!, show, size, sizeof, sort!, sort, summary, write
import DSP: filtfilt
import ..SeisIO: BUF,
ChanOpts,
FDSNget!,
FiltDefs,
FloatArray,
InstrumentPosition,
KW,
SeisData,
checkbuf!,
checkbuf_8!,
checkbuf_strict!,
clear_notes!,
code2loctyp,
code2resptyp,
code2typ,
d2u,
datafields,
default_fs,
default_gain,
dtconst,
fdsn_chp,
fdsn_uhead,
get_http_req,
loctyp2code,
merge_ext!,
note!,
read_misc,
read_string_vec,
reset_sacbuf,
resptyp2code,
sac_nul_c,
sac_nul_d,
sac_nul_f,
sac_nul_i,
should_bswap,
show_os,
split_id,
str_trunc,
sμ,
tnote,
typ2code,
u2d,
unindexed_fields,
webhdr,
write_misc,
write_sac_channel,
write_string_vec,
writesac,
μs
function get_RealQuantity(xmle::LightXML.XMLElement, str::String)
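  # Reads a QuakeML RealQuantity child element of `xmle` named `str`, e.g.
  #   <latitude><value>46.2</value><uncertainty>0.01</uncertainty></latitude>
  # returns (value, uncertainty); absent children leave the 0.0 defaults below.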
dx = 0.0
x = 0.0
for z in child_elements(xmle)
if name(z) == str
for y in child_elements(z)
if name(y) == "value"
x = parse(Float64, content(y))
elseif name(y) == "uncertainty"
dx = parse(Float64, content(y))
end
end
end
end
return (x, dx)
end
function parse_qml(evt::XMLElement)
MT = Array{SeisSrc,1}(undef, 0)
evt_src = attribute(evt, "publicID")
evt_id = split(evt_src, r"[;:/=]")[end]
preferredOriginID = ""
preferredMagnitudeID = ""
preferredFocalMechanismID = ""
mags = Array{XMLElement,1}(undef,0)
origs = Array{XMLElement,1}(undef,0)
mechs = Array{XMLElement,1}(undef,0)
eqtype = ""
for ch in child_elements(evt)
cn = name(ch)
if cn == "preferredOriginID"
preferredOriginID = split(content(ch), r"[;:/=]")[end]
elseif cn == "preferredMagnitudeID"
preferredMagnitudeID = split(content(ch), r"[;:/=]")[end]
elseif cn == "preferredFocalMechanismID"
preferredFocalMechanismID = split(content(ch), r"[;:/=]")[end]
elseif cn == "type"
eqtype = content(ch)
elseif cn == "magnitude"
push!(mags, ch)
elseif cn == "origin"
push!(origs, ch)
elseif cn == "focalMechanism"
push!(mechs, ch)
end
end
# ==========================================================================
# Focal Mechanism
n = 0
while n < length(mechs)
n = n+1
auth = ""
mech = getindex(mechs, n)
np = zeros(Float64, 3, 2)
pax = zeros(Float64, 3, 3)
mt = zeros(Float64, 6)
dm = fill!(zeros(Float64, 6), Inf)
# generate SeisSrc object
S = SeisSrc()
ssrc = attribute(mech, "publicID")
setfield!(S, :id, String(split(ssrc, r"[;:/=]")[end]))
for child in child_elements(mech)
cname = name(child)
if cname == "creationInfo"
for grandchild in child_elements(child)
if name(grandchild) == "author"
auth = content(grandchild)
end
end
elseif cname == "azimuthalGap"
setfield!(S, :gap, parse(Float64, content(child)))
elseif cname == "stationPolarityCount"
setfield!(S, :npol, parse(Int64, content(child)))
elseif cname == "nodalPlanes"
j = 0
for np2 in child_elements(child) # nodalPlane1, nodalPlane2
if name(np2) != "preferredPlane"
for grandchild in child_elements(np2) # strike, dip, rake
for greatgrandchild in child_elements(grandchild)
if name(greatgrandchild) == "value"
j = j+1
setindex!(np, parse(Float64, content(greatgrandchild)), j)
end
end
end
end
end
S.misc["planes_desc"] = "strike, dip, rake"
elseif cname == "principalAxes"
j = 0
for grandchild in child_elements(child) # tAxis, pAxis, nAxis
for greatgrandchild in child_elements(grandchild) # azimuth, plunge, length
for redheadedstepchild in child_elements(greatgrandchild)
if name(redheadedstepchild) == "value"
j = j+1
setindex!(pax, parse(Float64, content(redheadedstepchild)), j)
end
end
end
end
S.misc["pax_desc"] = "azimuth, plunge, length"
elseif cname == "momentTensor"
S.misc["mt_id"] = attribute(child, "publicID")
for a in child_elements(child)
aname = name(a)
if aname == "scalarMoment"
setfield!(S, :m0, parse(Float64, content(a)))
elseif aname == "tensor"
j = 0
for k in ("Mrr", "Mtt", "Mpp", "Mrt", "Mrp", "Mtp")
j = j+1
(mt[j], dm[j]) = get_RealQuantity(a, k)
end
elseif aname == "sourceTimeFunction"
for b in child_elements(a)
if name(b) == "type"
S.st.desc = content(b)
elseif name(b) == "duration"
S.st.dur = parse(Float64, content(b))
elseif name(b) == "riseTime"
S.st.rise = parse(Float64, content(b))
elseif name(b) == "decayTime"
S.st.decay = parse(Float64, content(b))
end
end
elseif aname == "derivedOriginID"
S.misc["derivedOriginID"] = String(split(content(a), r"[;:/=]")[end])
S.misc["xmt_derivedOriginID"] = string(a)
else
S.misc["xmt_" * aname] = string(a)
end
end
elseif cname == "methodID"
S.misc["methodID"] = content(child)
else
S.misc["xmech_" * cname] = string(child)
end
end
src_str = ssrc * "," * auth
setfield!(S, :planes, np)
setfield!(S, :pax, pax)
setfield!(S, :mt, mt)
setfield!(S, :dm, dm)
setfield!(S, :src, src_str)
note!(S, "+origin ¦ " * src_str)
push!(MT, S)
end
# ==========================================================================
# Choose a focal mechanism to retain; then determine mag_id and loc_id
mag_id = ""
loc_id = ""
if length(MT) > 0
# Store moment tensor with lowest M_err
if isempty(preferredFocalMechanismID)
sort!(MT, by=x->sum(abs.(x.dm)))
R = getindex(MT, 1)
else
R = getindex(MT, findfirst([occursin(getfield(m, :id), preferredFocalMechanismID) for m in MT]))
end
# Set ID
if isempty(preferredMagnitudeID)
mag_id = get(getfield(R, :misc), "derivedOriginID", "")
else
mag_id = preferredMagnitudeID
end
loc_id = isempty(preferredOriginID) ? mag_id : preferredOriginID
else
R = SeisSrc()
end
# ==========================================================================
# Magnitude
H = SeisHdr()
setfield!(R, :eid, identity(getfield(H, :id)))
n = 0
oid = ""
pid = ""
auth = ""
m = -5.0f0
msc = ""
nst = 0
gap = 0.0
while n < length(mags)
n = n+1
mag = getindex(mags, n)
oid = ""
pid = ""
auth = ""
m = -5.0f0
msc = ""
nst = 0
gap = 0.0
# src
originID = get_elements_by_tagname(mag, "originID")
pid = attribute(mag, "publicID")
oid = isempty(originID) ? "" : content(first(originID))
# msc
mtype = get_elements_by_tagname(mag, "type")
msc = isempty(mtype) ? "" : content(first(mtype))
if occursin(oid, mag_id) || n == 1 || startswith(lowercase(msc), "mw")
for child in child_elements(mag)
cname = name(child)
if cname == "mag"
m = parse(Float32, content(first(get_elements_by_tagname(child, "value"))))
elseif cname == "stationCount"
nst = parse(Int64, content(child))
elseif cname == "azimuthalGap"
gap = parse(Float64, content(child))
elseif cname == "creationInfo"
for grandchild in child_elements(child)
if name(grandchild) == "author"
auth = content(grandchild)
end
end
end
end
end
end
MAG = EQMag(m, msc, nst, gap, pid * "," * oid * "," * auth)
if MAG.src == ",,"
MAG.src = ""
end
# ==========================================================================
# Location
n = 0
nst = 0
ot = "1970-01-01T00:00:00"
gap = 0.0
auth = ""
ltyp = ""
loc_src = ""
locflags = Array{Char, 1}(undef,8)
fill!(locflags, '0')
loc = zeros(Float64, 12)
while n < length(origs)
n = n+1
orig = getindex(origs, n)
# _______________________________________________________
# Only parse locations corresponding to a desirable ID
if occursin(attribute(orig, "publicID"), loc_id) || (n == 1)
loc_src = attribute(orig, "publicID")
# Reset temp variables
nst = 0
gap = 0.0
auth = ""
ltyp = ""
ot = "1970-01-01T00:00:00"
fill!(locflags, '0')
fill!(loc, zero(Float64))
# Try to set location first
j = 0
for str in ("latitude", "longitude", "depth")
j = j + 1
(loc[j], dloc) = get_RealQuantity(orig, str)
if j == 1
loc[5] = dloc
elseif j == 2
loc[4] = dloc
elseif j == 3
setindex!(loc, getindex(loc, 3)*0.001, 3)
loc[6] = dloc
end
end
# Now loop
for child in child_elements(orig)
cname = name(child)
#= originUncertainty removed 2019-11-01
No documentation of originUncertainty is known to exist;
can't ascertain intended meaning/use of originUncertainty;
can't tell if observatories use it uniformly
=#
if cname == "type"
ltyp_tmp = content(child)
if ltyp_tmp != ""
ltyp = ltyp_tmp
end
elseif cname == "time"
for grandchild in child_elements(child)
gcname = name(grandchild)
if gcname == "value"
ot = content(grandchild)
elseif gcname == "uncertainty"
loc[7] = parse(Float64, content(grandchild))
end
end
elseif cname == "quality"
for grandchild in child_elements(child)
gcname = name(grandchild)
if gcname == "standardError"
loc[8] = parse(Float64, content(grandchild))
elseif gcname == "azimuthalGap"
loc[10] = parse(Float64, content(grandchild))
elseif gcname == "minimumDistance"
loc[11] = parse(Float64, content(grandchild))
elseif gcname == "maximumDistance"
loc[12] = parse(Float64, content(grandchild))
elseif gcname == "associatedStationCount"
nst = parse(Int64, content(grandchild))
end
end
elseif cname == "epicenterFixed"
locflags[1] = content(child)[1]
locflags[2] = locflags[1]
elseif cname == "depthType"
if content(child) == "operator assigned"
locflags[3] = '1'
end
elseif cname == "timeFixed"
locflags[4] = content(child)[1]
elseif cname == "creationInfo"
for grandchild in child_elements(child)
if name(grandchild) == "author"
auth = content(grandchild)
end
end
elseif (cname in ("latitude", "longitude", "depth")) == false
H.misc["xloc_" * cname] = string(child)
end
end
# _______________________________________________________
end
end
LOC = EQLoc(loc..., nst, parse(UInt8, join(locflags), base=2), "", ltyp, "", loc_src * "," * auth )
setfield!(H, :id, String(evt_id))
setfield!(H, :loc, LOC)
setfield!(H, :ot, DateTime(replace(ot, r"[A-S,U-Z,a-z]" => "")[1:min(end,23)]))
setfield!(H, :mag, MAG)
setfield!(H, :typ, eqtype)
setfield!(H, :src, evt_src)
setfield!(R, :eid, String(evt_id))
note!(H, "+origin ¦ " * evt_src)
# Post-process: ensure these are empty if not initialized
if R.mt == zeros(Float64,6)
R.mt = Float64[]
end
if R.dm == Inf64.*ones(Float64,6)
R.dm = Float64[]
end
if R.pax == zeros(Float64, 3, 3)
R.pax = Array{Float64, 2}(undef, 0, 0)
end
if R.planes == zeros(Float64, 3, 2)
R.planes = Array{Float64, 2}(undef, 0, 0)
end
if R.src == ","
R.src = ""
end
return H, R
end
function event_xml!(EvCat::Array{SeisHdr,1}, EvSrc::Array{SeisSrc, 1}, xdoc::XMLDocument)
qxml = first(child_elements(root(xdoc)))
elts = child_elements(qxml)
for elt in elts
if name(elt) == "event"
H, R = parse_qml(elt)
push!(EvCat, H)
push!(EvSrc, R)
end
end
return nothing
end
"""
EvCat, EvSrc = read_qml(fpat::String)
Read QuakeML files matching string pattern `fpat`. Returns an array of `SeisHdr`
objects as `EvCat` and an array of `SeisSrc` objects as `EvSrc`, such that
`EvCat[i]` and `EvSrc[i]` describe the preferred location (origin) and
preferred event source (focal mechanism or moment tensor) of event `i`.
"""
function read_qml(fpat::String)
files = safe_isfile(fpat) ? [fpat] : ls(fpat)
EvCat = Array{SeisHdr,1}()
EvSrc = Array{SeisSrc,1}()
for file in files
xdoc = parse_file(file)
k = length(EvCat)
event_xml!(EvCat, EvSrc, xdoc)
L = length(EvCat)
fstr = abspath(file)
for i in k+1:L
setfield!(EvCat[i], :src, fstr)
note!(EvCat[i], "+source ¦ " * fstr)
setfield!(EvSrc[i], :src, fstr)
note!(EvSrc[i], "+source ¦ " * fstr)
end
free(xdoc)
end
return EvCat, EvSrc
end
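#=
Usage sketch; the file pattern is a placeholder:
H, R = read_qml("events/*.xml")
H[1].loc, H[1].mag     # preferred origin and magnitude of the first event
R[1].mt                # moment tensor of the first event, if present
=#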
function is_qml(k::String, s::String)
(startswith(s, "<") && endswith(s, ">")) || return false
i1 = first(findnext("<", s,1))
j1 = last(findnext(">", s,1))
s1 = s[nextind(s, i1):prevind(s, j1)]
(split(k, "_")[2] == s1) || return false
i2 = first(findlast("<", s))
j2 = last(findlast(">", s))
s2 = s[nextind(s, i2):prevind(s, j2)]
("/" * s1) == s2 || return false
return true
end
function new_qml!(io::IO)
write(io, "<?xml version=\"1.0\" encoding=\"UTF-8\"?>
<quakeml xmlns=\"http://quakeml.org/xmlns/quakeml/1.2\">\n <eventParameters publicID=\"smi:SeisIO.jl\">\n <creationInfo>\n <agencyID>SeisIO</agencyID>\n <creationTime>")
print(io, now())
write(io, "</creationTime>\n </creationInfo>\n")
return nothing
end
function write_misc(io::IO, D::Dict{String,Any}, pref::String, p::Int64)
for k in keys(D)
if startswith(k, pref)
xv = D[k]
if is_qml(k, xv)
write(io, " "^2p)
write(io, xv)
write(io, "\n")
end
end
end
return nothing
end
function write_real(io::IO, str::String, x::Union{DateTime,AbstractFloat}, p::Int64)
write(io, " "^2p, "<")
write(io, str)
write(io, ">\n", " "^(2p+2), "<value>")
print(io, x)
write(io, "</value>\n", " "^2p, "</")
write(io, str)
write(io, ">\n")
return nothing
end
function write_real(io::IO, str::String, x::Union{DateTime,AbstractFloat}, dx::AbstractFloat, p::Int64)
write(io, " "^2p, "<")
write(io, str)
write(io, ">\n", " "^(2p+2), "<value>")
print(io, x)
write(io, "</value>\n", " "^(2p+2), "<uncertainty>")
print(io, dx)
write(io, "</uncertainty>\n", " "^2p, "</")
write(io, str)
write(io, ">\n")
return nothing
end
function write_pax(io::IO, pax::Array{Float64,2})
str = ("t", "p", "n")
nP = size(pax, 2)
write(io, " <principalAxes>\n")
for i in 1:nP
write(io, " <", str[i], "Axis>\n")
write(io, " <azimuth>\n <value>")
print(io, pax[1,i])
write(io, "</value>\n </azimuth>\n")
write(io, " <plunge>\n <value>")
print(io, pax[2,i])
write(io, "</value>\n </plunge>\n")
write(io, " <length>\n <value>")
print(io, pax[3,i])
write(io, "</value>\n </length>\n")
write(io, " </", str[i], "Axis>\n")
end
write(io, " </principalAxes>\n")
return nothing
end
function write_qml!(io::IO, HDR::Array{SeisHdr,1}, SRC::Array{SeisSrc,1}, v::Integer)
Ri = zeros(Int64, length(HDR))
R_id = Array{String,1}(undef, length(SRC))
for i in 1:length(SRC)
R_id[i] = getfield(SRC[i], :eid)
end
for i in 1:length(HDR)
id = getfield(HDR[i], :id)
for j in 1:length(R_id)
if id == R_id[j]
Ri[i] = j
break
end
end
end
for i in 1:length(HDR)
H = HDR[i]
(v > 0) && println("Writing event ", H.id)
i_id = findfirst([occursin("origin", n) for n in H.notes])
id_str = String(split(i_id == nothing ? " ¦ ¦ " : H.notes[i_id], " ¦ ")[end])
write(io, " <event publicID=\"")
print(io, id_str)
write(io, "\">\n")
if isempty(H.loc)
(v > 0) && println(" Skipped location (H.loc empty)")
else
loc_orig, loc_auth, xx = split_id(H.loc.src, c=",")
write(io, " <preferredOriginID>")
write(io, H.id)
write(io, "</preferredOriginID>\n <type>")
write(io, H.typ)
write(io, "</type>\n")
# ---------------------------------------------------
# Origin
write(io, " <origin publicID=\"", loc_orig, "\">\n")
if H.loc.dt > 0.0
write_real(io, "time", H.ot, H.loc.dt, 4)
else
write_real(io, "time", H.ot, 4)
end
L = H.loc
# lat, lon, dep
if L.dy == 0.0
write_real(io, "latitude", L.lat, 4)
else
write_real(io, "latitude", L.lat, L.dy, 4)
end
if L.dx == 0.0
write_real(io, "longitude", L.lon, 4)
else
write_real(io, "longitude", L.lon, L.dx, 4)
end
if L.dz == 0.0
write_real(io, "depth", L.dep*1000.0, 4)
else
write_real(io, "depth", L.dep*1000.0, L.dz, 4)
end
# flags
flags = falses(4)
for n = 1:4
flags[n] = >>(<<(H.loc.flags, n-1),7)
end
if flags[3]
write(io, " <depthType>operator assigned</depthType>\n")
end
if flags[4]
write(io, " <timeFixed>1</timeFixed>\n")
end
if flags[1] || flags[2]
write(io, " <epicenterFixed>1</epicenterFixed>\n")
end
# Location quality
do_qual = false
for f in loc_qual_fields
if getfield(L, f) != 0.0
do_qual = true
break
end
end
if do_qual || (L.nst > 0)
write(io, " <quality>\n")
if L.nst > 0
write(io, " <associatedStationCount>")
print(io, L.nst)
write(io, "</associatedStationCount>\n")
end
for (i,f) in enumerate(loc_qual_fields)
j = getfield(L, f)
if j != 0.0
write(io, " <")
write(io, loc_qual_names[i])
write(io, ">")
print(io, j)
write(io, "</")
write(io, loc_qual_names[i])
write(io, ">\n")
end
end
write(io, " </quality>\n")
end
if !isempty(L.typ)
write(io, " <type>")
print(io, L.typ)
write(io, "</type>\n")
end
# Author
if !isempty(loc_auth)
write(io, " <creationInfo>\n <author>")
print(io, loc_auth)
write(io, "</author>\n </creationInfo>\n")
end
# other location properties
write_misc(io, H.misc, "xloc_", 4)
# done Origin
write(io, " </origin>\n")
end
# ---------------------------------------------------
# Focal Mechanism
if Ri[i] == 0
(v > 0) && println(" Skipped focal mechanism (no R.eid matches)\n Skipped moment tensor (fields empty)")
else
j = Ri[i]
R = SRC[j]
i_src = findfirst([occursin("origin", n) for n in R.notes])
src_str = String(split(i_src == nothing ? " ¦ ¦ " : R.notes[i_src], " ¦ ")[end])
foc_orig, foc_auth, xx = split_id(src_str, c=",")
write(io, " <focalMechanism publicID=\"")
write(io, foc_orig)
write(io, "\">\n")
# Nodal planes
if !isempty(R.planes)
write(io, " <nodalPlanes>\n")
nP = size(R.planes, 2)
for i in 1:nP
write(io, " <nodalPlane")
print(io, i)
write(io, ">\n")
write(io, " <strike>\n <value>")
print(io, R.planes[1,i])
write(io, "</value>\n </strike>\n")
write(io, " <dip>\n <value>")
print(io, R.planes[2,i])
write(io, "</value>\n </dip>\n")
write(io, " <rake>\n <value>")
print(io, R.planes[3,i])
write(io, "</value>\n </rake>\n")
write(io, " </nodalPlane")
print(io, i)
write(io, ">\n")
end
write(io, " </nodalPlanes>\n")
end
# Principal axes
if !isempty(R.pax)
write_pax(io, R.pax)
end
# Azimuthal gap
if R.gap != 0.0
write(io, " <azimuthalGap>")
print(io, R.gap)
write(io, "</azimuthalGap>\n")
end
# Polarity count
if R.npol != 0
write(io, " <stationPolarityCount>")
print(io, R.npol)
write(io, "</stationPolarityCount>\n")
end
# methodID
if haskey(R.misc, "methodID")
write(io, " <methodID>")
write(io, get(R.misc, "methodID", ""))
write(io, "</methodID>\n")
end
# Author
if !isempty(foc_auth)
write(io, " <creationInfo>\n <author>")
write(io, foc_auth)
write(io, "</author>\n </creationInfo>\n")
end
# Moment Tensor
if (isempty(R.mt) && (R.m0 == 0.0) && isempty(R.st))
(v > 0) && println(" Skipped moment tensor (fields empty)")
else
write(io, " <momentTensor publicID=\"")
mt_id = haskey(R.misc, "mt_id") ? R.misc["mt_id"] : "smi:SeisIO/moment_tensor;fmid=" * R.id
write(io, mt_id)
write(io, "\">\n")
if R.m0 != 0.0
write_real(io, "scalarMoment", R.m0, 5)
end
if isempty(R.mt) == false
write(io, " <tensor>\n")
mt_strings = ("Mrr", "Mtt", "Mpp", "Mrt", "Mrp", "Mtp")
for i = 1:length(R.mt)
write_real(io, mt_strings[i], R.mt[i], R.dm[i], 6)
end
write(io, " </tensor>\n")
write_misc(io, R.misc, "xmt_", 5)
end
if !isempty(R.st)
write(io, " <sourceTimeFunction>\n")
write(io, " <type>")
write(io, R.st.desc)
write(io, "</type>\n")
write(io, " <duration>")
print(io, R.st.dur)
write(io, "</duration>\n")
write(io, " <riseTime>")
print(io, R.st.rise)
write(io, "</riseTime>\n")
write(io, " <decayTime>")
print(io, R.st.decay)
write(io, "</decayTime>\n")
write(io, " </sourceTimeFunction>\n")
end
write(io, " </momentTensor>\n")
end
write(io, " </focalMechanism>\n")
end
# ---------------------------------------------------
# Magnitude
if isempty(H.mag)
(v > 0) && println(" Skipped magnitude (H.mag empty)")
else
mag_pid, mag_orig, mag_auth, xx = split_id(H.mag.src, c=",")
write(io, " <magnitude publicID=\"")
write(io, mag_pid)
write(io, "\">\n")
write_real(io, "mag", H.mag.val, 4)
write(io, " <type>", H.mag.scale, "</type>\n")
isempty(mag_orig) || write(io, " <originID>", mag_orig, "</originID>\n")
if !isempty(mag_auth)
write(io, " <creationInfo>\n <author>", mag_auth, "</author>\n </creationInfo>\n")
end
if H.mag.gap != 0.0
write(io, " <azimuthalGap>")
print(io, H.mag.gap)
write(io, "</azimuthalGap>\n")
end
if H.mag.nst > 0
write(io, " <stationCount>")
print(io, H.mag.nst)
write(io, "</stationCount>\n")
end
write(io, " </magnitude>\n")
end
write(io, " </event>\n")
end
write(io, "</eventParameters>\n</quakeml>\n")
return nothing
end
@doc """
write_qml(fname, Ev::SeisEvent; v::Integer=0)
Write event metadata from SeisEvent `Ev` to file `fname`.
write_qml(fname, SHDR::SeisHdr; v::Integer=0)
write_qml(fname, SHDR::Array{SeisHdr,1}; v::Integer=0)
Write QML to file `fname` from `SHDR`.
If `fname` exists and is QuakeML, SeisIO appends to the existing XML. If the
file exists but is NOT QuakeML, an error is thrown and the file isn't overwritten.
write_qml(fname, SHDR::SeisHdr, SSRC::SeisSrc; v::Integer=0)
write_qml(fname, SHDR::Array{SeisHdr,1}, SSRC::Array{SeisSrc,1}; v::Integer=0)
Write QML to file `fname` from `SHDR` and `SSRC`.
!!! warning
To write data from `R ∈ SSRC`, it must be true that `R.eid == H.id` for some `H ∈ SHDR`.
""" write_qml
function write_qml(fname::String, HDR::Array{SeisHdr,1}, SRC::Array{SeisSrc,1}; v::Integer=0)
H0 = SeisHdr[]
R0 = SeisSrc[]
if safe_isfile(fname)
io = open(fname, "a+")
# test whether file can be appended
fastseekend(io)
fastskip(io, -30)
test_str = String(read(io))
if test_str == "</eventParameters>\n</quakeml>\n"
# behavior for files that are QuakeXML as produced by SeisIO
fastskip(io, -30)
else
try
# file exists and is readable QuakeXML but not produced by SeisIO
seekstart(io)
fstart = String(fastread(io, 5))
if fstart == "<?xml"
close(io)
append!(H0, HDR)
append!(R0, SRC)
(H1, R1) = read_qml(fname)
@assert (isempty(H1) == false)
@assert (isempty(R1) == false)
append!(H0, H1)
append!(R0, R1)
else
error("incompatible file type!")
end
catch err
# file exists but isn't QuakeXML
@warn(string(fname, " isn't valid QuakeXML; can't append, exit with error!"))
rethrow(err)
end
io = open(fname, "w")
new_qml!(io)
end
else
# new file
io = open(fname, "w")
new_qml!(io)
end
opts = string(", v=", v)
if isempty(H0) && isempty(R0)
write_qml!(io, HDR, SRC, v)
for h in HDR
fwrite_note_quake!(h, "write_qml", fname, opts)
end
for r in SRC
fwrite_note_quake!(r, "write_qml", fname, opts)
end
else
write_qml!(io, H0, R0, v)
for h in H0
fwrite_note_quake!(h, "write_qml", fname, opts)
end
for r in R0
fwrite_note_quake!(r, "write_qml", fname, opts)
end
end
close(io)
return nothing
end
write_qml(fname::String, H::SeisHdr, R::SeisSrc; v::Integer=0) = write_qml(fname, [H], [R], v=v)
write_qml(fname::String, HDR::Array{SeisHdr,1}; v::Integer=0) = write_qml(fname, HDR, SeisSrc[], v=v)
write_qml(fname::String, H::SeisHdr; v::Integer=0) = write_qml(fname, [H], SeisSrc[], v=v)
write_qml(fname::String, Ev::SeisEvent; v::Integer=0) = write_qml(fname, [Ev.hdr], [Ev.source], v=v)
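# ----------------------------------------------------------------------------
# Usage sketch (illustrative addition, not part of the original source). The
# function name, file name, and event values below are hypothetical; wrapping
# the calls in a function keeps anything from running when this file is loaded.
function _write_qml_sketch()
  H = SeisHdr(id = "9999", ot = DateTime("2019-07-06T03:19:53"),
              mag = EQMag(val = 7.1f0, scale = "Mw"))
  R = SeisSrc(eid = "9999", m0 = 4.0e19)
  # writes (or appends to) QuakeML file "example_evt.xml"
  write_qml("example_evt.xml", H, R)
  return nothing
end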
export fill_sac_evh!
"""
    writesac(W::SeisEvent[, nvhdr=6, v=0])
Write all data in SeisEvent structure `W` to auto-generated SAC files. Event
header information is written from W.hdr; W.source is not used as there is no
standard header position for event source information.
"""
function writesac(S::SeisEvent;
nvhdr::Integer=6,
v::Integer=KW.v)
tdata = Array{Float32, 1}(undef, 0)
reset_sacbuf()
ev_id = codeunits(S.hdr.id == "" ? "-12345 " : S.hdr.id)
ev_id = ev_id[1:min(length(ev_id),16)]
BUF.sac_cv[9:length(ev_id)+8] .= ev_id
# Values from event header
if S.hdr.loc.lat != 0.0
setindex!(BUF.sac_fv, Float32(S.hdr.loc.lat), 36)
setindex!(BUF.sac_dv, S.hdr.loc.lat, 18)
end
if S.hdr.loc.lon != 0.0
setindex!(BUF.sac_fv, Float32(S.hdr.loc.lon), 37)
setindex!(BUF.sac_dv, S.hdr.loc.lon, 17)
end
S.hdr.loc.dep == 0.0 || setindex!(BUF.sac_fv, Float32(S.hdr.loc.dep), 39)
S.hdr.mag.val == -5.0f0 || setindex!(BUF.sac_fv, Float32(S.hdr.mag.val), 40)
BUF.sac_cv[9:length(ev_id)+8] .= ev_id
t_evt = d2u(S.hdr.ot)
# Ints
BUF.sac_iv[7] = Int32(7)
try
BUF.sac_iv[9] = parse(Int32, S.hdr.id)
catch err
@warn(string("Can't write non-integer event ID ", S.hdr.id, " to SAC."))
end
for i = 1:S.data.n
BUF.sac_fv[8] = Float32(t_evt - S.data.t[i][1,2]*μs)
BUF.sac_dv[2] = t_evt - S.data.t[i][1,2]*μs
write_sac_channel(S.data, i, nvhdr, "", v)
end
return nothing
end
mk_ot!(Ev::SeisEvent, i::Int, o::T) where T<:AbstractFloat = (Ev.hdr.ot =
u2d(o + Ev.data.t[i][1,2]*μs))
"""
    fill_sac_evh!(Ev::SeisEvent, fname::String; k=1)
Fill (overwrite) values in `Ev.hdr` with data from the header of SAC file
`fname`. Keyword `k` specifies the reference channel whose start time is used
to set the absolute origin time `Ev.hdr.ot`. Potentially affects header fields
`:id`, `:loc` (subfields .lat, .lon, .dep), `:mag`, and `:ot`.
"""
function fill_sac_evh!(Ev::SeisEvent, fname::String; k::Int=1)
reset_sacbuf()
io = open(fname, "r")
swap = should_bswap(io)
fv = BUF.sac_fv
iv = BUF.sac_iv
cv = BUF.sac_cv
# read
seekstart(io)
fastread!(io, fv)
fastread!(io, iv)
fastread!(io, cv)
if swap == true
fv .= bswap.(fv)
iv .= bswap.(iv)
end
sac_v = getindex(iv, 7)
(iv[9] == sac_nul_i) || (Ev.hdr.id = string(iv[9])) # id
(fv[8] == sac_nul_f) || (mk_ot!(Ev, k, fv[8])) # ot
(fv[36] == sac_nul_f) || (Ev.hdr.loc.lat = Float64(fv[36])) # lat
(fv[37] == sac_nul_f) || (Ev.hdr.loc.lon = Float64(fv[37])) # lon
(fv[39] == sac_nul_f) || (Ev.hdr.loc.dep = Float64(fv[39])) # dep
(fv[40] == sac_nul_f) || (Ev.hdr.mag.val = fv[40]) # mag
if sac_v > 6
fastskip(io, 4*getindex(iv, 10))
dv = BUF.sac_dv
fastread!(io, dv)
swap && (dv .= bswap.(dv))
# parse doubles 4 (o), 17 (evlo), 18 (evla)
(dv[4] == sac_nul_d) || mk_ot!(Ev, k, dv[4]) # ot
(dv[17] == sac_nul_d) || (Ev.hdr.loc.lon = Float64(dv[17])) # lon
(dv[18] == sac_nul_d) || (Ev.hdr.loc.lat = Float64(dv[18])) # lat
end
reset_sacbuf()
close(io)
return nothing
end
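# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). The file name
# and event values are hypothetical, and the function is never called here.
function _sac_event_sketch()
  Ev = SeisEvent(hdr = SeisHdr(id = "1234"), data = EventTraceData(1))
  # writesac(Ev) would write one SAC file per channel, using Ev.hdr for the
  # event header fields; here we instead read event info back from a SAC file:
  fill_sac_evh!(Ev, "example.sac", k = 1)
  return Ev
end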
# extension of merge to EventTraceData fields
function merge_ext!(S::EventTraceData, Ω::Int64, rest::Array{Int64, 1})
pha = PhaseCat()
az = getindex(getfield(S, :az), Ω)
baz = getindex(getfield(S, :baz), Ω)
dist = getindex(getfield(S, :dist), Ω)
for i in rest
if az == 0.0
θ = getindex(getfield(S, :az), i)
(θ != 0.0) && (az = θ)
end
if baz == 0.0
β = getindex(getfield(S, :baz), i)
(β != 0.0) && (baz = β)
end
if dist == 0.0
# Δ is already in use, so...
d = getindex(getfield(S, :dist), i)
(d != 0.0) && (dist = d)
end
merge!(pha, getindex(getfield(S, :pha), i))
end
# This guarantees that the phase catalog of Ω overwrites others
merge!(pha, getindex(getfield(S, :pha), Ω))
setindex!(getfield(S, :az), az, Ω)
setindex!(getfield(S, :baz), baz, Ω)
setindex!(getfield(S, :dist), dist, Ω)
setindex!(getfield(S, :pha), pha, Ω)
return nothing
end
# Home of all extended merge! methods
merge(S::EventTraceData; v::Integer=KW.v) = (U = deepcopy(S); merge!(U, v=v); return U)
merge!(S::EventTraceData, U::EventTraceData; v::Integer=KW.v) = ([append!(getfield(S, f), getfield(U, f)) for f in tracefields]; S.n += U.n; merge!(S; v=v))
merge!(S::EventTraceData, C::EventChannel; v::Integer=KW.v) = merge!(S, EventTraceData(C), v=v)
function merge(A::Array{EventTraceData,1}; v::Integer=KW.v)
L::Int64 = length(A)
n = sum([A[i].n for i = 1:L])
T = EventTraceData(n)
[setfield!(T, f, vcat([getfield(A[i],f) for i = 1:L]...)) for f in SeisIO.datafields]
merge!(T, v=v)
return T
end
merge(S::EventTraceData, U::EventTraceData; v::Integer=KW.v) = merge(Array{EventTraceData,1}([S,U]), v=v)
merge(S::EventTraceData, C::EventChannel; v::Integer=KW.v) = merge(S, EventTraceData(C), v=v)
merge(C::EventChannel, S::EventTraceData; v::Integer=KW.v) = merge(EventTraceData(C), S, v=v)
merge(C::EventChannel, D::EventChannel; v::Integer=KW.v) = (S = EventTraceData(C,D); merge!(S, v=v); return S)
+(S::EventTraceData, C::EventChannel) = +(S, EventTraceData(C))
+(C::EventChannel, S::EventTraceData) = +(S, EventTraceData(C))
+(C::EventChannel, D::EventChannel) = +(EventTraceData(C), EventTraceData(D))
# Multiplication
# distributivity: (S1+S2)*S3) == (S1*S3 + S2*S3)
*(S::EventTraceData, U::EventTraceData) = merge(Array{EventTraceData,1}([S,U]))
*(S::EventTraceData, C::EventChannel) = merge(S, EventTraceData(C))
function *(C::EventChannel, D::EventChannel)
s1 = deepcopy(C)
s2 = deepcopy(D)
S = merge(EventTraceData(s1),EventTraceData(s2))
return S
end
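# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Channel ids,
# times, and samples are hypothetical; nothing here runs at include time.
function _etd_merge_sketch()
  x = sin.(0.1f0 .* (1:100))                    # Float32 test trace
  C = EventChannel(id = "UW.HOOD..ENZ", fs = 100.0, t = [1 0; 100 0], x = copy(x))
  D = EventChannel(id = "UW.HOOD..ENN", fs = 100.0, t = [1 0; 100 0], x = copy(x))
  S = C * D                                     # two-channel EventTraceData via merge
  S = merge(S, EventChannel(id = "UW.HOOD..ENE", fs = 100.0, t = [1 0; 100 0], x = copy(x)))
  return S
end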
export EQLoc
abstract type ComputedLoc end
function show(io::IO, Loc::T) where {T <: ComputedLoc}
if get(io, :compact, false) == false
println(io, T, " with fields:")
for f in fieldnames(T)
fn = lpad(String(f), 5, " ")
if f == :flags
println(io, fn, ": ", bitstring(getfield(Loc, f)))
else
println(io, fn, ": ", getfield(Loc, f))
end
end
else
c = :compact => true
print(io, repr(getfield(Loc, :lat), context=c), " N, ",
repr(getfield(Loc, :lon), context=c), " E, ",
repr(getfield(Loc, :dep), context=c), " km")
end
return nothing
end
isequal(Loc1::T, Loc2::T) where {T <: ComputedLoc} = minimum(
[isequal(getfield(Loc1, f), getfield(Loc2, f)) for f in fieldnames(T)] )
==(Loc1::T, Loc2::T) where {T <: ComputedLoc} = isequal(Loc1, Loc2)
function hash(Loc::T) where {T<:ComputedLoc}
h = hash(zero(UInt64))
for f in fieldnames(T)
h = hash(getfield(Loc, f), h)
end
return h
end
"""
EQLoc
QuakeML-compliant earthquake location
Field | Type | Meaning | SeisIO conventions/behavior
--------: |:------- |:--------------| :----------
lat | Float64 | latitude | °N = +
lon | Float64 | longitude | °E = +
dep | Float64 | depth | km; down = +
dx | Float64 | x error | uses units of data source (typically km)
dy | Float64 | y error | uses units of data source (typically km)
dz | Float64 | z error | uses units of data source (typically km)
dt | Float64 | ot error | uses units of data source (typically s)
se | Float64 | std error | uses units of data source (typically s)
rms | Float64 | rms pick err | uses units of data source (typically s)
gap | Float64 | azimuthal gap | uses units of data source (typically °)
dmin | Float64 | min sta dist | uses units of data source (typically km)
dmax | Float64 | max sta dist | uses units of data source (typically km)
nst | Int64 | # of stations |
flags | UInt8 | boolean flags | access flag[n] with >>(<<(flags,n-1),7)
datum | String | geog. datum |
typ | String | location type | freeform (e.g. "centroid", "hypocenter")
sig | String | significance | freeform (e.g. "95%", "2σ")
| | / confidence |
src | String | source | freeform (e.g. "HYPOELLIPSE", "HypoDD")
flags (0x01 = true, 0x00 = false)
1. x fixed?
2. y fixed?
3. z fixed?
4. t fixed?
"""
mutable struct EQLoc <: ComputedLoc
lat ::Float64
lon ::Float64
dep ::Float64
dx ::Float64
dy ::Float64
dz ::Float64
dt ::Float64
se ::Float64
rms ::Float64
gap ::Float64
dmin ::Float64
dmax ::Float64
nst ::Int64
flags ::UInt8
datum ::String
typ ::String
sig ::String
src ::String
function EQLoc(
lat ::Float64,
lon ::Float64,
dep ::Float64,
dx ::Float64,
dy ::Float64,
dz ::Float64,
dt ::Float64,
se ::Float64,
rms ::Float64,
gap ::Float64,
dmin ::Float64,
dmax ::Float64,
nst ::Int64,
flags ::UInt8,
datum ::String,
typ ::String,
sig ::String,
src ::String,
)
return new(lat, lon, dep, dx, dy, dz, dt, se, rms, gap, dmin, dmax, nst, flags, datum, typ, sig, src)
end
end
EQLoc(;
lat ::Float64 = zero(Float64),
lon ::Float64 = zero(Float64),
dep ::Float64 = zero(Float64),
dx ::Float64 = zero(Float64),
dy ::Float64 = zero(Float64),
dz ::Float64 = zero(Float64),
dt ::Float64 = zero(Float64),
se ::Float64 = zero(Float64),
rms ::Float64 = zero(Float64),
gap ::Float64 = zero(Float64),
dmin ::Float64 = zero(Float64),
dmax ::Float64 = zero(Float64),
nst ::Int64 = zero(Int64),
flags ::UInt8 = 0x00,
datum ::String = "",
typ ::String = "",
sig ::String = "",
src ::String = "",
) = EQLoc(lat, lon, dep, dx, dy, dz, dt, se, rms, gap, dmin, dmax, nst, flags, datum, typ, sig, src)
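# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Coordinates and
# the flags byte are hypothetical; the helper below is never called here.
function _eqloc_flag_sketch()
  loc = EQLoc(lat = 46.87, lon = -121.76, dep = 5.0, dz = 1.2, flags = 0x20)
  # flags = 0x20 = 0b00100000 sets flag 3 ("z fixed"); per the docstring,
  # flag n is read with >>(<<(flags, n-1), 7)
  z_fixed = >>(<<(loc.flags, 2), 7) == 0x01
  return z_fixed
end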
function write(io::IO, Loc::EQLoc)
for f in (:lat, :lon, :dep, :dx, :dy, :dz, :dt, :se, :rms, :gap, :dmin, :dmax, :nst, :flags)
write(io, getfield(Loc, f))
end
write(io, sizeof(Loc.datum))
write(io, Loc.datum)
write(io, sizeof(Loc.typ))
write(io, Loc.typ)
write(io, sizeof(Loc.sig))
write(io, Loc.sig)
write(io, sizeof(Loc.src))
write(io, Loc.src)
return nothing
end
read(io::IO, ::Type{EQLoc}) = EQLoc(read!(io, Array{Float64,1}(undef, 12))...,
fastread(io, Int64),
fastread(io),
String(fastread(io, fastread(io, Int64))),
String(fastread(io, fastread(io, Int64))),
String(fastread(io, fastread(io, Int64))),
String(fastread(io, fastread(io, Int64)))
)
function isempty(Loc::EQLoc)
q::Bool = min(isempty(getfield(Loc, :datum)),
isempty(getfield(Loc, :typ)),
isempty(getfield(Loc, :sig)),
isempty(getfield(Loc, :src)),
getfield(Loc, :nst) == zero(Int64),
getfield(Loc, :flags) == 0x00)
for f in (:lat, :lon, :dep, :dx, :dy, :dz, :dt, :se, :rms, :gap, :dmin, :dmax)
q = min(q, getfield(Loc, f) == zero(Float64))
end
return q
end
sizeof(Loc::EQLoc) = 233 +
sizeof(getfield(Loc, :datum)) +
sizeof(getfield(Loc, :typ)) +
sizeof(getfield(Loc, :sig)) +
sizeof(getfield(Loc, :src))
export EQMag
"""
EQMag
Earthquake magnitude container object
Field | Type | Meaning
--------: |:------- |:--------------
val | Float32 | numeric magnitude value (note: Float32!)
scale | String | magnitude scale (freeform)
gap | Float64 | azimuthal gap (°)
nst | Int64 | number of stations used in magnitude calculation
src | String | magnitude source
"""
mutable struct EQMag
val ::Float32
scale ::String
nst ::Int64
gap ::Float64
src ::String
function EQMag( val ::Float32,
scale ::String,
nst ::Int64,
gap ::Float64,
src ::String
)
return new(val, scale, nst, gap, src)
end
end
EQMag(;
val ::Float32 = -5.0f0,
scale ::String = "",
nst ::Int64 = zero(Int64),
gap ::Float64 = zero(Float64),
src ::String = ""
) = EQMag(val, scale, nst, gap, src)
isempty(Mag::EQMag) = min(getfield(Mag, :val) == -5.0f0,
getfield(Mag, :gap) == zero(Float64),
getfield(Mag, :nst) == zero(Int64),
isempty(getfield(Mag, :scale)),
isempty(getfield(Mag, :src)))
function hash(Mag::EQMag)
h = hash(getfield(Mag, :val))
for f in (:scale, :nst, :gap, :src)
h = hash(getfield(Mag, f), h)
end
return h
end
function isequal(S::EQMag, U::EQMag)
q::Bool = isequal(getfield(S, :val), getfield(U, :val))
for f in (:scale, :nst, :gap, :src)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
==(S::EQMag, U::EQMag) = isequal(S, U)
sizeof(Mag::EQMag) = 52 + sizeof(Mag.src) + sizeof(Mag.scale)
function write(io::IO, M::EQMag)
write(io, getfield(M, :val))
write(io, getfield(M, :gap))
write(io, getfield(M, :nst))
scale = codeunits(getfield(M, :scale))
write(io, Int64(length(scale)))
write(io, scale)
src = codeunits(getfield(M, :src))
write(io, Int64(length(src)))
write(io, src)
return nothing
end
function read(io::IO, ::Type{EQMag})
M = EQMag()
setfield!(M, :val, fastread(io, Float32))
setfield!(M, :gap, fastread(io, Float64))
setfield!(M, :nst, fastread(io, Int64))
L = fastread(io, Int64)
setfield!(M, :scale, String(fastread(io, L)))
L = fastread(io, Int64)
setfield!(M, :src, String(fastread(io, L)))
return M
end
function show(io::IO, Mag::EQMag)
if get(io, :compact, false) == false
println(io, "EQMag with fields:")
for f in (:val, :scale, :nst, :gap, :src)
fn = lpad(String(f), 5, " ")
println(io, fn, ": ", getfield(Mag, f))
end
else
c = :compact => true
print(io, getfield(Mag, :scale), " ",
repr(getfield(Mag, :val), context=c), " ",
"(g ", repr(getfield(Mag, :gap), context=c), "°, ",
"n ", getfield(Mag, :nst), ")")
end
return nothing
end
export EventChannel
@doc (@doc EventTraceData)
mutable struct EventChannel <: GphysChannel
id ::String # id
name ::String # name
loc ::InstrumentPosition # loc
fs ::Float64 # fs
gain ::Float64 # gain
resp ::InstrumentResponse # resp
units ::String # units
az ::Float64 # source azimuth
baz ::Float64 # backazimuth
dist ::Float64 # distance
pha ::PhaseCat # phase catalog
src ::String # src
misc ::Dict{String,Any} # misc
notes ::Array{String,1} # notes
t ::Array{Int64,2} # time
x ::FloatArray # data
function EventChannel(
id ::String, # id
name ::String, # name
loc ::InstrumentPosition, # loc
fs ::Float64, # fs
gain ::Float64, # gain
resp ::InstrumentResponse, # resp
units ::String, # units
az ::Float64, # source azimuth
baz ::Float64, # backazimuth
dist ::Float64, # distance
pha ::PhaseCat, # phase catalog
src ::String, # src
misc ::Dict{String,Any}, # misc
notes ::Array{String,1}, # notes
t ::Array{Int64,2}, # time
x ::FloatArray # data
)
return new(id, name, loc, fs, gain, resp, units, az, baz, dist, pha, src, misc, notes, t, x)
end
end
EventChannel(;
id ::String = "",
name ::String = "",
loc ::InstrumentPosition = GeoLoc(),
fs ::Float64 = default_fs,
gain ::Float64 = default_gain,
resp ::InstrumentResponse = PZResp(),
units ::String = "",
az ::Float64 = default_fs, # source azimuth
baz ::Float64 = default_fs, # backazimuth
dist ::Float64 = default_fs, # distance
pha ::PhaseCat = PhaseCat(),
src ::String = "",
misc ::Dict{String,Any} = Dict{String,Any}(),
notes ::Array{String,1} = Array{String,1}(undef, 0),
t ::Array{Int64,2} = Array{Int64,2}(undef, 0, 2),
x ::FloatArray = Array{Float32,1}(undef, 0)
) = EventChannel(id, name, loc, fs, gain, resp, units, az, baz, dist, pha, src, misc, notes, t, x)
function getindex(S::EventTraceData, j::Int)
C = EventChannel()
[setfield!(C, f, getfield(S,f)[j]) for f in tracefields]
return C
end
setindex!(S::EventTraceData, C::EventChannel, j::Int) = (
[(getfield(S, f))[j] = getfield(C, f) for f in tracefields];
return S)
function isempty(Ch::EventChannel)
q::Bool = Ch.gain == default_gain
for f in (:az, :baz, :dist, :fs)
q = min(q, getfield(Ch, f) == default_fs)
(q == false) && return q
end
for f in (:id, :loc, :misc, :name, :notes, :pha, :resp, :src, :t, :units, :x)
q = min(q, isempty(getfield(Ch, f)))
(q == false) && return q
end
return q
end
# ============================================================================
# Conversion and push to EventTraceData
function EventTraceData(C::EventChannel)
S = EventTraceData(1)
for f in tracefields
setindex!(getfield(S, f), getfield(C, f), 1)
end
return S
end
function push!(S::EventTraceData, C::EventChannel)
for i in tracefields
push!(getfield(S,i), getfield(C,i))
end
S.n += 1
return nothing
end
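# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Station, phase,
# and geometry values are hypothetical; nothing executes at include time.
function _event_channel_sketch()
  C = EventChannel(id = "UW.HOOD..ENZ", fs = 100.0,
                   az = 154.3, baz = 334.4, dist = 12.6,
                   t = [1 0; 100 0], x = sin.(0.1f0 .* (1:100)))
  C.pha["P"] = SeisPha(tt = 3.1, d = 12.6)  # add a P arrival to the phase catalog
  S = EventTraceData(1)
  S[1] = C                                  # place the channel in a trace structure
  push!(S, C)                               # ...or append it as a new channel
  return S
end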
function sizeof(C::EventChannel)
s = 136
for f in tracefields
v = getfield(C,f)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc || f == :pha
k = collect(keys(v))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(v)
s += sizeof(p)
if typeof(p) == Array{String,1}
s += sum([sizeof(j) for j in p])
end
end
end
end
return s
end
function write(io::IO, S::EventChannel)
write(io, Int64(sizeof(S.id)))
write(io, S.id) # id
write(io, Int64(sizeof(S.name)))
write(io, S.name) # name
write(io, loctyp2code(S.loc))
write(io, S.loc) # loc
write(io, S.fs) # fs
write(io, S.gain) # gain
write(io, resptyp2code(S.resp))
write(io, S.resp) # resp
write(io, Int64(sizeof(S.units)))
write(io, S.units) # units
write(io, S.az) # az
write(io, S.baz) # baz
write(io, S.dist) # dist
write(io, S.pha) # pha
write(io, Int64(sizeof(S.src)))
write(io, S.src) # src
write_misc(io, S.misc) # misc
write_string_vec(io, S.notes) # notes
write(io, Int64(size(S.t,1)))
write(io, S.t) # t
write(io, typ2code(eltype(S.x)))
write(io, Int64(length(S.x)))
write(io, S.x) # x
return nothing
end
read(io::IO, ::Type{EventChannel}) = EventChannel(
String(fastread(io, fastread(io, Int64))), # id
String(fastread(io, fastread(io, Int64))), # name
read(io, code2loctyp(fastread(io))), # loc
fastread(io, Float64), # fs
fastread(io, Float64), # gain
read(io, code2resptyp(fastread(io))), # resp
String(fastread(io, fastread(io, Int64))), # units
fastread(io, Float64), # az
fastread(io, Float64), # baz
fastread(io, Float64), # dist
read(io, PhaseCat), # pha
String(fastread(io, fastread(io, Int64))), # src
read_misc(io, getfield(BUF, :buf)), # misc
read_string_vec(io, getfield(BUF, :buf)), # notes
read!(io, Array{Int64, 2}(undef, fastread(io, Int64), 2)), # t
read!(io, Array{code2typ(fastread(io)),1}(undef, fastread(io, Int64))), # x
)
export EventTraceData
@doc """
EventTraceData
A custom structure designed to describe trace data (digital seismograms)
associated with a discrete event (earthquake).
EventChannel
A single channel of trace data (digital seismograms) associated with a
discrete event (earthquake).
## Fields: EventTraceData, EventChannel, SeisEvent.data
| **Field** | **Description** |
|:-------|:------ |
| :n | Number of channels [^1] |
| :id | Channel id. Uses NET.STA.LOC.CHA format when possible |
| :name | Freeform channel name |
| :loc | Location (position) vector; any subtype of InstrumentPosition |
| :fs | Sampling frequency in Hz; fs=0.0 for irregularly-sampled data. |
| :gain | Scalar gain |
| :resp | Instrument response; any subtype of InstrumentResponse |
| :units | String describing data units. UCUM standards are assumed. |
| :az | Source azimuth |
| :baz | Backazimuth to source |
| :dist | Source-receiver distance |
| :pha | Seismic phase catalog |
| :src | Freeform string describing data source. |
| :misc | Dictionary for non-critical information. |
| :notes | Timestamped notes; includes automatically-logged information. |
| :t | Matrix of time gaps in integer μs, formatted [Sample# Length] |
| :x | Time-series data |
[^1]: Not present in EventChannel objects.
See also: `PhaseCat`, `SeisPha`, `SeisData`
""" EventTraceData
mutable struct EventTraceData <: GphysData
n ::Int64 # number of channels
id ::Array{String,1} # id
name ::Array{String,1} # name
loc ::Array{InstrumentPosition,1} # loc
fs ::Array{Float64,1} # fs
gain ::Array{Float64,1} # gain
resp ::Array{InstrumentResponse,1} # resp
units ::Array{String,1} # units
az ::Array{Float64,1} # source azimuth
baz ::Array{Float64,1} # backazimuth
dist ::Array{Float64,1} # distance
pha ::Array{PhaseCat,1} # phase catalog
src ::Array{String,1} # src
misc ::Array{Dict{String,Any},1} # misc
notes ::Array{Array{String,1},1} # notes
t ::Array{Array{Int64,2},1} # time
x ::Array{FloatArray,1} # data
function EventTraceData()
return new( 0, # n
Array{String,1}(undef,0), # id
Array{String,1}(undef,0), # name
Array{InstrumentPosition,1}(undef,0), # loc
Array{Float64,1}(undef,0), # fs
Array{Float64,1}(undef,0), # gain
Array{InstrumentResponse,1}(undef,0), # resp
Array{String,1}(undef,0), # units
Array{Float64,1}(undef,0), # az
Array{Float64,1}(undef,0), # baz
Array{Float64,1}(undef,0), # dist
Array{PhaseCat,1}(undef,0), # pha
Array{String,1}(undef,0), # src
Array{Dict{String,Any},1}(undef,0), # misc
Array{Array{String,1},1}(undef,0), # notes
Array{Array{Int64,2},1}(undef,0), # t
Array{FloatArray,1}(undef,0) # x
)
end
function EventTraceData(n::Int64,
id::Array{String,1} , # id
name::Array{String,1} , # name
loc::Array{InstrumentPosition,1} , # loc
fs::Array{Float64,1} , # fs
gain::Array{Float64,1} , # gain
resp::Array{InstrumentResponse,1} , # resp
units::Array{String,1} , # units
az::Array{Float64,1} , # az
baz::Array{Float64,1} , # baz
dist::Array{Float64,1} , # dist
pha::Array{PhaseCat,1} , # pha
src::Array{String,1} , # src
misc::Array{Dict{String,Any},1} , # misc
notes::Array{Array{String,1},1} , # notes
t::Array{Array{Int64,2},1} , # time
x::Array{FloatArray,1})
return new(n,
id, name, loc, fs, gain, resp, units, az, baz, dist, pha, src, misc, notes, t, x)
end
function EventTraceData(n::UInt)
TD = new( n, # n
Array{String,1}(undef,n), # id
Array{String,1}(undef,n), # name
Array{InstrumentPosition,1}(undef,n), # loc
Array{Float64,1}(undef,n), # fs
Array{Float64,1}(undef,n), # gain
Array{InstrumentResponse,1}(undef,n), # resp
Array{String,1}(undef,n), # units
Array{Float64,1}(undef,n), # az
Array{Float64,1}(undef,n), # baz
Array{Float64,1}(undef,n), # dist
Array{PhaseCat,1}(undef,n), # pha
Array{String,1}(undef,n), # src
Array{Dict{String,Any},1}(undef,n), # misc
Array{Array{String,1},1}(undef,n), # notes
Array{Array{Int64,2},1}(undef,n), # t
Array{FloatArray,1}(undef,n) # x
)
# Fill these fields with something to prevent undefined reference errors
fill!(TD.az, 0.0) # az
fill!(TD.baz, 0.0) # baz
fill!(TD.dist, 0.0) # dist
fill!(TD.fs, 0.0) # fs
fill!(TD.gain, 1.0) # gain
fill!(TD.id, "") # id
fill!(TD.name, "") # name
fill!(TD.src, "") # src
fill!(TD.units, "") # units
for i = 1:n
TD.loc[i] = GeoLoc() # loc
TD.misc[i] = Dict{String,Any}() # misc
TD.notes[i] = Array{String,1}(undef,0) # notes
TD.resp[i] = PZResp() # resp
TD.pha[i] = PhaseCat() # pha
TD.t[i] = Array{Int64,2}(undef,0,2) # t
TD.x[i] = Array{Float32,1}(undef,0) # x
end
return TD
end
EventTraceData(n::Int) = n > 0 ? EventTraceData(UInt(n)) : EventTraceData()
end
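# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source): preallocate an
# EventTraceData structure and fill indexed fields. Ids and fs are hypothetical.
function _etd_prealloc_sketch()
  TD = EventTraceData(2)
  TD.id .= ["UW.HOOD..ENZ", "UW.HOOD..ENN"]
  TD.fs .= 100.0
  # mixed inputs also work, e.g. EventTraceData(SeisChannel(), EventChannel())
  return TD
end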
function sizeof(TD::EventTraceData)
s = 144
for f in tracefields
if (f in unindexed_fields) == false
V = getfield(TD, f)
s += sizeof(V)
for i = 1:TD.n
v = getindex(V, i)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc || f == :pha
for i in values(v)
s += sizeof(i)
end
s += sizeof(collect(keys(v)))
end
end
end
end
return s
end
# SeisData
function write(io::IO, S::EventTraceData)
N = getfield(S, :n)
LOC = getfield(S, :loc)
RESP = getfield(S, :resp)
T = getfield(S, :t)
X = getfield(S, :x)
MISC = getfield(S, :misc)
NOTES = getfield(S, :notes)
cmp = false
if KW.comp != 0x00
nx_max = maximum([sizeof(getindex(X, i)) for i = 1:S.n])
if (nx_max > KW.n_zip) || (KW.comp == 0x02)
cmp = true
Z = getfield(BUF, :buf)
checkbuf_8!(Z, nx_max)
end
end
codes = Array{UInt8,1}(undef, 3*N)
L = Array{Int64,1}(undef, 2*N)
# Write begins -----------------------------------------------------
write(io, N)
p = fastpos(io)
fastskip(io, 19*N+1)
write_string_vec(io, S.id) # id
write_string_vec(io, S.name) # name
i = 0 # loc
while i < N
i = i + 1
loc = getindex(LOC, i)
setindex!(codes, loctyp2code(loc), i)
write(io, loc)
end
write(io, S.fs) # fs
write(io, S.gain) # gain
i = 0 # resp
while i < N
i = i + 1
resp = getindex(RESP, i)
setindex!(codes, resptyp2code(resp), N+i)
write(io, resp)
end
write_string_vec(io, S.units) # units
write(io, S.az) # az
write(io, S.baz) # baz
write(io, S.dist) # dist
for i = 1:N; write(io, S.pha[i]); end # pha
write_string_vec(io, S.src) # src
for i = 1:N; write_misc(io, getindex(MISC, i)); end # misc
for i = 1:N; write_string_vec(io, getindex(NOTES, i)); end # notes
i = 0 # t
while i < N
i = i + 1
t = getindex(T, i)
setindex!(L, size(t,1), i)
write(io, t)
end
i = 0 # x
while i < N
i = i + 1
x = getindex(X, i)
nx = lastindex(x)
if cmp
l = zero(Int64)
while l == zero(Int64)
l = Blosc.compress!(Z, x, level=5)
(l > zero(Int64)) && break
nx_max = nextpow(2, nx_max)
checkbuf_8!(Z, nx_max)
@warn(string("Compression ratio > 1.0 for channel ", i, "; are data OK?"))
end
xc = view(Z, 1:l)
write(io, xc)
setindex!(L, l, N+i)
else
write(io, x)
setindex!(L, nx, N+i)
end
setindex!(codes, typ2code(eltype(x)), 2*N+i)
end
q = fastpos(io)
fastseek(io, p)
write(io, codes)
write(io, cmp)
write(io, L)
fastseek(io, q)
return nothing
end
# read
function read(io::IO, ::Type{EventTraceData})
Z = getfield(BUF, :buf)
L = getfield(BUF, :int64_buf)
N = fastread(io, Int64)
checkbuf_strict!(L, 2*N)
fast_readbytes!(io, Z, 3*N)
c1 = copy(Z[1:N])
c2 = copy(Z[N+1:2*N])
y = code2typ.(getindex(Z, 2*N+1:3*N))
cmp = read(io, Bool)
read!(io, L)
nx = getindex(L, N+1:2*N)
if cmp
checkbuf_8!(Z, maximum(nx))
end
return EventTraceData(
N,
read_string_vec(io, Z),
read_string_vec(io, Z),
InstrumentPosition[read(io, code2loctyp(getindex(c1, i))) for i = 1:N],
read!(io, Array{Float64, 1}(undef, N)),
read!(io, Array{Float64, 1}(undef, N)),
InstrumentResponse[read(io, code2resptyp(getindex(c2, i))) for i = 1:N],
read_string_vec(io, Z),
read!(io, Array{Float64, 1}(undef, N)),
read!(io, Array{Float64, 1}(undef, N)),
read!(io, Array{Float64, 1}(undef, N)),
PhaseCat[read(io, PhaseCat) for i = 1:N],
read_string_vec(io, Z),
[read_misc(io, Z) for i = 1:N],
[read_string_vec(io, Z) for i = 1:N],
[read!(io, Array{Int64, 2}(undef, getindex(L, i), 2)) for i = 1:N],
FloatArray[cmp ?
(fast_readbytes!(io, Z, getindex(nx, i)); Blosc.decompress(getindex(y,i), Z)) :
read!(io, Array{getindex(y,i), 1}(undef, getindex(nx, i)))
for i = 1:N]
)
end
function EventTraceData(U...)
TD = EventTraceData()
for i = 1:length(U)
Y = getindex(U,i)
if typeof(Y) == SeisChannel
push!(TD, convert(EventChannel, Y))
elseif typeof(Y) == EventChannel
push!(TD, Y)
elseif typeof(Y) == SeisData
append!(TD, convert(EventTraceData, Y))
elseif typeof(Y) == SeisEvent
append!(TD, getfield(Y, :data))
elseif typeof(Y) == EventTraceData
append!(TD, Y)
else
@warn(string("Tried to join incompatible type into SeisData at arg ", i, "; skipped."))
end
end
return TD
end
export PhaseCat, show_phases
# PhaseCat
"Type alias of Dict{String, SeisPha}; see ?SeisPha() for field definitions."
const PhaseCat = Dict{String, SeisPha}
function write(io::IO, PC::PhaseCat)
L = Int64(length(PC))
write(io, L)
if L != zero(Int64)
K = keys(PC)
write_string_vec(io, collect(K))
for V in values(PC)
write(io, V)
end
end
return nothing
end
function read(io::IO, ::Type{PhaseCat})
PC = PhaseCat()
L = fastread(io, Int64)
if L != zero(Int64)
u = getfield(BUF, :buf)
checkbuf!(u, 65535)
K = read_string_vec(io, u)
for k in K
PC[k] = read(io, SeisPha)
end
end
return PC
end
function show_phases(io::IO, PC::PhaseCat)
phase_names = sort(collect(keys(PC)))
npha = length(phase_names)
w = 10
ww = 16
c = :compact => true
F = (:amp, :d, :ia, :res, :rp, :ta, :tt, :unc, :pol, :qual)
print(io, lpad("Phase", ww))
print(io, lpad("Amplitude", w))
print(io, lpad("Distance", w))
print(io, lpad("Incidence", w))
print(io, lpad("Residual", w))
print(io, lpad("Ray Param", w))
print(io, lpad("Takeoff", w))
print(io, lpad("Time", w))
print(io, lpad("Unc", w))
print(io, " P Q\n")
print(io, "="^ww, "+")
for j = 1:8
print(io, "="^(w-1), "+")
end
print(io, "==+==", "\n")
for i = 1:npha
pha = phase_names[i]
print(io, rpad(string("\"", pha, "\""), ww))
Pha = get(PC, pha, SeisPha())
j = 0
for f in F
j += 1
if j < 9
@printf(io, "%10.3g", getfield(Pha, f))
elseif j == 9
print(io, " ", string(getfield(Pha, f)))
else
print(io, " ", string(getfield(Pha, f), "\n"))
end
end
end
end
show_phases(PC::PhaseCat) = show_phases(stdout, PC)
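# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Phase names and
# values are hypothetical; the function is defined but never called here.
function _phasecat_sketch()
  PC = PhaseCat("P" => SeisPha(tt = 62.3, d = 4.1, rp = 13.7, pol = 'U'),
                "S" => SeisPha(tt = 109.8, d = 4.1))
  show_phases(stdout, PC)      # formatted table of the catalog
  return PC
end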
export SeisEvent
@doc """
SeisEvent
A structure for discrete seismic events, comprising three structures:
* :hdr, a SeisHdr for the event descriptor
* :source, a SeisSrc for description of the seismic source process
* :data, an EventTraceData structure for channel data, including phases
See also: `SeisHdr`, `SeisSrc`, `EventTraceData`
"""
mutable struct SeisEvent
hdr::SeisHdr
source::SeisSrc
data::EventTraceData
SeisEvent(hdr::SeisHdr, source::SeisSrc, data::EventTraceData) = new(hdr, source, data)
end
function SeisEvent(;
hdr ::SeisHdr = SeisHdr(),
source::SeisSrc = SeisSrc(),
data::T = EventTraceData()
) where {T<:GphysData}
if T != EventTraceData
return SeisEvent(hdr, source, convert(EventTraceData, data))
else
return SeisEvent(hdr, source, data)
end
end
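# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Event id,
# origin, and magnitude are hypothetical; nothing executes at include time.
function _seisevent_sketch()
  H = SeisHdr(id = "us7000abcd", ot = DateTime("2020-03-31T23:52:31"),
              loc = EQLoc(lat = 44.46, lon = -115.12, dep = 14.0),
              mag = EQMag(val = 6.5f0, scale = "Mww"))
  R = SeisSrc(eid = H.id, m0 = 7.1e18)
  # SeisData (or any GphysData) passed as :data is converted to EventTraceData
  return SeisEvent(hdr = H, source = R, data = SeisData(1))
end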
# =============================================================================
# Methods from Base
isequal(S::SeisEvent, T::SeisEvent) = min(isequal(S.hdr, T.hdr),
isequal(S.source, T.source),
isequal(S.data, T.data))
==(S::SeisEvent, T::SeisEvent) = isequal(S,T)
sizeof(Ev::SeisEvent) = 24 + sizeof(Ev.hdr) + sizeof(Ev.source) + sizeof(Ev.data)
# SeisEvent
write(io::IO, W::SeisEvent) = ( write(io, getfield(W, :hdr));
write(io, getfield(W, :source));
write(io, getfield(W, :data))
)
read(io::IO, ::Type{SeisEvent}) = SeisEvent(read(io, SeisHdr),
read(io, SeisSrc),
read(io, EventTraceData))
summary(V::SeisEvent) = string("Event ", V.hdr.id, ": SeisEvent with ",
V.data.n, " channel", V.data.n == 1 ? "" : "s")
function show(io::IO, S::SeisEvent)
println(io, summary(S))
println(io, "\n(.hdr)")
show(io, getfield(S, :hdr))
println(io, "\n(.source)")
show(io, getfield(S, :source))
println(io, "\n(.data)")
println(io, summary(getfield(S, :data)))
return nothing
end
show(S::SeisEvent) = show(stdout, S)
export SeisHdr
"""
SeisHdr: header information for seismic events
S = SeisHdr()
Initialize an empty SeisHdr object. Fields can be initialized at creation with
keywords, e.g., SeisHdr(ot=DateTime("2012-01-03T03:49:45"), int=(0x02, "MMI")).
| Field | Default            | Type                | Meaning |
|:----  |:-----              |:-----               |:-------- |
| id    | ""                 | String              | Event ID |
| int   | (0x00, "")         | Tuple{UInt8,String} | (Intensity, Intensity Scale) |
| loc   | EQLoc()            | EQLoc               | Hypocenter data |
| mag   | EQMag()            | EQMag               | Magnitude data |
| misc  | Dict{String,Any}() | Dict{String,Any}    | Non-essential info |
| ot    | (unix epoch)       | DateTime            | Origin time |
| notes | []                 | Array{String,1}     | Timestamped notes, logging |
| src   | ""                 | String              | Data source (URL/filename) |
| typ   | ""                 | String              | Event type |
See also: `EQLoc`, `EQMag`
"""
mutable struct SeisHdr
id ::String
int ::Tuple{UInt8,String}
loc ::EQLoc
mag ::EQMag
misc ::Dict{String,Any}
notes ::Array{String,1}
ot ::DateTime
src ::String
typ ::String
function SeisHdr(
id ::String,
int ::Tuple{UInt8,String},
loc ::EQLoc,
mag ::EQMag,
misc ::Dict{String,Any},
notes ::Array{String,1},
ot ::DateTime,
src ::String,
typ ::String
)
return new(id, int, loc, mag, misc, notes, ot, src, typ)
end
end
SeisHdr(;
id ::String = "",
int ::Tuple{UInt8,String} = (0x00, ""),
loc ::EQLoc = EQLoc(),
mag ::EQMag = EQMag(),
misc ::Dict{String,Any} = Dict{String,Any}(),
notes ::Array{String,1} = Array{String,1}(undef, 0),
ot ::DateTime = u2d(0),
src ::String = "",
typ ::String = "",
) = SeisHdr(id, int, loc, mag, misc, notes, ot, src, typ)
# =============================================================================
# Methods from Base
sizeof(H::SeisHdr) = sum([sizeof(getfield(H,i)) for i in fieldnames(SeisHdr)])
function isempty(H::SeisHdr)
q = min(getfield(H, :ot) == u2d(0),
getfield(H, :int) == (0x00, ""))
if q == true
for f in (:id, :loc, :mag, :misc, :notes, :src, :typ)
q = min(q, isempty(getfield(H, f)))
end
end
return q
end
function isequal(H::SeisHdr, K::SeisHdr)
q::Bool = true
for i in fieldnames(SeisHdr)
if i != :notes
q = min(q, isequal(getfield(H,i), getfield(K,i)))
end
end
return q
end
==(H1::SeisHdr, H2::SeisHdr) = isequal(H1, H2)
function write(io::IO, H::SeisHdr)
write(io, Int64(sizeof(H.id)))
write(io, H.id)
write(io, H.int[1])
write(io, Int64(sizeof(H.int[2])))
write(io, H.int[2])
write(io, H.loc)
write(io, H.mag)
write_misc(io, H.misc)
write_string_vec(io, H.notes)
write(io, round(Int64, d2u(getfield(H, :ot))*sμ))
write(io, Int64(sizeof(H.src)))
write(io, H.src)
write(io, Int64(sizeof(H.typ)))
write(io, H.typ)
return nothing
end
read(io::IO, ::Type{SeisHdr}) = SeisHdr(
String(fastread(io, fastread(io, Int64))),
(fastread(io), String(fastread(io, fastread(io, Int64)))),
read(io, EQLoc),
read(io, EQMag),
read_misc(io, BUF.buf),
read_string_vec(io, BUF.buf),
u2d(fastread(io, Int64)*μs),
String(fastread(io, fastread(io, Int64))),
String(fastread(io, fastread(io, Int64)))
)
summary(H::SeisHdr) = string(typeof(H), ", ",
repr("text/plain", H.loc, context=:compact=>true), ", ",
repr("text/plain", H.mag, context=:compact=>true), ", ",
H.int[2], " ", H.int[1])
function show(io::IO, H::SeisHdr)
W = max(80, displaysize(io)[2]-2)-show_os
println(io, " ID: ", H.id)
println(io, " INT: ", H.int[2], " ", H.int[1])
println(io, " LOC: ", repr("text/plain", H.loc, context=:compact=>true))
println(io, " MAG: ", repr("text/plain", H.mag, context=:compact=>true))
println(io, " OT: ", H.ot)
println(io, " SRC: ", str_trunc(H.src, W))
println(io, " TYP: ", str_trunc(H.typ, W))
println(io, " MISC: ", length(H.misc), " items")
println(io, " NOTES: ", length(H.notes), " entries")
return nothing
end
show(H::SeisHdr) = show(stdout, H)
export SeisPha
# ===========================================================================
# SeisPha
"""
SeisPha()
IRIS-style seismic phase and pick container
Field | Type | Meaning | SeisIO conventions/behavior
--------: |:------- |:-------------- | :----------
amp | Float64 | amplitude | uses units of data source
d | Float64 | distance | no unit conversion; can be m, km, or °
ia | Float64 | incidence angle | uses units of data source
res | Float64 | pick residual |
rp | Float64 | ray parameter |
ta | Float64 | takeoff angle |
tt | Float64 | travel time |
unc | Float64 | uncertainty |
pol | Char | polarity |
qual | Char | pick quality | not (re)calculated
"""
mutable struct SeisPha
amp ::Float64 # amplitude
d ::Float64 # distance
ia ::Float64 # incidence angle
res ::Float64 # residual
rp ::Float64 # ray parameter
ta ::Float64 # takeoff angle
tt ::Float64 # travel time
unc ::Float64 # uncertainty
pol ::Char # polarity
qual::Char # quality
function SeisPha(
amp ::Float64 , # amplitude
d ::Float64 , # distance
ia ::Float64 , # incidence angle
res ::Float64 , # residual
rp ::Float64 , # ray parameter
ta ::Float64 , # takeoff angle
tt ::Float64 , # travel time
unc ::Float64 , # uncertainty
pol ::Char , # polarity
qual::Char # quality
)
return new(amp, d, ia, res, rp, ta, tt, unc, pol, qual)
end
end
SeisPha( ;
amp ::Float64 = zero(Float64),
d ::Float64 = zero(Float64),
ia ::Float64 = zero(Float64),
res ::Float64 = zero(Float64),
rp ::Float64 = zero(Float64),
ta ::Float64 = zero(Float64),
tt ::Float64 = zero(Float64),
unc ::Float64 = zero(Float64),
pol ::Char = ' ',
qual::Char = ' '
) = SeisPha(amp, d, ia, res, rp, ta, tt, unc, pol, qual)
function write(io::IO, Pha::SeisPha)
write(io, Pha.amp)
write(io, Pha.d)
write(io, Pha.ia)
write(io, Pha.res)
write(io, Pha.rp)
write(io, Pha.ta)
write(io, Pha.tt)
write(io, Pha.unc)
write(io, Pha.pol)
write(io, Pha.qual)
return nothing
end
read(io::IO, ::Type{SeisPha}) =
SeisPha(fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
read(io, Char),
read(io, Char)
)
function isempty(Pha::SeisPha)
q::Bool = min(getfield(Pha, :pol) == ' ', getfield(Pha, :qual) == ' ')
if q == true
for f in (:amp, :d, :ia, :res, :rp, :ta, :tt, :unc)
q = min(q, getfield(Pha, f) == zero(Float64))
end
end
return q
end
function isequal(S::SeisPha, U::SeisPha)
q::Bool = isequal(getfield(S, :pol), getfield(U, :pol))
if q == true
for f in (:amp, :d, :ia, :res, :rp, :ta, :tt, :unc, :pol, :qual)
q = min(q, getfield(S,f) == getfield(U,f))
end
end
return q
end
==(S::SeisPha, U::SeisPha) = isequal(S, U)
sizeof(P::SeisPha) = 146
export SeisSrc
"""
SeisSrc: container for descriptions of a seismic source process
S = SeisSrc()
Initialize an empty SeisSrc object. Fields can be initialized at creation with keywords; for example, S = SeisSrc(m0 = 1.6e22).
| Field | Type | Meaning |
|:---- |:----- |:-------- |
| id | String | source process ID |
| eid | String | event ID (note, generally :id != :eid) |
| m0 | Float64 | scalar seismic moment |
| mt | Array{Float64,1} | seismic moment tensor |
| dm | Array{Float64,1} | seismic moment tensor misfit |
| npol | Int64 | number of polarities in focal mechanism |
| gap | Float64 | max azimuthal gap in focal mechanism |
| pax | Array{Float64,2} | principal axes |
| planes | Array{Float64,2} | nodal planes |
| src | String | data source string (filename or URL) |
| st | SourceTime | source-time subfield |
| misc | Dict{String,Any} | dictionary of non-essential information |
| notes | Array{String,1} | notes and automated logging |
See also: `EQLoc`, `EQMag`, `SourceTime`
"""
mutable struct SeisSrc
id ::String
eid ::String
m0 ::Float64
mt ::Array{Float64,1}
dm ::Array{Float64,1}
npol ::Int64
gap ::Float64
pax ::Array{Float64,2}
planes ::Array{Float64,2}
src ::String
st ::SourceTime
misc ::Dict{String,Any}
notes ::Array{String,1}
function SeisSrc(
id ::String,
eid ::String,
m0 ::Float64,
mt ::Array{Float64,1},
dm ::Array{Float64,1},
npol ::Int64,
gap ::Float64,
pax ::Array{Float64,2},
planes ::Array{Float64,2},
src ::String,
st ::SourceTime,
misc ::Dict{String,Any},
notes ::Array{String,1},
)
return new(id, eid, m0, mt, dm, npol, gap, pax, planes, src, st, misc, notes)
end
end
SeisSrc(;
id ::String = "",
eid ::String = "",
m0 ::Float64 = zero(Float64),
mt ::Array{Float64,1} = Float64[],
dm ::Array{Float64,1} = Float64[],
npol ::Int64 = zero(Int64),
gap ::Float64 = zero(Float64),
pax ::Array{Float64,2} = Array{Float64, 2}(undef, 0, 0),
planes ::Array{Float64,2} = Array{Float64, 2}(undef, 0, 0),
src ::String = "",
st ::SourceTime = SourceTime(),
misc ::Dict{String,Any} = Dict{String,Any}(),
notes ::Array{String,1} = String[],
) = SeisSrc(id, eid, m0, mt, dm, npol, gap, pax, planes, src, st, misc, notes)
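# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). All values are
# hypothetical; tensor component ordering follows whatever the data source uses.
function _seissrc_sketch()
  mt = [1.0e19, -0.3e19, -0.7e19, 0.2e19, -0.1e19, 0.4e19]   # six MT components
  return SeisSrc(id = "1", eid = "us7000abcd", m0 = 1.2e19, mt = mt, npol = 40,
                 st = SourceTime(desc = "triangle", dur = 8.0))
end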
function isempty(S::SeisSrc)
q::Bool = min((getfield(S, :m0) == zero(Float64)),
(getfield(S, :gap) == zero(Float64)),
(getfield(S, :npol) == zero(Int64)))
for f in (:id, :eid, :misc, :mt, :dm, :notes, :pax, :planes, :src, :st)
q = min(q, isempty(getfield(S, f)))
end
return q
end
function isequal(S::SeisSrc, U::SeisSrc)
q::Bool = true
for f in fieldnames(SeisSrc)
if f != :notes
q = min(q, getfield(S,f) == getfield(U,f))
end
end
return q
end
==(S::SeisSrc, U::SeisSrc) = isequal(S, U)
sizeof(S::SeisSrc) = 136 +
sum([sizeof(getfield(S, f)) for f in (:id, :eid, :mt, :dm, :pax, :planes, :src, :st, :misc, :notes)])
function show(io::IO, S::SeisSrc)
if get(io, :compact, false) == false
for f in fieldnames(SeisSrc)
fn = lpad(uppercase(String(f)), 6, " ")
if f == :misc
println(io, fn, ": ", length(getfield(S, :misc)), " items")
elseif f == :notes
println(io, fn, ": ", length(getfield(S, :notes)), " entries")
elseif f == :id || f == :src
println(io, fn, ": ", getfield(S, f))
else
println(io, fn, ": ", repr(getfield(S, f), context=:compact=>true))
end
end
else
c = :compact => true
# Order of preference: mt, np, pax
mech_str = string("m₀ = ", repr(getfield(S, :m0), context=c), "; S = ",
repr(getfield(S, :mt), context=c), "; ",
"NP = ", repr(getfield(S, :planes), context=c), "; ",
"PAX = ", repr(getfield(S, :pax), context=c))
L = length(mech_str)
L_max = displaysize(io)[2]
if L > L_max
print(io, mech_str[1:L_max-1], "…")
else
print(io, mech_str)
end
end
return nothing
end
show(S::SeisSrc) = show(stdout, S)
# SeisSrc
function write(io::IO, S::SeisSrc)
id = codeunits(getfield(S, :id))
eid = codeunits(getfield(S, :eid))
src = codeunits(getfield(S, :src))
# Write begins ------------------------------------------------------
write(io, Int64(length(id)))
write(io, id) # id
write(io, Int64(length(eid)))
write(io, eid) # eid
write(io, S.m0) # m0
write(io, Int64(length(S.mt)))
write(io, S.mt) # mt
write(io, Int64(length(S.dm)))
write(io, S.dm) # dm
write(io, S.npol) # npol
write(io, S.gap) # gap
r, c = size(S.pax)
write(io, Int64(r), Int64(c)),
write(io, S.pax) # pax
r, c = size(S.planes)
write(io, Int64(r), Int64(c)),
write(io, S.planes) # planes
write(io, Int64(length(src)))
write(io, src) # src
write(io, getfield(S, :st)) # st
write_misc(io, getfield(S, :misc)) # misc
write_string_vec(io, getfield(S, :notes)) # notes
# Write ends --------------------------------------------------------
return nothing
end
function read(io::IO, ::Type{SeisSrc})
u = getfield(BUF, :buf)
return SeisSrc( String(fastread(io, fastread(io, Int64))), # :id
String(fastread(io, fastread(io, Int64))), # :eid
fastread(io, Float64), # :m0
read!(io, Array{Float64, 1}(undef, fastread(io, Int64))), # :mt
read!(io, Array{Float64, 1}(undef, fastread(io, Int64))), # :dm
fastread(io, Int64), # :npol
fastread(io, Float64), # :gap
read!(io, Array{Float64, 2}(undef,
fastread(io, Int64), fastread(io, Int64))), # :pax
read!(io, Array{Float64, 2}(undef,
fastread(io, Int64), fastread(io, Int64))), # :planes
String(fastread(io, fastread(io, Int64))), # :src
read(io, SourceTime), # :st
read_misc(io, u), # :misc
read_string_vec(io, u) ) # :notes
end
export SourceTime
"""
SourceTime()
QuakeML-compliant seismic source-time parameterization.
Field | Type | Meaning | SeisIO conventions/behavior
--------: |:------- |:-------------- | :----------
desc | String | description |
dur | Float64 | duration |
rise | Float64 | rise time |
decay | Float64 | decay time |
"""
mutable struct SourceTime
desc ::String # description
dur ::Real # duration
rise ::Real # rise time
decay ::Real # decay time
function SourceTime(
desc ::String, # description
dur ::Real, # duration
rise ::Real, # rise time
decay ::Real, # decay time
)
return new(desc, dur, rise, decay)
end
end
SourceTime(;
desc ::String = "",
dur ::Real = zero(Float64),
rise ::Real = zero(Float64),
decay ::Real = zero(Float64),
) = SourceTime(desc, dur, rise, decay)
isempty(ST::SourceTime) = min(getfield(ST, :dur) == zero(Float64),
getfield(ST, :rise) == zero(Float64),
getfield(ST, :decay) == zero(Float64),
isempty(getfield(ST, :desc)))
function hash(ST::SourceTime)
h = hash(zero(UInt64))
for f in fieldnames(SourceTime)
h = hash(getfield(ST, f), h)
end
return h
end
function isequal(S::SourceTime, U::SourceTime)
q::Bool = isequal(getfield(S, :desc), getfield(U, :desc))
for f in (:dur, :rise, :decay)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
==(S::SourceTime, U::SourceTime) = isequal(S, U)
sizeof(ST::SourceTime) = 56 + sizeof(getfield(ST, :desc))
function write(io::IO, ST::SourceTime)
desc = getfield(ST, :desc)
write(io, Int64(sizeof(desc)))
write(io, desc)
for f in (:dur, :rise, :decay)
write(io, getfield(ST, f))
end
return nothing
end
function read(io::IO, ::Type{SourceTime})
ST = SourceTime()
L = fastread(io, Int64)
setfield!(ST, :desc, String(fastread(io, L)))
for f in (:dur, :rise, :decay)
setfield!(ST, f, fastread(io, Float64))
end
return ST
end
function show(io::IO, ST::SourceTime)
if get(io, :compact, false) == false
println(io, "SourceTime with fields:")
for f in (:desc, :dur, :rise, :decay)
fn = lpad(String(f), 5, " ")
println(io, fn, ": ", getfield(ST, f))
end
else
c = :compact => true
st_str = string("dur ", repr(getfield(ST, :dur), context=c),
", rise ", repr(getfield(ST, :rise), context=c),
", decay ", repr(getfield(ST, :decay), context=c))
print(io, str_trunc(st_str, max(80, displaysize(io)[2]-2) - show_os))
end
return nothing
end
convert(::Type{EventTraceData}, C::EventChannel) = EventTraceData(C)
function convert(::Type{EventTraceData}, S::T) where T <: GphysData
(T == EventTraceData) && (return deepcopy(S))
if T != SeisData
S = convert(SeisData, S)
end
TD = EventTraceData(getfield(S, :n))
for f in datafields
if (f in unindexed_fields) == false
setfield!(TD, f, deepcopy(getfield(S, f)))
end
end
return TD
end
EventTraceData(S::T) where T <: GphysData = convert(EventTraceData, S)
function convert(::Type{SeisData}, TD::EventTraceData)
S = SeisData(getfield(TD, :n))
for f in datafields
if (f in unindexed_fields) == false
setfield!(S, f, deepcopy(getfield(TD, f)))
end
end
return S
end
function convert(::Type{SeisChannel}, D::EventChannel)
C = SeisChannel()
for f in datafields
setfield!(C, f, deepcopy(getfield(D, f)))
end
return C
end
function convert(::Type{EventChannel}, C::T) where T <: GphysChannel
(T == EventChannel) && (return deepcopy(C))
if T != SeisChannel
    C = convert(SeisChannel, C)
end
D = EventChannel()
for f in datafields
setfield!(D, f, deepcopy(getfield(C, f)))
end
return D
end
EventChannel(C::T) where T <: GphysChannel = convert(EventChannel, C)
function unsafe_convert(::Type{SeisData}, TD::EventTraceData)
S = SeisData(getfield(TD, :n))
for f in datafields
if (f in unindexed_fields) == false
setfield!(S, f, getfield(TD, f))
end
end
return S
end
function unsafe_convert(::Type{EventTraceData}, S::SeisData)
TD = EventTraceData(getfield(S, :n))
for f in datafields
if (f in unindexed_fields) == false
setfield!(TD, f, getfield(S, f))
end
end
return TD
end
push!(TD::EventTraceData, C::SeisChannel) = push!(TD, convert(EventChannel, C))
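# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source): round-trip
# conversion between SeisData and EventTraceData using the methods above.
function _convert_sketch(S::SeisData)
  TD = convert(EventTraceData, S)   # deep copy; event-specific fields get defaults
  S2 = convert(SeisData, TD)        # back to a plain SeisData
  return (TD, S2)
end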
"""
distaz!(Ev::SeisEvent)
Compute Δ, Θ by the Haversine formula.
Updates `Ev.data` with distance, azimuth, and backazimuth for each channel,
written to Ev.data.dist, Ev.data.az, and Ev.data.baz, respectively.
"""
function distaz!(S::SeisEvent)
TD = getfield(S, :data)
ChanLoc = getfield(TD, :loc)
SrcLoc = getfield(getfield(S, :hdr), :loc)
N = getfield(TD, :n)
rec = Array{Float64, 2}(undef, N, 2)
for i = 1:N
loc = getindex(ChanLoc, i)
if typeof(loc) == GeoLoc
rec[i,:] = [loc.lat loc.lon]
else
error(string(":loc for channel ", i, " is not a GeoLoc!"))
end
end
D = gcdist(SrcLoc.lat, SrcLoc.lon, rec)
@assert size(D,1) == N
TD.dist = D[:,1]
TD.az = D[:,2]
TD.baz = D[:,3]
note!(TD, "distaz!")
return nothing
end
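# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Source and
# receiver coordinates are hypothetical; nothing executes at include time.
function _distaz_sketch()
  W = SeisEvent(hdr = SeisHdr(loc = EQLoc(lat = 46.20, lon = -122.18, dep = 2.0)),
                data = EventTraceData(1))
  W.data.loc[1] = GeoLoc(lat = 46.87, lon = -121.76)
  distaz!(W)                        # fills W.data.dist, W.data.az, W.data.baz
  return W.data.dist[1]
end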
gc_ctr(lat::Array{Float64,1}, lon::Array{Float64,1}) = (atan.(tan.(deg2rad.(lat)).*0.9933056), deg2rad.(lon))
gc_unwrap!(t::Array{Float64,1}) = (t[t .< 0] .+= (2.0*Float64(π)); return t)
@doc """
G = gcdist(src, rec)
Compute great circle distance, azimuth, and backazimuth from single source `s`
with coordinates `[s_lat, s_lon]` to receivers `r` with coordinates `[r_lat r_lon].`
For a single source, pass `src` as a Float64 vector of the form `[s_lat, s_lon]`;
gcdist will return an Array{Float64,2} of the form
[Δ₁ θ₁ β₁
Δ₂ θ₂ β₂
⋮ ⋮ ⋮
Δn θn βn]
for receivers `1:n`.
For multiple sources, pass `src` as an Array{Float64,2} with each row
containing one (lat, lon) pair. This returns a three-dimensional matrix where
each two-dimensional slice takes the form
[Δᵢ₁ θᵢ₁ βᵢ₁
⋮ ⋮ ⋮
Δᵢn θᵢn βᵢn]
for source `i` at receivers `1:n`.
""" gcdist
function gcdist(src::Array{Float64,1}, rec::Array{Float64,2})
N = size(rec, 1)
lat_src = repeat([src[1]], N)
lon_src = repeat([src[2]], N)
lat_rec = rec[:,1]
lon_rec = rec[:,2]
ϕ1, λ1 = gc_ctr(lat_src, lon_src)
ϕ2, λ2 = gc_ctr(lat_rec, lon_rec)
Δϕ = ϕ2 - ϕ1
Δλ = λ2 - λ1
a = sin.(Δϕ/2.0) .* sin.(Δϕ/2.0) + cos.(ϕ1) .* cos.(ϕ2) .* sin.(Δλ/2.0) .* sin.(Δλ/2.0)
Δ = 2.0 .* atan.(sqrt.(a), sqrt.(1.0 .- a))
A = atan.(sin.(Δλ).*cos.(ϕ2), cos.(ϕ1).*sin.(ϕ2) - sin.(ϕ1).*cos.(ϕ2).*cos.(Δλ))
B = atan.(-1.0.*sin.(Δλ).*cos.(ϕ1), cos.(ϕ2).*sin.(ϕ1) - sin.(ϕ2).*cos.(ϕ1).*cos.(Δλ))
# convert to degrees
return rad2deg.(hcat(Δ, gc_unwrap!(A), gc_unwrap!(B)))
end
gcdist(lat0::Float64, lon0::Float64, rec::Array{Float64,2}) = gcdist([lat0, lon0], rec)
gcdist(lat0::Float64, lon0::Float64, lat1::Float64, lon1::Float64) = gcdist([lat0, lon0], [lat1 lon1])
function gcdist(src::Array{Float64,2}, rec::Array{Float64,2})
N_src = size(src,1)
N_rec = size(rec,1)
G = Array{Float64,3}(undef, N_rec, 3, N_src)
for i = 1:N_src
G[:,:,i] = gcdist(src[i,:], rec)
end
return G
end
gcdist(src::Array{Float64,2}, rec::Array{Float64,1}) = gcdist(src, [rec[1] rec[2]])
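# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). Coordinates are
# hypothetical; the result holds [Δ θ β] in degrees, one row per receiver.
function _gcdist_sketch()
  src = [46.20, -122.18]                  # source lat, lon
  rec = [46.87 -121.76; 47.61 -122.33]    # one receiver per row
  return gcdist(src, rec)                 # 2×3 Array{Float64,2}
end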
function fwrite_note_quake!(C::Union{SeisHdr, SeisSrc}, method::String, fname::String, opts::String)
wstr = string(timestamp(), " ¦ write ¦ ", method, "(H", opts, ") ¦ wrote to file ", fname)
push!(C.notes, wstr)
return nothing
end
note!(R::SeisSrc, s::String) = push!(R.notes, tnote(s))
note!(H::SeisHdr, s::String) = push!(H.notes, tnote(s))
clear_notes!(U::Union{SeisHdr,SeisSrc}) = (U.notes = Array{String,1}(undef,1); U.notes[1] = tnote("notes cleared."); return nothing)
clear_notes!(Ev::SeisEvent) = (clear_notes!(Ev.hdr); clear_notes!(Ev.source); clear_notes!(Ev.data))
export FDSNevq, FDSNevt
"""
(H,R) = FDSNevq(ot)
Multi-server query for the events with the closest origin time to `ot`.
Returns an Array{SeisHdr,1} in H with event headers and an Array{SeisSrc,1}
in R with corresponding source process info.
### Keywords
| KW | Default | T [^1] | Meaning |
|----------|:----------- |:----------|:-------------------------------|
| evw | [600., 600.] | Float64 | search window in seconds [^2] |
| mag      | [6.0, 9.9]   | Float64   | search magnitude range          |
| nev | 0 | Integer | events per query [^3] |
| rad | [] | Float64 | radius search |
| reg | [] | Float64 | geographic search region |
| src [^4] | "IRIS" | String | data source; `?seis_www` lists |
| to | 30 | Int64 | timeout (s) for web requests |
| v | 0 | Integer | verbosity |
[^1]: `Array{T, 1}` for `evw`, `mag`, `rad`, `reg`; `T` for others
[^2]: search range is always `ot-|evw[1]| ≤ t ≤ ot+|evw[2]|`
[^3]: if `nev=0`, all matches are returned.
[^4]: In an event query, keyword `src` can be a comma-delimited list, like `"IRIS, INGV, NCEDC"`.
See also: `SeisIO.KW`, `?seis_www`
"""
function FDSNevq(ot::String;
evw::Array{Float64,1} = [600.0, 600.0],
mag::Array{Float64,1} = [6.0, 9.9],
nev::Integer = 0,
rad::Array{Float64,1} = KW.rad,
reg::Array{Float64,1} = KW.reg,
src::String = KW.src,
to::Int = KW.to,
v::Integer = KW.v)
if isempty(reg) && !isempty(rad)
if length(rad) == 4
append!(rad, [-30.0, 700.0])
end
search_coords = string( "&latitude=", rad[1], "&longitude=", rad[2],
"&minradius=", rad[3], "&maxradius=", rad[4],
"&mindepth=", rad[5], "&maxdepth=", rad[6] )
else
if isempty(reg)
reg = Float64[-90.0, 90.0, -180.0, 180.0, -30.0, 700.0]
elseif length(reg) == 4
append!(reg, [-30.0, 700.0])
end
search_coords = string("&minlat=", reg[1], "&maxlat=", reg[2],
"&minlon=", reg[3], "&maxlon=", reg[4],
"&mindepth=", reg[5], "&maxdepth=", reg[6])
end
# Determine time window
ot2::Float64 = try
d2u(DateTime(ot))
catch
if length(ot) <= 14
ot0 = string(ot[1:4],"-",ot[5:6],"-",ot[7:8],"T",ot[9:10],":",ot[11:12])
if length(ot) > 12
ot1 = string(ot0, ":", ot[13:14])
else
ot1 = string(ot0, ":00")
end
end
d2u(DateTime(ot1))
end
d0 = string(u2d(ot2 - abs(evw[1])))
d1 = string(u2d(ot2 + abs(evw[2])))
oti = round(Int64, ot2*sμ)
# multi-server query (most FDSN servers do NOT have an event service)
sources = String.(strip.(split(lowercase(src) == "all" ? "EMSC, INGV, IRIS, LMU, NCEDC, NIEP, ORFEUS, SCEDC, USGS, USP" : src, ",")))
sources = [strip(i) for i in sources]
EvCat = Array{SeisHdr,1}(undef, 0)
EvSrc = Array{SeisSrc,1}(undef, 0)
origin_times = Array{Int64,1}(undef, 0)
for k in sources
v > 1 && println(stdout, "Querying ", k)
url = string(fdsn_uhead(String(k)), "event/1/query?",
"starttime=", d0, "&endtime=", d1,
search_coords,
"&minmag=", mag[1], "&maxmag=", mag[2],
"&format=xml")
v > 0 && println(stdout, "URL = ", url)
req_info_str = "\nFDSN event query:"
(R, parsable) = get_http_req(url, req_info_str, to)
if parsable
str_req = String(R)
v > 1 && println(stdout, "REQUEST BODY:\n", str_req)
xdoc = parse_string(str_req)
event_xml!(EvCat, EvSrc, xdoc)
v > 1 && println(stdout, "CATALOG:\n", EvCat)
end
end
if nev > 0
# Sort based on earliest origin time
# sort!(EvCat, by = H -> abs(round(Int64, d2u(getfield(H, :ot))*sμ) - oti))
for H in EvCat
# push!(origin_times, round(Int64, d2u(getfield(H, :ot))*sμ))
push!(origin_times, abs(round(Int64, d2u(getfield(H, :ot))*sμ) - oti))
end
# k = sortperm(abs.(origin_times.-oti))
k = sortperm(origin_times)
n0 = min(length(EvCat), nev)
n0 < nev && @warn(string("Catalog only contains ", n0, " events (original request was ", nev,")"))
return EvCat[k[1:n0]], EvSrc[k[1:n0]]
# return EvCat[1:n0], EvSrc[1:n0]
else
return EvCat, EvSrc
end
end
"""
FDSNevt(ot::String, chans::String)
Get header and trace data for the event closest to origin time `ot` on channels
`chans`. Returns a SeisEvent structure.
### Keywords
| KW | Default | T [^1] | Meaning |
|----------|:----------- |:----------|:-------------------------------|
| evw | [600., 600.] | Float64 | search window in seconds [^2] |
| fmt | "miniseed" | String | request data format |
| len      | 120.0        | Float64   | desired trace length [minutes]  |
| mag      | [6.0, 9.9]   | Float64   | search magnitude range          |
| model | "iasp91" | String | velocity model for phases |
| nd | 1 | Real | number of days per subrequest |
| opts | "" | String | user-specified options[^3] |
| pha | "P" | String | phases to get [^4] |
| rad | [] | Float64 | radius search |
| reg | [] | Float64 | geographic search region |
| src | "IRIS" | String | data source; `?seis_www` lists |
| to | 30 | Int64 | timeout (s) for web requests |
| v | 0 | Integer | verbosity |
| w | false | Bool | write requests to disk? |
[^1]: KW is `Array{T, 1}` for `evw`, `mag`, `rad`, `reg`, type `T` for others
[^2]: Search range is always `ot-|evw[1]| ≤ t ≤ ot+|evw[2]|`
[^3]: Format like an http request string, e.g. "szsrecs=true&repo=realtime" for FDSN. String shouldn't begin with an ampersand.
[^4]: Comma-separated String, like `"P, pP"`; use `"ttall"` for all phases
See also: `distaz!`, `FDSNevq`, `FDSNsta`
"""
function FDSNevt(ot::String, chans::ChanOpts;
evw::Array{Float64,1} = [600.0, 600.0],
fmt::String = KW.fmt,
len::Real = 120.0,
mag::Array{Float64,1} = [6.0, 9.9],
model::String = "iasp91",
nd::Real = KW.nd,
opts::String = KW.opts,
pha::String = "P",
rad::Array{Float64,1} = KW.rad,
reg::Array{Float64,1} = KW.reg,
src::String = KW.src,
to::Int64 = KW.to,
v::Integer = KW.v,
w::Bool = KW.w
)
C = fdsn_chp(chans, v)
# Create header
v > 0 && println(stdout, now(), ": event query begins.")
(H,R) = FDSNevq(ot, nev=1,
rad=rad,
reg=reg,
mag=mag,
src=src,
to=to,
evw=evw,
v=v
)
H = H[1]
R = R[1]
# Create channel data
v > 0 && println(stdout, now(), ": data query begins.")
s = H.ot # Start time for FDSNsta is event origin time
t = u2d(d2u(s) + 60*len) # End time is len minutes later
(d0, d1) = parsetimewin(s,t)
S = SeisData()
# FDSNget!(S, C, fmt=fmt, nd=nd, rad=rad, reg=reg, s=d0, si=true, src=src, t=d1, to=to, v=v, w=w)
FDSNget!(S, C, d0, d1, false, fmt, false, nd, "", rad, reg, true, src, to, v, w, "FDSNsta.xml", false)
v > 1 && println(stdout, now(), ": channels initialized.")
v > 2 && println(stdout, S)
# Initialize SeisEvent structure
Ev = SeisEvent(hdr = H, source = R, data = S)
v > 1 && println(stdout, now(), ": SeisEvent created.")
v > 1 && println(stdout, S)
# Update Ev with distance, azimuth
distaz!(Ev)
v > 1 && println(stdout, now(), ": Δ,Θ updated.")
# Add phase arrivals to :data
v > 0 && println(stdout, now(), ": phase query begins.")
get_pha!(Ev, pha=pha, model=model, to=to, v=v)
return Ev
end
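# ----------------------------------------------------------------------------
# Usage sketch (illustrative; not part of the original source). The origin time,
# channel string, and keyword values are hypothetical, and both calls perform
# live web requests, so the sketch is wrapped in a function and never invoked.
function _fdsn_event_sketch()
  # event headers and source info only
  H, R = FDSNevq("2011-03-11T05:47:00", mag = [8.0, 9.9], nev = 1, src = "IRIS")
  # full event: header, source process, and 10 minutes of trace data with P phases
  W = FDSNevt("2011-03-11T05:47:00", "PB.B004..EH?,PB.B004..BS?", len = 10.0, pha = "P")
  return (H, R, W)
end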
export get_pha!
"""
get_pha!(Ev::SeisEvent[, keywords])
Query the IRIS online travel time calculator, which calls TauP [1-2]. Phase data are written in place to the phase catalogs in `Ev.data.pha`; returns `nothing`.
Keywords:
* pha: comma-separated String of phases ("P, S, SP")
* model: velocity model ("iasp91")
* to: timeout in seconds
* v: verbosity
### References
[1] TauP manual: http://www.seis.sc.edu/downloads/TauP/taup.pdf
[2] Crotwell, H. P., Owens, T. J., & Ritsema, J. (1999). The TauP Toolkit:
Flexible seismic travel-time and ray-path utilities, SRL 70(2), 154-160.
"""
function get_pha!(Ev::SeisEvent;
pha::String = KW.pha,
model::String = "iasp91",
to::Int64 = KW.to,
v::Integer = KW.v
)
# Check that distaz has been done
TD = getfield(Ev, :data)
N = getfield(TD, :n)
z = zeros(Float64, N)
if (TD.az == z) && (TD.baz == z) && (TD.dist == z)
v > 0 && println(stdout, "az, baz, and dist are unset; calling distaz!...")
distaz!(Ev)
end
# Generate URL and do web query
src_dep = getfield(getfield(getfield(Ev, :hdr), :loc), :dep)
if isempty(pha) || pha == "all"
pq = "&phases=ttall"
else
pq = string("&phases=", pha)
end
url_tail = string("&evdepth=", src_dep, pq, "&model=", model, "&mintimeonly=true&noheader=true")
# Loop begins
dist = getfield(TD, :dist)
PC = getfield(TD, :pha)
for i = 1:N
Δ = getindex(dist, i)
pcat = getindex(PC, i)
url = string("http://service.iris.edu/irisws/traveltime/1/query?", "distdeg=", Δ, url_tail)
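# Example of an assembled query URL (hypothetical Δ = 66.83 deg, depth = 19.7 km, pha = "P"):
# http://service.iris.edu/irisws/traveltime/1/query?distdeg=66.83&evdepth=19.7&phases=P&model=iasp91&mintimeonly=true&noheader=true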
v > 1 && println(stdout, "url = ", url)
req_info_str = string("\nIRIS travel time request:\nΔ = ", Δ, "\nDepth = ", src_dep, "\nPhases = ", pq, "\nmodel = ", model)
(R, parsable) = get_http_req(url, req_info_str, to)
# Parse results
if parsable
req = String(take!(copy(IOBuffer(R))))
pdat = split(req, '\n')
deleteat!(pdat, findall(isempty, pdat)) # can have trailing blank line
npha = length(pdat)
for j = 1:npha
pha = split(pdat[j], keepempty=false)
pcat[pha[10]] = SeisPha(0.0, # a
parse(Float64, pha[8]), # d
parse(Float64, pha[7]), # ia
0.0, # res
parse(Float64, pha[5]), # rp
parse(Float64, pha[6]), # ta
parse(Float64, pha[4]), # tt
0.0, # unc
' ', # pol
' ', # qual
)
end
end
end
return nothing
end
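# Usage sketch (assumes `Ev` is a SeisEvent already populated, e.g. by FDSNevt;
# the phase list and model are example values):
#   get_pha!(Ev, pha="P, S", model="iasp91")
#   Ev.data.pha[1]["P"].tt    # travel time (s) of the P arrival at channel 1
# (The commented table below shows example output from the IRIS travel time service.)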
# "Model: iasp91"
# "Distance Depth Phase Travel Ray Param Takeoff Incident Purist Purist "
# " (deg) (km) Name Time (s) p (s/deg) (deg) (deg) Distance Name "
# "--------------------------------------------------------------------------------------------"
# " 66.83 19.7 P 650.37 6.375 19.49 19.42 66.83 = P"
# " 66.83 19.7 pP 656.77 6.385 160.48 19.45 66.83 = pP"
# " 66.83 19.7 sP 659.32 6.383 168.84 19.45 66.83 = sP"
# " 66.83 19.7 PcP 678.82 4.167 12.59 12.55 66.83 = PcP"
# " 66.83 19.7 PP 797.49 8.699 27.08 26.98 66.83 = PP"
# " 66.83 19.7 PKiKP 1038.70 1.352 4.06 4.04 66.83 = PKiKP"
# " 66.83 19.7 pPKiKP 1045.47 1.352 175.94 4.04 66.83 = pPKiKP"
# " 66.83 19.7 sPKiKP 1047.95 1.352 177.65 4.04 66.83 = sPKiKP"
# " 66.83 19.7 S 1182.56 12.085 21.49 21.42 66.83 = S"
# " 66.83 19.7 pS 1190.65 12.109 140.68 21.46 66.83 = pS"
# " 66.83 19.7 sS 1193.48 12.101 158.48 21.45 66.83 = sS"
# " 66.83 19.7 SP 1201.90 13.645 24.43 45.38 66.83 = SP"
# " 66.83 19.7 PS 1204.86 13.644 45.55 24.35 66.83 = PS"
# " 66.83 19.7 SKS 1246.03 7.572 13.27 13.23 66.83 = SKS"
# " 66.83 19.7 SKKS 1246.05 7.585 13.29 13.25 66.83 = SKKS"
# " 66.83 19.7 ScS 1246.40 7.761 13.61 13.56 66.83 = ScS"
# " 66.83 19.7 SKiKP 1250.70 1.408 2.45 4.21 66.83 = SKiKP"
# " 66.83 19.7 pSKS 1254.86 7.573 156.66 13.23 66.83 = pSKS"
# " 66.83 19.7 sSKS 1257.44 7.573 166.73 13.23 66.83 = sSKS"
# " 66.83 19.7 SS 1441.81 15.478 27.98 27.88 66.83 = SS"
# " 66.83 19.7 PKIKKIKP 1860.33 1.411 4.23 4.22 293.17 = PKIKKIKP"
# " 66.83 19.7 SKIKKIKP 2072.34 1.352 2.35 4.05 293.17 = SKIKKIKP"
# " 66.83 19.7 PKIKKIKS 2074.81 1.352 4.06 2.34 293.17 = PKIKKIKS"
# " 66.83 19.7 SKIKKIKS 2286.62 1.297 2.25 2.25 293.17 = SKIKKIKS"
# " 66.83 19.7 PKIKPPKIKP 2359.03 1.666 5.00 4.98 293.17 = PKIKPPKIKP"
# " 66.83 19.7 PKPPKP 2361.68 2.976 8.96 8.93 293.17 = PKPPKP"
# " 66.83 19.7 PKPPKP 2364.24 3.928 11.86 11.82 293.17 = PKPPKP"
# " 66.83 19.7 SKIKSSKIKS 3216.94 1.426 2.48 2.47 293.17 = SKIKSSKIKS"
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4532 | const h_crit = Float32(1/sqrt(2))
const fc_vals = Tuple(vcat(Float64[1/120, 1/60], # STS-2, CMG-3T
repeat([1/30], 2), # CMG-40
[0.2], # Lennartz LE-3D
repeat([1.0], 5), # ...everything...
repeat([2.0], 2), # passive geophones
[4.5])) # passive industry geophones
const fs_vals = (0.1, 1.0, 2.0, 5.0, 10.0, 20.0, 25.0, 40.0, 50.0, 60.0, 62.5, 80.0, 100.0)
const irregular_units = ("%",
"%{cloud_cover}",
"{direction_vector}",
"Cel",
"K",
"{none}",
"Pa",
"T",
"V",
"W",
"m",
"m/m",
"m/s",
"m/s2",
"m3/m3",
"rad",
"rad/s",
"rad/s2",
"t{SO_2}")
# Acceptable type codes in :misc
const OK = ( 0x00, 0x01,
0x10, 0x11, 0x12, 0x13, 0x14,
0x20, 0x21, 0x22, 0x23, 0x24,
0x30, 0x31, 0x32,
0x50, 0x51, 0x52, 0x53, 0x54,
0x60, 0x61, 0x62, 0x63, 0x64,
0x70, 0x71, 0x72,
0x80, 0x81,
0x90, 0x91, 0x92, 0x93, 0x94,
0xa0, 0xa1, 0xa2, 0xa3, 0xa4,
0xb0, 0xb1, 0xb2,
0xd0, 0xd1, 0xd2, 0xd3, 0xd4,
0xe0, 0xe1, 0xe2, 0xe3, 0xe4,
0xf0, 0xf1, 0xf2 )
const evtypes = ( "not_existing",
"not_reported",
"anthropogenic_event",
"collapse",
"cavity_collapse",
"mine_collapse",
"building_collapse",
"explosion",
"accidental_explosion",
"chemical_explosion",
"controlled_explosion",
"experimental_explosion",
"industrial_explosion",
"mining_explosion",
"quarry_blast",
"road_cut",
"blasting_levee",
"nuclear_explosion",
"induced_or_triggered_event",
"rock_burst",
"reservoir_loading",
"fluid_injection",
"fluid_extraction",
"crash",
"plane_crash",
"train_crash",
"boat_crash",
"other_event",
"atmospheric_event",
"sonic_boom",
"sonic_blast",
"acoustic_noise",
"thunder",
"avalanche",
"snow_avalanche",
"debris_avalanche",
"hydroacoustic_event",
"ice_quake",
"slide",
"landslide",
"rockslide",
"meteorite",
"volcanic_eruption" )
const phase_list = ("P",
"PKIKKIKP",
"PKIKKIKS",
"PKIKPPKIKP",
"PKPPKP",
"PKiKP",
"PP",
"PS",
"PcP",
"S",
"SKIKKIKP",
"SKIKKIKS",
"SKIKSSKIKS",
"SKKS",
"SKS",
"SKiKP",
"SP",
"SS",
"ScS",
"pP",
"pPKiKP",
"pS",
"pSKS",
"sP",
"sPKiKP",
"sS",
"sSKS")
const pol_list = ('U', 'D', '-', '+', '_', ' ')
const loc_types = ("HYPOCENTER", "CENTROID", "AMPLITUDE", "MACROSEISMIC", "RUPTURE_START", "RUPTURE_END")
const loc_methods = ("HYPOELLIPSE", "HypoDD", "Velest", "centroid")
const hln = ('H','L','N')
const iclist = ('A','B','D','F','G','I','J','K','M','O','P','Q','R','S','T','U','V','W','Z')
const oid = ('O','I','D')
const zne = ('Z','N','E')
const nvc = ('A','B','C','1','2','3','U','V','W')
const oidfhu = ('O','I','D','F','H','U')
const icfo = ('I','C','F','O')
const junits = ("rad", "rad/s", "rad/s2")
const geodetic_datum = ("ETRS89", "GRS 80", "JGD2011")
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4531 | const h_crit = Float32(1/sqrt(2))
const fc_vals = vcat(Float64[1/120, 1/60], # STS-2, CMG-3T
repeat([1/30], 2), # CMG-40
[0.2], # Lennartz LE-3D
repeat([1.0], 5), # ...everything...
repeat([2.0], 2), # passive geophones
[4.5]) # passive industry geophones
const fs_vals = [0.1, 1.0, 2.0, 5.0, 10.0, 20.0, 25.0, 40.0, 50.0, 60.0, 62.5, 80.0, 100.0]
const irregular_units = ["%",
"%{cloud_cover}",
"{direction_vector}",
"Cel",
"K",
"{none}",
"Pa",
"T",
"V",
"W",
"m",
"m/m",
"m/s",
"m/s2",
"m3/m3",
"rad",
"rad/s",
"rad/s2",
"t{SO_2}"]
# Acceptable type codes in :misc
const OK = UInt8[0x00, 0x01,
0x10, 0x11, 0x12, 0x13, 0x14,
0x20, 0x21, 0x22, 0x23, 0x24,
0x30, 0x31, 0x32,
0x50, 0x51, 0x52, 0x53, 0x54,
0x60, 0x61, 0x62, 0x63, 0x64,
0x70, 0x71, 0x72,
0x80, 0x81,
0x90, 0x91, 0x92, 0x93, 0x94,
0xa0, 0xa1, 0xa2, 0xa3, 0xa4,
0xb0, 0xb1, 0xb2,
0xd0, 0xd1, 0xd2, 0xd3, 0xd4,
0xe0, 0xe1, 0xe2, 0xe3, 0xe4,
0xf0, 0xf1, 0xf2 ]
const evtypes = [ "not_existing",
"not_reported",
"anthropogenic_event",
"collapse",
"cavity_collapse",
"mine_collapse",
"building_collapse",
"explosion",
"accidental_explosion",
"chemical_explosion",
"controlled_explosion",
"experimental_explosion",
"industrial_explosion",
"mining_explosion",
"quarry_blast",
"road_cut",
"blasting_levee",
"nuclear_explosion",
"induced_or_triggered_event",
"rock_burst",
"reservoir_loading",
"fluid_injection",
"fluid_extraction",
"crash",
"plane_crash",
"train_crash",
"boat_crash",
"other_event",
"atmospheric_event",
"sonic_boom",
"sonic_blast",
"acoustic_noise",
"thunder",
"avalanche",
"snow_avalanche",
"debris_avalanche",
"hydroacoustic_event",
"ice_quake",
"slide",
"landslide",
"rockslide",
"meteorite",
"volcanic_eruption" ]
const phase_list = ["P",
"PKIKKIKP",
"PKIKKIKS",
"PKIKPPKIKP",
"PKPPKP",
"PKiKP",
"PP",
"PS",
"PcP",
"S",
"SKIKKIKP",
"SKIKKIKS",
"SKIKSSKIKS",
"SKKS",
"SKS",
"SKiKP",
"SP",
"SS",
"ScS",
"pP",
"pPKiKP",
"pS",
"pSKS",
"sP",
"sPKiKP",
"sS",
"sSKS"]
const pol_list = ['U', 'D', '-', '+', '_', ' ']
const loc_types = ["HYPOCENTER", "CENTROID", "AMPLITUDE", "MACROSEISMIC", "RUPTURE_START", "RUPTURE_END"]
const loc_methods = ["HYPOELLIPSE", "HypoDD", "Velest", "centroid"]
const hln = ['H','L','N']
const iclist = ['A','B','D','F','G','I','J','K','M','O','P','Q','R','S','T','U','V','W','Z']
const oid = ['O','I','D']
const zne = ['Z','N','E']
const nvc = ['A','B','C','1','2','3','U','V','W']
const oidfhu = ['O','I','D','F','H','U']
const icfo = ['I','C','F','O']
const junits = ["rad", "rad/s", "rad/s2"]
const geodetic_datum = ["ETRS89", "GRS 80", "JGD2011"]
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |