licenses | version | tree_hash | path | type | size | text | package_name | repo |
---|---|---|---|---|---|---|---|---|
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 105 | export randSeisChannel,
randSeisData,
randSeisEvent,
randSeisHdr,
randPhaseCat,
randSeisSrc
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1677 | rand_seis_unit() = rand() > 0.5 ? "m" : "m/s"
rand_seis_cc() = rand(rand() > 0.2 ? zne : nvc)
const cDict = Dict{Char, Function}(
'H' => i->rand_seis_cc(),
'L' => i->rand_seis_cc(),
'N' => i->rand_seis_cc(),
'A' => i->rand() > 0.5 ? 'N' : 'E',
'B' => i->'_',
'D' => i->rand(oidfhu),
'F' => i->rand(zne),
'G' => i->rand(nvc),
'I' => i->rand(oid),
'J' => i->rand(nvc),
'K' => i->rand(oid),
'M' => i->rand(nvc),
'O' => i->'_',
'P' => i->rand(zne),
'Q' => i->'_',
'R' => i->'_',
'S' => i->rand(zne),
'T' => i->'Z',
'U' => i->'_',
'V' => i->'_',
'W' => i->rand() > 0.5 ? 'S' : 'D',
'Z' => i->rand(icfo),
)
const uDict = Dict{Char, Function}(
'H' => i->rand_seis_unit(),
'N' => i->"m/s2",
'L' => i->rand_seis_unit(),
'A' => i->"rad",
'B' => i->"m",
'D' => i->"Pa",
'F' => i->"T",
'G' => i->"m/s2",
'I' => i->"%",
'J' => i->rand(junits),
'K' => i->rand() > 0.5 ? "Cel" : "K",
'M' => i->"m",
'O' => i->"m/s",
'P' => i->rand_seis_unit(),
'Q' => i->"V",
'R' => i->rand_seis_unit(),
'S' => i->"m/m",
'T' => i->"m",
'U' => i->"%{cloud_cover}",
'V' => i->"m3/m3",
'W' => i->i=='S' ? "m/s" : "{direction_vector}",
'Z' => i->rand_seis_unit(),
)
"""
(cha, u) = iccodes_and_units(b::Char, s::Bool)
Using band code `b`, generate a quasi-sane random instrument code `i` and channel code `c`, returning channel string `cha` = `b`*`i`*`c` and unit string `u`. If `s=true`, use only seismic instrument codes.
"""
function iccodes_and_units(b::Char, s::Bool)
i = rand(s ? hln : iclist)
c = cDict[i](b)
u = uDict[i](c)
return string(b, i, c), u
end
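# Usage sketch (editor's addition, not part of the original source; the helper
# name is arbitrary): draw one channel code and unit string for band code 'B'
# (broadband), restricted to seismic instrument codes. Assumes the code lists
# referenced above (`hln`, `iclist`, `zne`, `nvc`, ...) are defined elsewhere
# in this package.
example_bb_code() = iccodes_and_units('B', true)   # e.g. ("BHZ", "m/s")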
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 112 | import Dates: now
import SeisIO: InstrumentPosition,
code2typ,
fctoresp,
getbandcode,
note!,
sμ,
μs
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5733 | rand_net() = uppercase(randstring(2))
rand_sta() = randstring('A':'Z', rand(3:5))
rand_loc() = rand() < 0.5 ? "" : lpad(rand(0:9), 2, "0")
rand_tmax() = rand(1200:7200)
rand_ts() = round(Int64, sμ*(time() - 86400.0 + randn()))
rand_irr_id() = string(rand_net(), ".", rand_sta(), ".", rand_loc(), ".OY", rand('A':'Z'))
rand_hc() = (rand() > 0.8) ? h_crit : 1.0f0
function mk_fc(fs::Float64)
f_min = 0.5*fs
fc = rand(fc_vals)
while f_min ≤ fc
fc = rand(fc_vals)
end
return fc
end
function rand_reg_id(fs::Float64, s::Bool)
bc = getbandcode(fs)
cha, units = iccodes_and_units(bc, s)
# faster and less memory than join(), but looks clumsy
id = string(rand_net(), ".", randstring('A':'Z', rand(3:5)), ".", rand_loc(), ".", cha)
return id, units
end
function rand_loc(randomize_loctype::Bool)
y = randomize_loctype == true ? rand() : 1.0
datum = rand_datum()
loc = [ rand_lat(),
rand_lon(),
1000.0 * rand(),
1000.0 * rand(),
rand_lat(),
90.0 * rand() ]
if y > 0.25
return GeoLoc(datum, loc...)
else
g = GenLoc(loc)
g.datum = datum
return g
end
end
function rand_resp(fc::Float64, n::Int64)
if n > 0
T = Float64
i = max(1, div(n,2))
else
T = rand() < 0.5 ? Float32 : Float64
i = rand(2:2:8)
end
pstub = T(10.0).*rand(T, i)
z = zeros(complex(T), 2*i)
p = vcat(pstub .+ pstub.*im, pstub .- pstub*im)
if T == Float32
resp = PZResp(1.0f0, Float32(fc), p, z)
else
resp = PZResp64(1.0, fc, p, z)
end
return resp
end
# function rand_t(fs::Float64, nx::Int64)
function rand_t(fs::Float64, nx::Int64, n::Int64, gs::Int64)
ts = rand_ts()
ngaps = n < 0 ? rand(0:9) : n
L = ngaps + 2
Δ = sμ/fs
δ = 0.5*Δ + 1.0
t = zeros(Int64, L, 2)
# first row is always start time; no gap in last row for now
t[1,1] = 1
t[1,2] = ts
t[L,1] = nx
# rest are random-length time gaps
if ngaps > 0
r = ((gs > 1) && ((nx-1)/gs > ngaps)) ? range(gs, step=gs, stop=nx-1) : range(2, step=1, stop=nx-1)
gi = rand(r, ngaps)
sort!(gi)
unique!(gi)
lg = length(gi)
while lg < ngaps
append!(gi, rand(r))
sort!(gi)
unique!(gi)
lg = length(gi)
end
# Generate Gaussian-distributed gap lengths
gl = ceil.(Int64, max.(δ, Δ .* min.(1.0e5, 10.0 .^ abs.(randn(ngaps)))))
# .* ((-1).^rand(Bool, ngaps))
t[2:ngaps+1,1] .= gi
t[2:ngaps+1,2] .= gl
end
return t
end
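# Shape note (editor's addition): the matrix returned above follows the SeisIO
# sparse time convention: column 1 holds sample indices, column 2 holds times in μs.
# For nx = 1000 samples and two gaps it looks roughly like
#   [    1  ts       # row 1: start time ts
#      217  gap_1    # gap of gap_1 μs before sample 217
#      804  gap_2    # gap of gap_2 μs before sample 804
#     1000  0     ]  # last row: sample count, no time adjustment
# where the gap indices and lengths are random; only the first and last rows are fixed.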
function pop_chan_tail!(C::GphysChannel, c::Bool, nx::Int64)
src_str = string("randSeisChannel(c=", c, ", nx=", nx, ")")
setfield!(C, :name, randstring(rand(12:64))) # :name
setfield!(C, :gain, rand()*10^rand(0:10)) # :gain
setfield!(C, :loc, rand_loc(true)) # :loc
setfield!(C, :src, src_str) # :src
setfield!(C, :misc, rand_misc(rand(4:24))) # :misc
note!(C, "+source ¦ " * src_str)
return nothing
end
# Populate a channel with irregularly-sampled (campaign-style) data
function populate_irr!(C::SeisChannel, nx::Int64)
# number of samples
L = nx < 1 ? 2^rand(6:12) : nx
# start time
ts = rand_ts()
# generate time matrix that always starts at ts with no duplicate sample times
t_max = round(Int64, sμ*rand_tmax())
ti = rand(1:t_max, L)
ti[1] = zero(Int64)
unique!(ti)
sort!(ti)
broadcast!(+, ti, ti, ts)
L = length(ti)
t = zeros(Int64, L, 2)
t[:,1] .= 1:L
t[:,2] .= ti
# eh, let's use a uniform distribution for irregularly-sampled time series
x = (rand() > 0.5) ? rand(Float64, L) .- 0.5 : rand(Float64, L)
# fill fields of C
C.id = rand_irr_id()
C.fs = zero(Float64)
C.units = rand(irregular_units)
C.t = t
C.x = x
pop_chan_tail!(C, true, nx)
return nothing
end
# Populate a channel with regularly-sampled (time-series) data
function populate_chan!(C::SeisChannel, s::Bool, nx::Int64, fs_min::Float64, f0::Float64)
# determine length, fs, fc
fs = rand(fs_vals)
while fs < fs_min
fs = rand(fs_vals)
end
Lx = nx < 1 ? ceil(Int64, fs*rand_tmax()) : nx
gs = (fs_min ≤ 0.0) ? round(Int64, 2*fs_min) : 1
fc = mk_fc(fs)
resp = (f0 > 0.0) ? fctoresp(Float32(f0), rand_hc()) : rand_resp(fc, 0)
# populate channel
C.id, C.units = rand_reg_id(fs, s)
C.fs = fs # fs
C.resp = resp # resp
C.t = rand_t(fs, Lx, -1, gs) # t
C.x = randn(rand() < 0.5 ? Float32 : Float64, Lx) # x
pop_chan_tail!(C, false, nx)
return nothing
end
"""
randSeisChannel()
Generate a random channel of geophysical time-series data as a SeisChannel.
### Keywords
| KW | Default | Type | Meaning |
|-------- |:--------|:----------|:------------------------------- |
| s | false | Bool | force channel to have seismic data? |
| c | false | Bool | force channel to have irregular data? |
| nx | 0 | Int64 | number of samples in channel [^1] |
| fs_min | 0.0 | Float64 | channels will have fs ≥ fs_min |
| fc | 0.0 | Float64 | rolloff frequency [^2] |
[^1]: if `nx ≤ 0`, the number of samples is determined randomly
[^2]: specifying `fc` with `c=false` returns a geophone instrument response
See also: `randSeisData`, `fctoresp`
"""
function randSeisChannel(; c::Bool=false, s::Bool=false, nx::Int64=0, fs_min::Float64=0.0, fc::Float64=0.0)
Ch = SeisChannel()
if c == true
populate_irr!(Ch, nx)
else
populate_chan!(Ch, s, nx, fs_min, fc)
end
return Ch
end
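# Usage sketch (editor's addition, not part of the original source; the demo
# function name is arbitrary). The keywords match the table above.
function randSeisChannel_demo()
  C1 = randSeisChannel()                      # fully random channel
  C2 = randSeisChannel(s=true, fs_min=50.0)   # seismic data with fs ≥ 50 Hz
  C3 = randSeisChannel(c=true, nx=256)        # irregularly-sampled, 256 samples
  return C1, C2, C3
end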
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2764 | # Populate a SeisData structure with random channels
"""
randSeisData()
Generate 8 to 24 channels of random seismic data as a SeisData object.
randSeisData(N)
Generate `N` channels of random seismic data as a SeisData object.
### Keywords
| KW | Default | Type | Meaning |
|-------- |:--------|:----------|:------------------------------- |
| s | 0.6 | Float64 | chance each channel is seismic data |
| c | 0.2 | Float64 | chance each channel is irregular (fs=0) |
| nx | 0 | Int64 | number of samples in each channel |
| fs_min | 0.0 | Float64 | regular channels have fs ≥ fs_min |
| a0 | false | Bool | recalculate :resp constants? |
### Behavior
* If `nx` ≤ 0, the number of samples is determined randomly for each channel
* `100*s` is the minimum percentage of channels with guaranteed seismic data
* `100*c` is the maximum percentage of channels with irregularly-sampled data
* `s` takes precedence over `c`; they are not renormalized. Thus, for example:
+ `randSeisData(12, c=1.0, s=1.0)` works like `randSeisData(12, c=0.0, s=1.0)`
+ `randSeisData(c=1.0)` works like `randSeisData(s=0.0)`
* For `N` channels, `s*N` is rounded up; `c*N` is rounded down. Thus:
+ `randSeisData(10, c=0.28)` and `randSeisData(10, c=0.2)` are equivalent
+ `randSeisData(10, s=0.28)` and `randSeisData(10, s=0.3)` are equivalent
* By default, `:resp.a0 = 1.0` for all channels; `a0=true` calls `resp_a0!(S)`
+ `:resp.p` and `:resp.z` are random; `a0=true` isn't guaranteed to work
See also: `randSeisChannel`, `resp_a0!`
"""
function randSeisData(N::Int64; c::Float64=0.2, s::Float64=0.6, nx::Integer=0, fs_min::Float64=0.0, a0::Bool=false)
S = SeisData(N)
# Evaluate probabilities, with s taking precedence
if s != 0.6
c = max(0.0, 1.0-s)
end
if c != 0.2
s = max(0.0, 1.0-c)
end
# determine number of channels of each
n_seis = max(min(ceil(Int, s*S.n), S.n), 0)
n_irr = max(min(floor(Int, c*S.n), S.n-n_seis), 0)
# fill in and shuffle data_spec
data_spec = zeros(UInt8, S.n)
data_spec[1:n_seis] .= 0x01
data_spec[n_seis+1:n_seis+n_irr] .= 0x02
shuffle!(data_spec)
# populate all channels according to data_spec
for i = 1:S.n
if data_spec[i] == 0x01
S[i] = randSeisChannel(s=true, nx=nx, fs_min=fs_min)
elseif data_spec[i] == 0x02
S[i] = randSeisChannel(c=true, nx=nx, fs_min=fs_min)
else
S[i] = randSeisChannel(nx=nx, fs_min=fs_min)
end
end
a0 && resp_a0!(S)
return S
end
randSeisData(; c::Float64=0.2, s::Float64=0.6, nx::Int64=0, fs_min::Float64=0.0, a0::Bool=false) = randSeisData(rand(8:24), c=c, s=s, nx=nx, fs_min=fs_min, a0=a0)
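# Usage sketch (editor's addition, not part of the original source; the demo
# function name is arbitrary): 12 channels, all guaranteed seismic, none
# irregular, 10000 samples each.
randSeisData_demo() = randSeisData(12, s=1.0, c=0.0, nx=10000)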
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4954 | """
randSeisSrc()
Generate a SeisSrc structure filled with random values.
"""
function randSeisSrc(; mw::Float32=0.0f0)
R = SeisSrc()
m0 = 1.0e-7*(10^(1.5*((mw == zero(Float32) ? 10^(rand(Float64)-1.0) : Float64(mw))+10.7)))
setfield!(R, :id, string(rand(1:2^12))) # :id
setfield!(R, :m0, m0) # :m0
setfield!(R, :gap, abs(rand_lon())) # :gap
setfield!(R, :misc, rand_misc(rand(4:24))) # :misc
setfield!(R, :mt, m0.*(rand(6).-0.5)) # :mt
setfield!(R, :dm, m0.*(rand(6).-0.5)) # :dm
setfield!(R, :npol, rand(Int64(6):Int64(120))) # :npol
[note!(R, randstring(rand(16:256))) for i = 1:rand(1:6)] # :notes
setfield!(R, :pax, vcat(rand_lon(2,3),
(m0*rand() < 0.5 ? -1 : 1) .* [rand() -1.0*rand() rand()])) # :pax
setfield!(R, :planes, rand_lon(3,2)) # :planes
setfield!(R, :src, R.id * ",randSeisSrc") # :src
note!(R, "+origin ¦ " * R.src)
setfield!(R, :st, SourceTime(randstring(2^rand(6:8)), rand(), rand(), rand()))
return R
end
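# Note (editor's addition): the scalar moment used above (and again in
# randSeisHdr below) follows the Hanks-Kanamori relation
# M0[dyn*cm] = 10^(1.5*(Mw + 10.7)); the 1.0e-7 prefactor converts dyn*cm to
# N*m. For example, Mw = 5.0 gives M0 = 10^16.55 ≈ 3.5e16 N*m.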
"""
randSeisHdr()
Generate a SeisHdr structure filled with random values.
"""
function randSeisHdr()
H = SeisHdr()
# moment magnitude and m0 in SI units
mw = Float32(log10(10.0^rand(0.0:0.1:7.0)))
m0 = 1.0e-7*(10^(1.5*(Float64(mw)+10.7)))
# a random earthquake location
dmin = 10.0^(rand()+0.5)
dmax = dmin + exp(mw)*10.0^rand()
loc = EQLoc(
(rand(0.0:1.0:89.0) + rand()) * (-1.0)^rand(1:2),
(rand(0.0:1.0:179.0) + rand()) * (-1.0)^rand(1:2),
min(10.0*randexp(Float64), 660.0),
rand()*4.0,
rand()*4.0,
rand()*8.0,
rand(),
rand(),
rand(),
10.0*10.0^rand(),
dmin,
dmax,
4 + round(Int64, rand()*exp(mw)),
rand(0x00:0x10:0xf0),
rand_datum(),
rand(loc_types),
"",
rand(loc_methods))
# setfield!(loc, :sig, "1σ")
# event type
rtyp = rand()
typ = rtyp > 0.3 ? "earthquake" : rand(evtypes)
# Generate random header
setfield!(H, :id, string(rand(1:2^62))) # :id
setfield!(H, :int, (mw < 0.0f0 ? 0x00 : floor(UInt8, mw),
randstring(rand(2:4)))) # :int
setfield!(H, :loc, loc) # :loc
setfield!(H, :mag, EQMag( val = mw, # :mag
scale = (rand() < 0.5 ? "m" : "M") * randstring(2),
nst = rand(3:100),
gap = abs(rand_lon()),
src = randstring(24)
)
)
setfield!(H, :misc, rand_misc(rand(4:24))) # :misc
[note!(H, randstring(rand(16:256))) for i = 1:rand(1:6)] # :notes
setfield!(H, :ot, now()) # :ot
setfield!(H, :typ, typ) # :typ
# header :src
H.src = "randSeisHdr:" * H.id
note!(H, "+origin ¦ " * H.src)
return H
end
"""
randPhaseCat()
Generate a random seismic phase catalog suitable for testing EventChannel,
EventTraceData, and SeisEvent objects.
"""
function randPhaseCat(n::Int64)
npha = (n <= 0) ? rand(3:18) : n
phase = Array{String, 1}(undef, npha)
for j = 1:length(phase)
phase[j] = rand(phase_list)
end
unique!(phase)
P = PhaseCat()
for j in phase
P[j] = SeisPha(rand(), rand(), rand(), rand(), rand(), rand(), rand(), rand(), rand(pol_list), Char(rand(0x30:0x39)))
end
return P
end
randPhaseCat() = randPhaseCat(0)
"""
randSeisEvent([, c=0.0, s=1.0])
Generate a SeisEvent structure filled with random header and channel data.
* `100*c` is the percentage of :data channels _after the first_ with irregularly-sampled data (fs = 0.0)
* `100*s` is the percentage of :data channels _after the first_ with guaranteed seismic data.
See also: `randSeisChannel`, `randSeisData`, `randSeisHdr`, `randSeisSrc`
"""
function randSeisEvent(N::Int64; c::Float64=0.0, s::Float64=1.0, nx::Int64=0)
V = SeisEvent(hdr=randSeisHdr(),
source=randSeisSrc(),
data=convert(EventTraceData, randSeisData(N, c=c, s=s, nx=nx)))
V.source.eid = V.hdr.id
V.hdr.loc.src = V.hdr.id * "," * V.hdr.loc.src
for i = 1:V.data.n
setindex!(getfield(getfield(V, :data), :pha), randPhaseCat(), i)
setindex!(getfield(getfield(V, :data), :az), rand_lat(), i)
setindex!(getfield(getfield(V, :data), :baz), rand_lat(), i)
setindex!(getfield(getfield(V, :data), :dist), abs(rand_lon()), i)
end
return V
end
randSeisEvent(; c::Float64=0.0, s::Float64=1.0, nx::Int64=0) = randSeisEvent(rand(8:24), c=c, s=s, nx=nx)
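# Usage sketch (editor's addition, not part of the original source; the demo
# function name is arbitrary): a random event with 6 trace-data channels, plus
# a standalone phase catalog with up to 8 distinct phases.
function randSeisEvent_demo()
  V = randSeisEvent(6, s=1.0, c=0.0)
  P = randPhaseCat(8)
  return V, P
end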
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 965 | rand_lat() = 180 * (rand()-0.5)
rand_lon() = 360 * (rand()-0.5)
rand_lon(m::Int64, n::Int64) = 360.0 .* (rand(Float64, m, n) .- 0.5)
rand_datum() = rand() > 0.2 ? "WGS-84" : rand(geodetic_datum)
# Fill :misc with garbage
function rand_misc(N::Integer)
D = Dict{String, Any}()
for n = 1:N
t = code2typ(rand(OK))
k = randstring(rand(2:12))
if t <: Real
D[k] = rand(t)
elseif t <: Complex
D[k] = rand(Complex{real(t)})
elseif t <: Array
y = eltype(t)
if y <: Number
D[k] = rand(y, rand(1:1000))
end
end
end
(haskey(D, "hc")) && (delete!(D, "hc"))
return D
end
function repop_id!(S::GphysData; s::Bool=true)
while length(unique(S.id)) < length(S.id)
for i = 1:S.n
fs = S.fs[i]
if fs == 0.0
S.id[i] = rand_irr_id()
else
S.id[i], S.units[i] = rand_reg_id(fs, s)
end
end
end
return nothing
end
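# Usage sketch (editor's addition, not part of the original source; the helper
# name is arbitrary): build a random SeisData object and regenerate channel IDs
# (and units, for regularly-sampled channels) until no duplicates remain.
function unique_rand_data(n::Int64)
  S = randSeisData(n)
  repop_id!(S, s=true)
  return S
end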
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2473 |
function store_int!(X::Array{Int64,1}, buf::Array{UInt8,1}, i::Int64, n::Int64)
n == 0 && return
nx = length(X)
if n > 0
(n > nx) && append!(X, zeros(Int64, n-nx))
X[n] = buf_to_int(buf, i)
end
return nothing
end
function store_dbl!(X::Array{Float64,1}, buf::Array{UInt8,1}, i::Int64, n::Int64)
n == 0 && return
nx = length(X)
if n > 0
(n > nx) && append!(X, zeros(Float64, n-nx))
X[n] = buf_to_double(buf, i)
end
return nothing
end
function get_coeff_n(io::IO, c::UInt8, buf::Array{UInt8,1})
i = 1
while c != 0x20
buf[i] = c
i += 1
c = fastread(io)
end
return buf_to_int(buf, i-1)+1
end
function to_newline(io::IO, c::UInt8)
while c != 0x0a
c = fastread(io)
end
return c
end
function skip_whitespace(io::IO, c::UInt8)
c = fastread(io)
while c == 0x20
c = fastread(io)
end
return c
end
function parse_resp_date(io::IO, T::Array{UInt16,1})
fill!(T, zero(UInt16))
i = mark(io)
k = 1
while true
c = fastread(io)
if c in (0x2c, 0x2e, 0x3a)
L = fastpos(io)-i-1
reset(io)
T[k] = stream_int(io, L)
fastskip(io, 1)
i = mark(io)
k += 1
elseif c == 0x7e
L = fastpos(io)-i-1
if L > 0
reset(io)
T[k] = stream_int(io, L)
fastskip(io, 1)
end
break
end
end
return mktime(T)
end
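# Format note (editor's addition): the loop above consumes dataless SEED time
# strings of the form "YYYY,DDD,HH:MM:SS.FFFF~" (year, day-of-year, then time),
# e.g. "1998,321,03:14:15.9265~", splitting on ',' (0x2c), '.' (0x2e), and ':'
# (0x3a) and stopping at the field terminator '~' (0x7e); an empty field ("~"
# alone) leaves T zeroed.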
function string_field(sio::IO)
m = mark(sio)
c = fastread(sio)
while c != 0x7e
c = fastread(sio)
end
p = fastpos(sio)-m
reset(sio)
return String(fastread(sio, p))[1:p-1]
end
function skip_string!(sio::IO)
c = fastread(sio)
while c != 0x7e
c = fastread(sio)
end
return nothing
end
function blk_string_read(io::IO, nb::Int64, v::Integer)
checkbuf!(BUF.buf, nb)
if nb + BUF.k > 4096
part1 = 4096-BUF.k
fast_readbytes!(io, BUF.buf, part1)
fastread!(io, BUF.seq)
nb -= part1
while nb > 4088
bv = pointer(BUF.buf, part1+1)
fast_unsafe_read(io, bv, 4088)
fastread!(io, BUF.seq)
part1 += 4088
nb -= 4088
end
bv = pointer(BUF.buf, part1+1)
fast_unsafe_read(io, bv, nb)
BUF.k = 8+nb
else
fast_readbytes!(io, BUF.buf, nb)
BUF.k += nb
end
sio = IOBuffer(BUF.buf)
return sio
end
seed_time(u16::Array{UInt16, 1}, hh::UInt8, mm::UInt8, ss::UInt8, δt::Int64) =
y2μs(u16[1]) + Int64(u16[2] - one(UInt16))*86400000000 + Int64(u16[3])*100 +
Int64(hh)*3600000000 + Int64(mm)*60000000 + Int64(ss)*1000000 + δt
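# Units note (editor's addition): u16 holds the year, day-of-year, and 0.0001-s
# fields of the SEED fixed header. Each term is converted to μs: 86400000000 μs
# per day, a factor of 100 to go from 0.0001-s ticks to μs, and δt is a time
# correction already expressed in μs.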
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 20766 | # ===========================================================================
# VOLUME CONTROL BLOCKETTES
function blk_010!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
v > 1 && println("")
close(sio)
return nothing
end
function blk_011!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
if v > 1
println("")
if v > 2
nsta = stream_int(sio, 3)
println(" "^16, lpad("STA", 5), " | SEQ")
println(" "^16, "------|-------")
@inbounds for i = 1:nsta
sta = String(fastread(sio, 5))
seq = stream_int(sio, 6)
println(" "^16, sta, " | ", seq)
end
end
end
close(sio)
return nothing
end
function blk_012!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
if v > 1
N = stream_int(sio, 4)
println(", N_SPANS = ", N)
end
close(sio)
return nothing
end
# ===========================================================================
# ABBREVIATION CONTROL HEADERS
# [30] is out of scope for SeisIO
function blk_030!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
if v > 1
desc = string_field(sio)
code = stream_int(sio, 4)
fam = stream_int(sio, 3)
println(", CODE ", code, ", DESC ", desc, ", FAM ", fam)
if v > 2
ND = stream_int(sio, 2)
@inbounds for i = 1:ND
desc = string_field(sio)
println(" "^18, desc)
end
end
end
close(sio)
return nothing
end
function blk_031!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
k = stream_int(sio, 4)
class = fastread(sio)
desc = string_field(sio)
if v > 1
units = stream_int(sio, 3)
println(", CODE ", k, ", CLASS ", Char(class), ", DESC ", desc, ", UNITS ", units)
end
close(sio)
comments[k] = replace(desc, "," => "-")
return nothing
end
function blk_032!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
if v > 1
code = stream_int(sio, 2)
pub = string_field(sio)
date = string_field(sio)
name = string_field(sio)
println(", CODE ", code, ", REFERENCE ", pub, ", DATE ", date, ", PUBLISHER ", name)
end
close(sio)
return nothing
end
function blk_033!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
code = stream_int(sio, 3)
desc = string_field(sio)
close(sio)
v > 1 && println(", CODE ", code, ", DESC ", desc)
abbrev[code] = desc
return nothing
end
function blk_034!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
code = stream_int(sio, 3)
name = string_field(sio)
if v > 1
desc = string_field(sio)
println(", CODE ", code, ", NAME ", name, ", DESC ", desc)
end
close(sio)
units_lookup[code] = fix_units(name)
return nothing
end
function blk_041!(io::IO, nb::Int64, v::Integer, units::Bool)
sio = blk_string_read(io, nb, v)
resp_lookup_key = stream_int(sio, 4)
skip_string!(sio)
symm_code = fastread(sio)
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
#= NF can be completely wrong here. Two issues:
(1) The SEED manual quietly suggests writing only part of "symmetric" FIR
filters to file; in which case NF is not the length of the vector we need. =#
NF = stream_int(sio, 4)
if symm_code == 0x42
NF = div(NF, 2)+1
elseif symm_code == 0x43
NF = div(NF, 2)
end
#= (2) buried in the SEED manual, it's noted that the maximum size of
a non-data blockette length is 9999 chars, and type [41] can exceed it.
In this case, the sample files tell me that NF is dead wrong; workaround is
to read as many FIR values as possible and fix with append! of next packet.
We've already used 7 chars on blockette type & size; rest are in sio.
=#
p = fastpos(sio)
NF_true = min(NF, div(9992-p, 14))
if v > 1
println(", KEY ", resp_lookup_key, ", SYMM ", Char(symm_code))
if v > 2
println(" "^16, "units in = ", units_lookup[uic])
println(" "^16, "units out = ", units_lookup[uoc])
end
println(" "^16, "p = ", p, ", NF = ", NF, ", NF_true = ", NF_true)
end
#= This appears to fix it but results in some very odd behavior. =#
F = Array{Float64, 1}(undef, NF_true)
@inbounds for i = 1:NF_true
fast_readbytes!(sio, BUF.hdr_old, 14)
setindex!(F, buf_to_double(BUF.hdr_old, 14), i)
end
close(sio)
if v > 2
[println("F[", i, "] = ", F[i]) for i = 1:NF_true]
end
# Process to resps
if haskey(responses, resp_lookup_key)
R = responses[resp_lookup_key][1]
append!(R.b, F)
else
resp = CoeffResp(F, Float64[])
ui = units ? fix_units(units_lookup[uic]) : ""
uo = units ? fix_units(units_lookup[uoc]) : ""
responses[resp_lookup_key] = (resp, ui, uo)
end
return nothing
end
function blk_043!(io::IO, nb::Int64, v::Integer, units::Bool)
sio = blk_string_read(io, nb, v)
resp_lookup_key = stream_int(sio, 4)
skip_string!(sio)
fastskip(sio, 1)
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
fast_readbytes!(sio, BUF.hdr_old, 12)
A0 = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
F0 = buf_to_double(BUF.hdr_old, 12)
NZ = stream_int(sio, 3)
Z = Array{ComplexF64, 1}(undef, NZ)
@inbounds for i = 1:NZ
fast_readbytes!(sio, BUF.hdr_old, 12)
rr = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
ii = buf_to_double(BUF.hdr_old, 12)
Z[i] = complex(rr, ii)
fastskip(sio, 24)
end
NP = stream_int(sio, 3)
P = Array{ComplexF64,1}(undef, NP)
@inbounds for i = 1:NP
fast_readbytes!(sio, BUF.hdr_old, 12)
rr = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
ii = buf_to_double(BUF.hdr_old, 12)
P[i] = complex(rr, ii)
fastskip(sio, 24)
end
close(sio)
if v > 1
println("A0 = ", string(A0), "F0 = ", string(F0))
if v > 2
println(" "^16, "NZ = ", NZ, ":")
@inbounds for i = 1:NZ
println(" "^16, Z[i])
end
println(" "^16, "NP = ", NP, ":")
@inbounds for i = 1:NP
println(" "^16, P[i])
end
end
end
# Process to resps
R = PZResp64(a0 = A0, f0 = F0, z = Z, p = P)
ui = units ? fix_units(units_lookup[uic]) : ""
uo = units ? fix_units(units_lookup[uoc]) : ""
responses[resp_lookup_key] = (R, ui, uo)
return nothing
end
function blk_044!(io::IO, nb::Int64, v::Integer, units::Bool)
sio = blk_string_read(io, nb, v)
resp_lookup_key = stream_int(sio, 4)
skip_string!(sio)
fastskip(sio, 1)
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
NN = stream_int(sio, 4)
N = Array{Float64,1}(undef, NN)
@inbounds for i = 1:NN
fast_readbytes!(sio, BUF.hdr_old, 12)
N[i] = buf_to_double(BUF.hdr_old, 12)
fastskip(sio, 12)
end
ND = stream_int(sio, 4)
D = Array{Float64,1}(undef, ND)
@inbounds for i = 1:ND
fast_readbytes!(sio, BUF.hdr_old, 12)
D[i] = buf_to_double(BUF.hdr_old, 12)
fastskip(sio, 12)
end
close(sio)
if v > 1
println(", KEY ", resp_lookup_key)
if v > 2
println(" "^16, "units in code = ", uic)
println(" "^16, "units out code = ", uoc)
println(" "^16, "NN = ", NN, ":")
@inbounds for i = 1:NN
println(" "^16, N[i])
end
println(" "^16, "ND = ", ND, ":")
@inbounds for i = 1:ND
println(" "^16, D[i])
end
end
end
if units
responses[resp_lookup_key] = (CoeffResp(N, D),
fix_units(units_lookup[uic]),
fix_units(units_lookup[uoc])
)
else
responses[resp_lookup_key] = (CoeffResp(N, D), "", "")
end
return nothing
end
function blk_047!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
resp_lookup_key = stream_int(sio, 4)
skip_string!(sio)
fast_readbytes!(sio, BUF.hdr_old, 10)
fs = buf_to_double(BUF.hdr_old, 10)
fac = stream_int(sio, 5)
os = stream_int(sio, 5)
fast_readbytes!(sio, BUF.hdr_old, 11)
delay = buf_to_double(BUF.hdr_old, 11)
fast_readbytes!(sio, BUF.hdr_old, 11)
corr = buf_to_double(BUF.hdr_old, 11)
close(sio)
if v > 1
println(", KEY ", resp_lookup_key)
if v > 2
println(" "^16, "fs = ", fs)
println(" "^16, "decimation factor = ", fac)
println(" "^16, "decimation offset = ", os)
println(" "^16, "delay = ", delay)
println(" "^16, "delay correction applied = ", corr)
end
end
responses[resp_lookup_key] = Blk47(fs, delay, corr, fac, os)
return nothing
end
function blk_048!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
resp_lookup_key = stream_int(sio, 4)
skip_string!(sio)
fast_readbytes!(sio, BUF.hdr_old, 12)
gain = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
fg = buf_to_double(BUF.hdr_old, 12)
nv = stream_int(sio, 2)
# channel histories are not in the scope of SeisIO
@inbounds for i = 1:nv
# fastskip(sio, 24)
skip_string!(sio) # should cover the 24-Byte channel history
end
close(sio)
if v > 1
println(", KEY ", resp_lookup_key)
if v > 2
println(" "^16, "gain = ", gain, " (f = ", fg, " Hz)")
end
end
responses[resp_lookup_key] = Blk48(gain, fg)
return nothing
end
function blk_050(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
# Station
fill!(BUF.hdr, 0x00)
p = pointer(BUF.hdr)
fast_unsafe_read(sio, p, 5)
#= I make the assumption here that channel coordinates are
correctly set in blockette 52; if this is not true, then
:loc will not be set.
This shortcut makes support for multiplexing impossible, though
blockette-50 multiplexing has never been encountered in my
test files (or in any files of the ObsPy test suite.)
Multiplexing would be annoying as one would need to define
new SeisData objects S_subnet of length = n_subchans and use
append!(S, S_subnet), rather than push!(S, C); indexing would
be a mess. But it's doable in theory.
=#
fastskip(sio, 33)
site_name = strip(string_field(sio))
fastskip(sio, 9)
ts = string_field(sio)
te = string_field(sio)
uc = fastread(sio)
# Network
p = pointer(BUF.hdr, 11)
fast_unsafe_read(sio, p, 2)
close(sio)
if v > 1
println(", ID = ", String(copy(BUF.id)))
if v > 2
println(" "^16, "site name = ", site_name)
println(" "^16, "start date = ", ts)
println(" "^16, "end date = ", te)
println(" "^16, "update code = ", Char(uc))
end
end
return site_name
end
# ===========================================================================
# STATION CONTROL BLOCKETTES
# not necessary
function blk_051!(io::IO, nb::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
if v > 1
println("")
if v > 2
ts = string_field(sio)
te = string_field(sio)
k = stream_int(sio, 4)
comment_level = stream_int(sio, 6)
println(" "^16, "ts = ", ts)
println(" "^16, "te = ", te)
println(" "^16, "comment code key #", k)
println(" "^16, "comment level = ", comment_level)
end
end
close(sio)
return nothing
end
function blk_052!(io::IO, nb::Int64, C::SeisChannel, ts_req::Int64, te_req::Int64, v::Integer)
sio = blk_string_read(io, nb, v)
# loc
p = pointer(BUF.hdr, 6)
fast_unsafe_read(sio, p, 2)
# cha
p = pointer(BUF.hdr, 8)
fast_unsafe_read(sio, p, 3)
fastskip(sio, 4)
inst = stream_int(sio, 3)
skip_string!(sio)
units_code = stream_int(sio, 3)
fastskip(sio, 3)
# lat, lon, el, dep, az, inc
fast_readbytes!(sio, BUF.hdr_old, 10)
lat = buf_to_double(BUF.hdr_old, 10)
fast_readbytes!(sio, BUF.hdr_old, 11)
lon = buf_to_double(BUF.hdr_old, 11)
fast_readbytes!(sio, BUF.hdr_old, 7)
el = buf_to_double(BUF.hdr_old, 7)
fast_readbytes!(sio, BUF.hdr_old, 5)
dep = buf_to_double(BUF.hdr_old, 5)
fast_readbytes!(sio, BUF.hdr_old, 5)
az = buf_to_double(BUF.hdr_old, 5)
fast_readbytes!(sio, BUF.hdr_old, 5)
inc = 90.0 - buf_to_double(BUF.hdr_old, 5)
fastskip(sio, 6)
# fs
fast_readbytes!(sio, BUF.hdr_old, 10)
fs = buf_to_double(BUF.hdr_old, 10)
# don't really need max. drift; skip_string passes it over
skip_string!(sio)
# ts, te
ts = parse_resp_date(sio, BUF.uint16_buf)
te = parse_resp_date(sio, BUF.uint16_buf)
if te == -56504908800000000
te = 19880899199000000
end
close(sio)
if v > 1
println(", ID = ", String(copy(BUF.hdr)), ", INST = ", inst)
if v > 2
println(" "^16, "lat = ", lat, ", lon = ", lon, ", z = ", el, ", dep = ", dep, ", θ = ", az, ", ϕ = ", inc)
println(" "^16, "fs = ", fs)
println(" "^16, "ts = ", u2d(div(ts, sμ)))
println(" "^16, "te = ", u2d(div(te, sμ)))
end
end
if ts ≤ te_req && te ≥ ts_req
update_hdr!(BUF)
C.id = getfield(BUF, :id_str)
C.fs = fs
C.loc = GeoLoc(lat = lat, lon = lon, el = el, dep = dep, az = az, inc = inc)
C.resp = MultiStageResp(12)
C.misc["ts"] = ts
C.misc["te"] = te
C.misc["timespan"] = string(u2d(div(ts, 1000000))) * " : " * string(u2d(div(te, 1000000)))
C.misc["inst"] = get(abbrev, inst, "")
units = get(units_lookup, units_code, "")
C.units = fix_units(units)
skipping = false
else
if v > 1
println(" "^16, "Skipping ", String(copy(BUF.hdr)), " (not in requested time range)")
end
skipping = true
end
return skipping
end
function blk_053(io::IO, nb::Int64, v::Integer, R::MultiStageResp, units::Bool)
sio = blk_string_read(io, nb, v)
tft = Char(fastread(sio))
stage = stream_int(sio, 2)
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
fast_readbytes!(sio, BUF.hdr_old, 12)
A0 = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
F0 = buf_to_double(BUF.hdr_old, 12)
NZ = stream_int(sio, 3)
Z = Array{ComplexF64,1}(undef, NZ)
@inbounds for i = 1:NZ
fast_readbytes!(sio, BUF.hdr_old, 12)
rr = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
ii = buf_to_double(BUF.hdr_old, 12)
Z[i] = complex(rr, ii)
fastskip(sio, 24)
end
NP = stream_int(sio, 3)
P = Array{ComplexF64,1}(undef, NP)
@inbounds for i = 1:NP
fast_readbytes!(sio, BUF.hdr_old, 12)
rr = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
ii = buf_to_double(BUF.hdr_old, 12)
P[i] = complex(rr, ii)
fastskip(sio, 24)
end
close(sio)
if v > 1
println(", TFT ", tft, ", STAGE ", stage)
if v > 2
println(" "^16, "units in code #", uic)
println(" "^16, "units out code #", uoc)
println(" "^16, "A0 = ", A0)
println(" "^16, "F0 = ", F0)
println(" "^16, "NZ = ", NZ, ":")
@inbounds for i = 1:NZ
println(" "^16, Z[i])
end
println(" "^16, "NP = ", NP, ":")
@inbounds for i = 1:NP
println(" "^16, P[i])
end
end
end
if stage > length(R.fs)
append!(R, MultiStageResp(6))
end
resp = PZResp64(a0 = A0, f0 = F0, z = Z, p = P)
ui = units ? fix_units(units_lookup[uic]) : ""
uo = units ? fix_units(units_lookup[uoc]) : ""
R.stage[stage] = resp
R.i[stage] = ui
R.o[stage] = uo
return stage
end
function blk_054(io::IO, nb::Int64, v::Integer, R::MultiStageResp, units::Bool)
sio = blk_string_read(io, nb, v)
fastskip(sio, 1)
stage = stream_int(sio, 2)
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
NN = stream_int(sio, 4)
N = Array{Float64,1}(undef, NN)
@inbounds for i = 1:NN
fast_readbytes!(sio, BUF.hdr_old, 12)
N[i] = buf_to_double(BUF.hdr_old, 12)
fastskip(sio, 12)
end
ND = stream_int(sio, 4)
D = Array{Float64,1}(undef, ND)
@inbounds for i = 1:ND
fast_readbytes!(sio, BUF.hdr_old, 12)
D[i] = buf_to_double(BUF.hdr_old, 12)
fastskip(sio, 12)
end
close(sio)
if v > 1
println(", STAGE ", stage)
if v > 2
println(" "^16, "units in code = ", uic)
println(" "^16, "units out code = ", uoc)
println(" "^16, "NN = ", NN, ":")
@inbounds for i = 1:NN
println(" "^16, N[i])
end
println(" "^16, "ND = ", ND, ":")
@inbounds for i = 1:ND
println(" "^16, D[i])
end
end
end
if stage > length(R.fs)
append!(R, MultiStageResp(6))
end
R.stage[stage] = CoeffResp(N, D)
if units
@inbounds R.i[stage] = fix_units(units_lookup[uic])
@inbounds R.o[stage] = fix_units(units_lookup[uoc])
end
return stage
end
function blk_057(io::IO, nb::Int64, v::Integer, R::MultiStageResp)
sio = blk_string_read(io, nb, v)
stage = stream_int(sio, 2)
fast_readbytes!(sio, BUF.hdr_old, 10)
fs = buf_to_double(BUF.hdr_old, 10)
fac = stream_int(sio, 5)
os = stream_int(sio, 5)
fast_readbytes!(sio, BUF.hdr_old, 11)
delay = buf_to_double(BUF.hdr_old, 11)
fast_readbytes!(sio, BUF.hdr_old, 11)
corr = buf_to_double(BUF.hdr_old, 11)
close(sio)
if v > 1
println(", STAGE #", stage)
if v > 2
println(" "^16, "fs = ", fs)
println(" "^16, "decimation factor = ", fac)
println(" "^16, "decimation offset = ", os)
println(" "^16, "delay = ", delay)
println(" "^16, "delay correction applied = ", corr)
end
end
@inbounds R.fs[stage] = fs
@inbounds R.delay[stage] = delay
@inbounds R.corr[stage] = corr
@inbounds R.fac[stage] = fac
@inbounds R.os[stage] = os
return stage
end
function blk_058(io::IO, nb::Int64, v::Integer, C::SeisChannel)
sio = blk_string_read(io, nb, v)
stage = stream_int(sio, 2)
fast_readbytes!(sio, BUF.hdr_old, 12)
if stage == 0
C.gain = buf_to_double(BUF.hdr_old, 12)
close(sio)
if v > 1
println(", STAGE #", stage)
end
return stage
else
C.resp.gain[stage] = buf_to_double(BUF.hdr_old, 12)
fast_readbytes!(sio, BUF.hdr_old, 12)
C.resp.fg[stage] = buf_to_double(BUF.hdr_old, 12)
if v > 1
println(", STAGE #", stage)
if v > 2
println(" "^16, "gain = ", C.resp.gain[stage], " (f = ", C.resp.fg[stage], " Hz)")
end
end
# station history is not in the scope of SeisIO
close(sio)
return stage
end
end
# Not in scope of SeisIO
function blk_059!(io::IO, nb::Int64, v::Integer, C::SeisChannel, units::Bool)
sio = blk_string_read(io, nb, v)
v > 1 && println("")
if units
ts = parse_resp_date(sio, BUF.uint16_buf)
te = parse_resp_date(sio, BUF.uint16_buf)
if te == -56504908800000000
te = 19880899199000000
end
k = stream_int(sio, 4)
t_str = string("comment,", ts, ",", te, ",", comments[k])
if v > 2
println(u2d(div(ts, 1000000)), "–", u2d(div(te, 1000000)), ": ", comments[k])
end
note!(C, t_str)
end
close(sio)
return nothing
end
# Assign dictionary elements with response info in the 41-49 blockettes
function blk_060(io::IO, nb::Int64, v::Integer, R::MultiStageResp)
sio = blk_string_read(io, nb, v)
nstg = stream_int(sio, 2)
if v > 1
println(", # STAGES = ", nstg)
end
for i = 1:nstg
seq = stream_int(sio, 2)
nr = stream_int(sio, 2)
for j = 1:nr
k = stream_int(sio, 4)
rr = get(responses, k, "")
if v > 2
printstyled(" "^16, " assigning response to stage #", seq, ":\n", color=:green, bold=true)
println(rr)
end
if isa(rr, Blk48)
@inbounds R.gain[seq] = rr.gain
@inbounds R.fg[seq] = rr.fg
elseif isa(rr, Blk47)
@inbounds R.fs[seq] = rr.fs
@inbounds R.delay[seq] = rr.delay
@inbounds R.corr[seq] = rr.corr
@inbounds R.fac[seq] = rr.fac
@inbounds R.os[seq] = rr.os
elseif typeof(rr[1]) <: InstrumentResponse
(length(R.fs) < seq) && append!(R, MultiStageResp(6))
@inbounds R.stage[seq] = rr[1]
@inbounds R.i[seq] = rr[2]
@inbounds R.o[seq] = rr[3]
end
end
end
close(sio)
return nstg
end
function blk_061(io::IO, nb::Int64, v::Integer, R::MultiStageResp, units::Bool)
sio = blk_string_read(io, nb, v)
stage = stream_int(sio, 2)
skip_string!(sio)
symm_code = Char(fastread(sio))
uic = stream_int(sio, 3)
uoc = stream_int(sio, 3)
NF = stream_int(sio, 4)
F = Array{Float64,1}(undef, NF)
@inbounds for i = 1:NF
fast_readbytes!(sio, BUF.hdr_old, 14)
F[i] = buf_to_double(BUF.hdr_old, 14)
end
close(sio)
if v > 1
println(", STAGE #", stage, " , SYMM = ", Char(symm_code))
if v > 2
println(" "^16, "units in code #", uic)
println(" "^16, "units out code #", uoc)
println(" "^16, "NF = ", NF)
@inbounds for i = 1:NF
println(" "^16, F[i])
end
end
end
(length(R.fs) < stage) && append!(R, MultiStageResp(6))
@inbounds R.stage[stage] = CoeffResp(F, Float64[])
if units
@inbounds R.i[stage] = fix_units(units_lookup[uic])
@inbounds R.o[stage] = fix_units(units_lookup[uoc])
end
return stage
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 7887 | # TO DO: [200], [400], [405]
# [200] Generic Event Detection Blockette (52 bytes)
# [400] Beam Blockette (16 bytes)
# [405] Beam Delay Blockette (6 bytes)
# All blockette functions return a UInt16 value equal to blockette length in bytes
function blk_time!(t::Array{Int32,1}, sid::IO, b::Bool)
yy = fastread(sid, UInt16)
jj = fastread(sid, UInt16)
t[4] = Int32(fastread(sid))
t[5] = Int32(fastread(sid))
t[6] = Int32(fastread(sid))
fastskip(sid, 1)
ms = fastread(sid, UInt16)
if b
yy = ntoh(yy)
jj = ntoh(jj)
ms = ntoh(ms)
end
yy = Int32(yy)
jj = Int32(jj)
t[1] = yy
(t[2], t[3]) = j2md(yy, jj)
t[7] = div(Int32(ms), Int32(10))
return nothing
end
# [100] Sample Rate Blockette (12 bytes)
function blk_100(S::SeisData, sid::IO, c::Int64)
BUF.dt = 1.0 / Float64(BUF.swap ? ntoh(fastread(sid, Float32)) : fastread(sid, Float32))
fastskip(sid, 4)
return 0x000c
end
# [201] Murdock Event Detection Blockette (60 bytes)
function blk_201(S::SeisData, sid::IO, c::Int64)
fastread!(sid, BUF.B201.sig) # amplitude, period, background estimate
BUF.B201.flags = fastread(sid) # detection flags
fastskip(sid, 1) # reserved
blk_time!(BUF.B201.t, sid, BUF.swap) # onset time
fastread!(sid, BUF.B201.snr) # snr, lookback, pick algorithm
BUF.B201.det = String(fastread(sid, 24)) # detector name
# will store as a string: t_evt, :sig (3 vals), bitstring(:flags), :snr, :lb, :p
# Store event detections in S.misc
sig = BUF.B201.sig
flag = BUF.B201.flags == 0x80 ? "dilatation" : "compression"
if BUF.swap
sig = ntoh.(sig)
flag = BUF.B201.flags == 0x01 ? "dilatation" : "compression"
end
if !haskey(S.misc[c], "seed_event")
S.misc[c]["seed_event"] = Array{String, 1}(undef, 0)
end
push!(S.misc[c]["seed_event"], join(BUF.B201.t, ',') * "," *
join(sig, ',') * "," *
flag * "," *
join(BUF.B201.snr, ',') * "," *
strip(BUF.B201.det) )
return 0x003c
end
# [300] Step Calibration Blockette (60 bytes)
# [310] Sine Calibration Blockette (60 bytes)
# [320] Pseudo-random Calibration Blockette (64 bytes)
# [390] Generic Calibration Blockette (28 bytes)
function blk_calib(S::SeisData, sid::IO, c::Int64, bt::UInt16)
p = fastpos(sid)
blk_time!(BUF.Calib.t, sid, BUF.swap) # Calibration time
fastskip(sid, 1) # Reserved byte
(bt == 0x012c) && (BUF.Calib.n = fastread(sid)) # Number of step calibrations
BUF.Calib.flags = fastread(sid) # Calibration flags
BUF.Calib.dur1 = fastread(sid, UInt32) # Calibration duration
(bt == 0x012c) && (BUF.Calib.dur2 = fastread(sid, UInt32)) # Interval duration
(bt == 0x0136) && (BUF.Calib.period = fastread(sid, Float32)) # Period of signal (seconds)
BUF.Calib.amplitude = fastread(sid, Float32) # Peak-to-peak amplitude
BUF.Calib.channel = fastread(sid, 3)
fastskip(sid, 1) # Reserved byte
BUF.Calib.ref = fastread(sid, UInt32) # Reference amplitude
# String arrays
if bt < 0x0186
BUF.Calib.coupling = fastread(sid, 12)
BUF.Calib.rolloff = fastread(sid, 12)
if bt == 0x0140
BUF.Calib.noise = fastread(sid, 8)
end
else
BUF.Calib.coupling = Array{UInt8,1}(undef,0)
BUF.Calib.rolloff = Array{UInt8,1}(undef,0)
end
bc = Char[' ', '\0']
# Check that we already have "seed_calib"
if !haskey(S.misc[c], "seed_calib")
S.misc[c]["seed_calib"] = Array{String, 1}(undef,0)
end
# Swap as needed
if BUF.swap
flag = reverse(bitstring(BUF.Calib.flags))
amp = ntoh(BUF.Calib.amplitude)
ref = ntoh(BUF.Calib.ref)
per = ntoh(BUF.Calib.period)
BUF.Calib.dur1 = ntoh(BUF.Calib.dur1)
BUF.Calib.dur2 = ntoh(BUF.Calib.dur2)
else
flag = bitstring(BUF.Calib.flags)
amp = BUF.Calib.amplitude
ref = BUF.Calib.ref
per = BUF.Calib.period
end
typ = (bt == 0x012c ? "Step" : (bt == 0x0136 ? "Sine" : (bt == 0x0140 ? "Pseudo-random" : "Generic")))
dur = bt == 0x012c ? string(BUF.Calib.dur1, ",", BUF.Calib.dur2) : string(BUF.Calib.dur1)
amp = bt == 0x0136 ? string(per, ",", amp) : string(amp)
flag = bt == 0x012c ? string(flag, ",", BUF.Calib.n) : flag
calib_str = join( [ typ,
join(BUF.Calib.t, ","),
flag,
dur,
amp,
strip(String(BUF.Calib.channel), bc),
string(ref) ], "," )
if bt < 0x0186
calib_str *= string(",", strip(String(BUF.Calib.coupling), bc), ",",
strip(String(BUF.Calib.rolloff), bc))
if bt == 0x0140
calib_str *= string(",", strip(String(BUF.Calib.noise), bc))
end
end
push!(S.misc[c]["seed_calib"], calib_str)
return UInt16(fastpos(sid)-p) + 0x0004
end
# [500] Timing Blockette (200 bytes)
function blk_500(S::SeisData, sid::IO, c::Int64)
BUF.B500.vco_correction = BUF.swap ? ntoh(fastread(sid, Float32)) : fastread(sid, Float32)
blk_time!(BUF.B500.t, sid, BUF.swap)
BUF.B500.μsec = fastread(sid, Int8)
BUF.B500.reception_quality = fastread(sid)
BUF.B500.exception_count = BUF.swap ? ntoh(fastread(sid, UInt32)) : fastread(sid, UInt32)
BUF.B500.exception_type = strip(String(fastread(sid, 16)), ['\0',' '])
BUF.B500.clock_model = strip(String(fastread(sid, 32)), ['\0',' '])
BUF.B500.clock_status = strip(String(fastread(sid, 128)), ['\0',' '])
# TO DO: something with this
if !haskey(S.misc[c], "seed_timing")
S.misc[c]["seed_timing"] = Array{String, 1}(undef,0)
end
push!(S.misc[c]["seed_timing"], join([string(BUF.B500.vco_correction),
join(BUF.B500.t, ','),
BUF.B500.μsec,
BUF.B500.reception_quality,
BUF.B500.exception_count,
BUF.B500.exception_type,
BUF.B500.clock_model,
BUF.B500.clock_status], ',')
)
return 0x00c8
end
# TO DO: correct S.t[c] when one of these timing blockettes is detected
# [1000] Data Only SEED Blockette (8 bytes)
function blk_1000(S::SeisData, sid::IO, c::Int64)
BUF.fmt = fastread(sid)
BUF.wo = fastread(sid)
lx = fastread(sid)
fastskip(sid, 1)
BUF.nx = 2^lx
BUF.xs = ((BUF.swap == true) && (BUF.wo == 0x01))
return 0x0008
end
# [1001] Data Extension Blockette (8 bytes)
function blk_1001(S::SeisData, sid::IO, c::Int64)
fastskip(sid, 1)
BUF.tc += signed(fastread(sid))
fastskip(sid, 2)
return 0x0008
end
# [2000] Variable Length Opaque Data Blockette
function blk_2000(S::SeisData, sid::IO, c::Int64)
# Always big-Endian? Undocumented
BUF.B2000.NB = BUF.swap ? ntoh(fastread(sid, UInt16)) : fastread(sid, UInt16)
BUF.B2000.os = BUF.swap ? ntoh(fastread(sid, UInt16)) : fastread(sid, UInt16)
n = BUF.swap ? ntoh(fastread(sid, UInt32)) : fastread(sid, UInt32)
wo = fastread(sid)
nf = fastread(sid)
flag = fastread(sid)
BUF.B2000.hdr = fastread(sid, Int(BUF.B2000.os)-15)
BUF.B2000.data = fastread(sid, BUF.B2000.NB-BUF.B2000.os)
# Store to S.misc[i]
r = "seed_opaque_" * string(n)
S.misc[c][r * "_wo"] = wo
S.misc[c][r * "_flag"] = bitstring(BUF.B2000.flag)
S.misc[c][r * "_hdr"] = String.(split(String(BUF.B2000.hdr), '~', keepempty=true, limit=Int(nf)))
S.misc[c][r * "_data"] = BUF.B2000.data
return BUF.B2000.NB
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5348 | SEED_Char(io::IO, BUF::SeisIOBuf, nb::UInt16) = replace(String(fastread(io, nb)),
['\r', '\0'] =>"")
function SEED_Unenc!(io::IO, S::GphysData, c::Int64, xi::Int64, nb::UInt16, nx::UInt16)
buf = getfield(BUF, :buf)
checkbuf_8!(buf, xi)
x = getindex(getfield(S, :x), c)
fast_readbytes!(io, buf, nb)
T::Type = (if BUF.fmt == 0x01
Int16
elseif BUF.fmt == 0x03
Int32
elseif BUF.fmt == 0x04
Float32
else
Float64
end)
xr = reinterpret(T, buf)
if BUF.swap
x[xi+1:xi+nx] .= bswap.(xr[1:nx])
else
copyto!(x, xi+1, xr, 1, nx)
end
setfield!(BUF, :k, Int64(nx))
return nothing
end
function SEED_Geoscope!(io::IO, BUF::SeisIOBuf)
mm = 0x0fff
gm = BUF.fmt == 0x0d ? 0x7000 : 0xf000
for i = 0x0001:BUF.n
x = BUF.swap ? bswap(fastread(io, UInt16)) : fastread(io, UInt16)
m = Int32(x & mm)
g = Int32((x & gm) >> 12)
ex = -1*g
setindex!(BUF.x, ldexp(Float64(m-2048), ex), i)
end
BUF.k = BUF.n
return nothing
end
function SEED_CDSN!(io::IO, BUF::SeisIOBuf)
for i = 0x0001:BUF.n
x = BUF.swap ? bswap(fastread(io, UInt16)) : fastread(io, UInt16)
m = Int32(x & 0x3fff)
g = Int32((x & 0xc000) >> 14)
mult = 4^g * (g == 3 ? 2 : 1)
m -= 0x1fff
setindex!(BUF.x, m*mult, i)
end
BUF.k = BUF.n
return nothing
end
function SEED_SRO!(io::IO, BUF::SeisIOBuf)
for i = 0x0001:BUF.n
x = BUF.swap ? bswap(fastread(io, UInt16)) : fastread(io, UInt16)
m = Int32(x & 0x0fff)
g = Int32((x & 0xf000) >> 12)
if m > 0x07ff
m -= 0x1000
end
ex = -1*g + 10
setindex!(BUF.x, ldexp(Float64(m), ex), i)
end
BUF.k = BUF.n
return nothing
end
function SEED_DWWSSN!(io::IO, BUF::SeisIOBuf)
for i = 0x0001:BUF.n
x = signed(UInt32(BUF.swap ? bswap(fastread(io, UInt16)) : fastread(io, UInt16)))
BUF.x[i] = x > 32767 ? x - 65536 : x
end
BUF.k = BUF.n
return nothing
end
# Steim1 or Steim2
function SEED_Steim!(io::IO, BUF::SeisIOBuf, nb::UInt16)
x = getfield(BUF, :x)
buf = getfield(BUF, :buf)
ff = getfield(BUF, :uint32_buf)
nc = Int64(div(nb, 0x0040))
ni = div(nb, 0x0004)
fast_readbytes!(io, buf, nb)
(ni > lastindex(ff)) && resize!(ff, ni)
fillx_u32_be!(ff, buf, ni, 0)
k = zero(Int64) # number of values read
x0 = zero(Float32) # first data value
xn = zero(Float32) # last data value
a = zero(UInt8) # byte offset to first data in each UInt32
b = zero(UInt8) # number of reads in each UInt32
c = zero(UInt8) # length (in bits) of each read in each UInt32
d = zero(UInt8) # amount of right bitshift
fq = zero(Float32) # Float32 data placeholder
m = zero(UInt8) # counter to reads in each UInt32
dnib = zero(UInt8) # dnib; two-bit secondary encoding flag
q = zero(Int32) # Int32 data placeholder
u = zero(UInt32) # placeholder for bit-shifted UInt32
ck = zero(UInt8) # two-bit primary encoding flag
z = zero(UInt32) # UInt32 containing nibbles
χ = zero(UInt32) # packed UInt32
r = zero(Int64) # "row" index to "matrix" of UInt32s
for i = 1:nc
z = getindex(ff, 1+r)
for j = 1:16
χ = getindex(ff, j+r)
ck = UInt8((z >> steim[j]) & 0x03)
# Steim1 and Steim2 are the same here
if ck == 0x01
a = 0x00
b = 0x08
c = 0x04
# Steim1 for ck > 0x01
elseif BUF.fmt == 0x0a
a = 0x00
if ck == 0x02
b = 0x10
c = 0x02
elseif ck == 0x03
b = 0x20
c = 0x01
end
# Steim2
else
dnib = UInt8(χ >> 0x0000001e)
if ck == 0x02
a = 0x02
if dnib == 0x01
b = 0x1e
c = 0x01
elseif dnib == 0x02
b = 0x0f
c = 0x02
elseif dnib == 0x03
b = 0x0a
c = 0x03
end
elseif ck == 0x03
if dnib == 0x00
a = 0x02
b = 0x06
c = 0x05
elseif dnib == 0x01
a = 0x02
b = 0x05
c = 0x06
else
a = 0x04
b = 0x04
c = 0x07
end
end
end
if ck != 0x00
u = χ << a
m = zero(UInt8)
d = 0x20 - b
while m < c
k = k + 1
q = signed(u)
q >>= d
fq = Float32(q)
setindex!(x, fq, k)
m = m + 0x01
u <<= b
end
end
if i == 1
if j == 2
x0 = Float32(signed(χ))
elseif j == 3
xn = Float32(signed(χ))
end
end
end
r = r+16
end
# Cumsum by hand
setindex!(x, x0, 1)
xa = copy(x0)
@inbounds for i1 = 2:k
xa = xa + getindex(x, i1)
setindex!(x, xa, i1)
end
# Check data values
if isapprox(getindex(x, k), xn) == false
println(stdout, string("RDMSEED: data integrity -- Steim-",
getfield(BUF, :fmt) - 0x09, " sequence #",
String(copy(getfield(BUF, :seq))),
" integrity check failed, last_data=",
getindex(getfield(BUF, :x), k),
", should be xn=", xn))
end
setfield!(BUF, :k, k)
return nothing
end
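# Minimal illustration (editor's addition, not part of the original source; the
# helper name is arbitrary): unpacking one Steim word whose 2-bit control code
# is 0x01, i.e. four signed 8-bit differences, matching the a = 0x00, b = 0x08,
# c = 0x04 branch above.
steim_w4_demo(χ::UInt32) = Int32[reinterpret(Int8, UInt8((χ >> s) & 0x000000ff)) for s in (24, 16, 8, 0)]
# steim_w4_demo(0x01ff02fe) == Int32[1, -1, 2, -2]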
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 8580 | rebuffer!(io::IO) = readbytes!(io, BUF.dh_arr, 48)
rebuffer!(io::IOStream) = ccall(:ios_readall, Csize_t,
(Ptr{Cvoid}, Ptr{Cvoid}, Csize_t), io.ios, pointer(BUF.dh_arr, 1), 48)
function hdrswap!(BUF::SeisIOBuf)
u16 = getfield(BUF, :uint16_buf)
@inbounds for i = 1:5
u16[i] = bswap(u16[i])
end
setfield!(BUF, :n, bswap(getfield(BUF, :n)))
setfield!(BUF, :r1, bswap(getfield(BUF, :r1)))
setfield!(BUF, :r2, bswap(getfield(BUF, :r2)))
setfield!(BUF, :tc, bswap(getfield(BUF, :tc)))
return nothing
end
function update_dt!(BUF::SeisIOBuf)
r1 = getfield(BUF, :r1)
r2 = getfield(BUF, :r2)
dt = 0.0
if r1 > 0 && r2 > 0
dt = 1.0/Float64(r1*r2)
elseif r1 > 0
dt = -1.0*Float64(r2/r1)
elseif r2 > 0
dt = -1.0*Float64(r1/r2)
else
dt = Float64(r1*r2)
end
setfield!(BUF, :dt, dt)
setfield!(BUF, :Δ, round(Int64, sμ*dt))
setfield!(BUF, :r1_old, r1)
setfield!(BUF, :r2_old, r2)
return nothing
end
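# Worked examples (editor's addition): the SEED rate factor r1 and multiplier r2
# follow the sign convention handled above, e.g.
#   r1 = 20, r2 =   1  ->  dt = 1/(20*1) = 0.05 s  (fs = 20 Hz)
#   r1 =  1, r2 = -10  ->  dt = -(-10/1) = 10.0 s  (one sample every 10 s)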
function update_hdr!(BUF::SeisIOBuf)
id_j = 0
id = getfield(BUF, :id)
hdr = getfield(BUF, :hdr)
for p in id_positions
if hdr[p] != 0x20
id_j += 1
id[id_j] = hdr[p]
end
if p == 12 || p == 5 || p == 7
id_j += 1
id[id_j] = id_spacer
end
end
unsafe_copyto!(getfield(BUF, :hdr_old), 1, getfield(BUF, :hdr), 1, 12)
setfield!(BUF, :id_str, unsafe_string(pointer(getfield(BUF, :id)), id_j))
return nothing
end
###############################################################################
function parserec!(S::SeisData, BUF::SeisIOBuf, sid::IO, nx_new::Int64, nx_add::Int64, strict::Bool, v::Integer)
# =========================================================================
u16 = getfield(BUF, :uint16_buf)
flags = getfield(BUF, :flags)
xi = 0
te = 0
# Fixed section of data header (48 bytes)
pos = fastpos(sid)
rebuffer!(sid)
seekstart(BUF.dh_buf)
read!(BUF.dh_buf, BUF.seq)
read!(BUF.dh_buf, BUF.hdr)
u16[1] = read(BUF.dh_buf, UInt16)
u16[2] = read(BUF.dh_buf, UInt16)
hh = read(BUF.dh_buf, UInt8)
mm = read(BUF.dh_buf, UInt8)
ss = read(BUF.dh_buf, UInt8)
skip(BUF.dh_buf, 1)
u16[3] = read(BUF.dh_buf, UInt16)
BUF.n = read(BUF.dh_buf, UInt16)
BUF.r1 = read(BUF.dh_buf, Int16)
BUF.r2 = read(BUF.dh_buf, Int16)
read!(BUF.dh_buf, flags)
BUF.tc = read(BUF.dh_buf, Int32)
u16[4] = read(BUF.dh_buf, UInt16)
u16[5] = read(BUF.dh_buf, UInt16)
if getfield(BUF, :swap) == true
hdrswap!(BUF)
end
# =========================================================================
# Post-read header processing
# This is the standard check for correct byte order...?
yy = u16[1]
jj = u16[2]
if (jj > 0x0200 || ((jj == 0x0000 || jj == 0x0100) &&
(yy > 0x0907 || yy < 0x707)) || yy>0x0bb8)
setfield!(BUF, :swap, !BUF.swap)
if ((BUF.swap == true) && (BUF.wo == 0x01))
BUF.xs = true
end
hdrswap!(BUF)
end
if (BUF.r1 != BUF.r1_old) || (BUF.r2 != BUF.r2_old)
update_dt!(BUF)
end
n = getfield(BUF, :n)
if v > 2
println(stdout, String(copy(BUF.seq)), " ", String(copy(BUF.hdr)), ", fs = ", 1.0/BUF.dt, ", n = ", n)
end
# =========================================================================
# Channel handling for S
# Check this SEED id and whether or not it exists in S
if BUF.hdr != BUF.hdr_old
update_hdr!(BUF)
end
id = getfield(BUF, :id_str)
fs = 1.0/getfield(BUF, :dt)
c = findid(id, S)
if strict
c = channel_match(S, c, fs)
end
if c == 0
if v > 1
println(stdout, "New channel; ID = ", id, ", S.id = ", S.id)
end
x = Array{Float32, 1}(undef, nx_new)
L = nx_new
nt = 2
C = SeisChannel()
setfield!(C, :id, id)
setfield!(C, :name, identity(id))
setfield!(C, :fs, fs)
setfield!(C, :x, x)
push!(S, C)
c = S.n
(v > 1) && println(stdout, "Added channel: ", id)
else
# assumes fs doesn't change within a SeisData structure
t = getindex(getfield(S, :t), c)
x = getindex(getfield(S, :x), c)
nt = div(lastindex(t), 2)
L = lastindex(x)
if nt > 0
xi = getindex(t, nt)
te = endtime(t, getindex(getfield(S, :fs), c))
end
if xi + n > L
resize!(x, xi + max(n, nx_add))
(v > 1) && println(stdout, id, ": resized from length ", L,
" to length ", length(x))
end
end
# =========================================================================
# Parse blockettes
nsk = u16[4] - 0x0030
u16[6] = u16[5] - 0x0030
nblk = flags[4]
v > 1 && println(string("Blockettes to read: ", nblk))
@inbounds for i = 0x01:0x01:nblk
# DND DND DND
fastskip(sid, u16[6])
nsk = nsk - u16[6]
u16[5] = UInt16(fastpos(sid) - pos)
# DND DND DND
bt = fastread(sid, UInt16)
u16[6] = fastread(sid, UInt16)
if getfield(BUF, :swap) == true
bt = bswap(bt)
setindex!(u16, bswap(u16[6]), 6)
end
# debug
if v > 1
printstyled(string("Position = ", fastpos(sid), "\n"), color=:light_green)
printstyled(string("Blockette type to read: ", bt, "\n"), color=:light_yellow)
println(stdout, "Relative position u16[5] = ", u16[5], " bytes from record begin")
println(stdout, "We are nsk = ", nsk, " bytes to data begin")
end
# Blockette parsing moved to individual functions
if bt == 0x03e8
blk_len = blk_1000(S, sid, c)
elseif bt == 0x03e9
blk_len = blk_1001(S, sid, c)
elseif bt == 0x0064
blk_len = blk_100(S, sid, c)
# Oral tradition: immediately update :fs
fs = 1.0 / getfield(BUF, :dt)
if (xi == 0) || (fs != S.fs[c])
setfield!(BUF, :Δ, round(Int64, sμ*BUF.dt))
note!(S, c, string("mini-SEED Blockette 100, old fs = ", S.fs[c], ", new fs = ", fs))
S.fs[c] = fs
end
elseif bt == 0x00c9
blk_len = blk_201(S, sid, c)
elseif bt == 0x01f4
blk_len = blk_500(S, sid, c)
elseif bt == 0x07d0
blk_len = blk_2000(S, sid, c)
elseif bt in BUF.calibs
blk_len = blk_calib(S, sid, c, bt)
else
v > 1 && println(stdout, id, ": no support for Blockette Type ", bt, "; skipped.")
blk_len = (u16[6] == 0x0000 ? nsk : u16[6])
fastskip(sid, blk_len - 0x0004)
end
nsk = nsk - blk_len
if u16[6] != 0x0000
u16[6] = u16[6] - blk_len - u16[5]
end
end
# =========================================================================
# Data parsing: originally adapted from rdmseed.m by Francois Beauducel
# (not very similar anymore)
if nsk > 0x0000
fastskip(sid, nsk)
end
# Get data format
fmt = getfield(BUF, :fmt)
nb = getfield(BUF, :nx) - u16[4]
# debug output
if v > 2
printstyled(string("Position = ", fastpos(sid), "\n"), color=:light_green)
println(stdout, "To parse: nx = ", n, " sample blockette, ",
"compressed size = ", nb, " bytes, fmt = ", fmt)
end
if fmt == 0x0a || fmt == 0x0b
SEED_Steim!(sid, BUF, nb)
elseif fmt == 0x00
# ASCII is a special case as it's typically not data
D = getindex(getfield(S, :misc), c)
if !haskey(D, "seed_ascii")
D["seed_ascii"] = Array{String,1}(undef,0)
end
push!(D["seed_ascii"], SEED_Char(sid, BUF, nb))
elseif fmt in UInt8[0x01, 0x03, 0x04, 0x05]
SEED_Unenc!(sid, S, c, xi, nb, n)
elseif fmt == 0x0d || fmt == 0x0e
SEED_Geoscope!(sid, BUF)
elseif fmt == 0x10
SEED_CDSN!(sid, BUF)
elseif fmt == 0x1e
SEED_SRO!(sid, BUF)
elseif fmt == 0x20
SEED_DWWSSN!(sid, BUF)
else
warn_str = string("readmseed, unsupported format = ", fmt, ", ", nb, " bytes skipped.")
@warn(warn_str); note!(S, c, warn_str)
fastskip(sid, nb)
return nothing
end
if fmt > 0x00
# Update S.x[c]
if fmt > 0x05
copyto!(x, xi+1, getfield(BUF, :x), 1, getfield(BUF, :k))
end
# Update S.t[c]
# Check for time correction
is_tc = flags[2] >> 1 & 0x01
tc = getfield(BUF, :tc)
if is_tc == false && tc != zero(Int32)
δt = Int64(tc)*100
else
δt = zero(Int64)
end
# Sample rate in μs
Δ = getfield(BUF, :Δ)
# Elapsed time since S.t[c] ended
τ = seed_time(u16, hh, mm, ss, δt)
# New channel
if te == 0
setindex!(getfield(S, :t), mk_t(n, τ), c)
# Existing channel
else
check_for_gap!(S, c, τ, n, v)
end
v > 2 && printstyled(string("Position = ", fastpos(sid), "\n"), color=:light_green)
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
["MIT"] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6384 | mutable struct Blk47 <: SeedBlk
fs::Float64
delay::Float64
corr::Float64
fac::Int64
os::Int64
end
mutable struct Blk48 <: SeedBlk
gain::Float64
fg::Float64
end
function dict_cleanup!(D::Union{Dict{Int64, Any},Dict{Int64, String}})
if !isempty(D)
for k in keys(D)
delete!(D, k)
end
end
return nothing
end
function close_channel!(S::SeisData, C::SeisChannel, stg::Int64)
isempty(C) && return nothing
smax = length(C.resp.fs)
for f in (:stage, :fs, :gain, :fg, :delay, :corr, :fac, :os, :i, :o)
deleteat!(getfield(C.resp, f), stg:smax)
end
push!(S, C)
return nothing
end
function parse_dataless!(S::SeisData, io::IO, s::TimeSpec, t::TimeSpec, v::Integer, units::Bool)
C = SeisChannel()
R = MultiStageResp()
resize!(BUF.hdr_old, 14)
blk = 0
nb = 0
stg = 0
nstg = 0
codes = Array{String}
BUF.k = 0
if typeof(s) != String || typeof(t) != String
d0, d1 = parsetimewin(s, t)
ts_req = round(Int64, d2u(DateTime(d0))*sμ)
te_req = round(Int64, d2u(DateTime(d1))*sμ)
else
ts_req = round(Int64, d2u(DateTime(s))*sμ)
te_req = round(Int64, d2u(DateTime(t))*sμ)
end
skipping = false
site_name = ""
while !fasteof(io)
p = fastpos(io)
read!(io, BUF.seq)
BUF.k = 8
v > 0 && println("seq = ", String(copy(BUF.seq)), ":")
if BUF.seq[7] == 0x20
v > 0 && println(" "^16, "(empty; skipped 4096 B)")
fastskip(io, 4088)
BUF.k = 0
# ========================================================================
else
while BUF.k < 4096
#= Buried in the SEED manual: (bottom of page 32) "If there are less
than seven bytes remaining, the record *must* be flushed." =#
if BUF.k > 4089
fastskip(io, 4096-BUF.k)
BUF.k = 0
break
end
# Read blockette number; skip rest of record if blk == 0
blk = stream_int(io, 3)
BUF.k += 3
if blk == 0
δp = 4096-BUF.k
if v > 0
printstyled(string(" "^16, "no blockettes left, skipping rest of record (δp = ", δp, " B); last sequence was ", String(copy(BUF.seq)), "\n"), color=:yellow, bold=true)
end
fastskip(io, δp)
BUF.k = 0
break
elseif blk == 52
if skipping == true
skipping = false
if v > 0
println(" "^16, "(skipping turned off)")
end
end
end
# Read number of bytes
nb = stream_int(io, 4)-7
BUF.k += 4
if v > 1
printstyled(string(" "^16, "BLK ", blk, ", ", nb , " Bytes"), color=:green, bold=true)
end
if skipping
sio = blk_string_read(io, nb, v)
v > 1 && println(" (skipped)")
close(sio)
else
#= BUF.k is the counter to position within each record; below here,
it is incremented during calls to blk_string_read
=#
# This is a heinous if/else block; Julia has no SWITCH statement
# ====================================================================
# Volume control; Char(BUF.seq[7]) == 'V'
if blk == 10
blk_010!(io, nb, v)
elseif blk == 11
blk_011!(io, nb, v)
elseif blk == 12
blk_012!(io, nb, v)
# ====================================================================
# Abbreviation control, whatever that means; Char(BUF.seq[7]) == 'A'
elseif blk == 30
blk_030!(io, nb, v)
elseif blk == 31
blk_031!(io, nb, v)
elseif blk == 32
blk_032!(io, nb, v)
elseif blk == 33
blk_033!(io, nb, v)
elseif blk == 34
blk_034!(io, nb, v)
elseif blk == 41
blk_041!(io, nb, v, units)
elseif blk == 43
blk_043!(io, nb, v, units)
elseif blk == 44
blk_044!(io, nb, v, units)
elseif blk == 47
blk_047!(io, nb, v)
elseif blk == 48
blk_048!(io, nb, v)
# ====================================================================
# Station (really, channel) control; Char(BUF.seq[7]) == 'S'
elseif blk == 50
site_name = blk_050(io, nb, v)
elseif blk == 51
blk_051!(io, nb, v)
elseif blk == 52
close_channel!(S, C, nstg > 0 ? nstg : stg + 1)
C = SeisChannel()
skipping = blk_052!(io, nb, C, ts_req, te_req, v)
if isempty(C) == false
R = C.resp
end
nstg = 0
elseif blk == 53
stg = blk_053(io, nb, v, R, units)
elseif blk == 54
stg = blk_054(io, nb, v, R, units)
elseif blk == 57
stg = blk_057(io, nb, v, R)
elseif blk == 58
n = blk_058(io, nb, v, C)
if n > 0
stg = n
end
elseif blk == 59
blk_059!(io, nb, v, C, units)
elseif blk == 60
nstg = blk_060(io, nb, v, R)
v > 2 && println(R.stage)
elseif blk == 61
stg = blk_061(io, nb, v, R, units)
# ====================================================================
# Not testable == not supported; no exceptions
# send more test files if you want more blockette types covered!
else
fastskip(io, nb)
end
end
end
end
BUF.k -= 4096
end
seed_cleanup!(S, BUF)
resize!(BUF.hdr_old, 12)
# Prevent infinite concatenation
dict_cleanup!(responses)
dict_cleanup!(units_lookup)
dict_cleanup!(comments)
dict_cleanup!(abbrev)
close_channel!(S, C, nstg > 0 ? nstg : stg + 1)
return S
end
function read_dataless(fname::String;
memmap::Bool = false,
s::TimeSpec = "0001-01-01T00:00:00",
t::TimeSpec = "9999-12-31T23:59:59",
v::Integer = KW.v,
units::Bool = false)
S = SeisData()
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
fastskip(io, 6)
c = fastread(io)
if c in (0x41, 0x53, 0x54, 0x56) # 'A', 'S', 'T', 'V'
seekstart(io)
parse_dataless!(S, io, s, t, v, units)
close(io)
else
close(io)
error("Not a SEED volume!")
end
fstr = realpath(fname)
return S
end
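#= Usage sketch (hypothetical file name):
  S = read_dataless("meta.dataless", units=true)
reads station and instrument-response metadata into a new SeisData object;
the `s` and `t` keywords limit parsing to channel epochs in that time window. =#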
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 225 | export formats,
dataless_support,
mseed_support,
parsemseed!,
read_dataless,
read_mseed_file!,
read_seed_resp!,
resp_wont_read,
scan_seed,
seed_support
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 402 | import SeisIO.Formats: formats, FmtVer, FormatDesc, HistVec
import SeisIO: BUF,
InstrumentResponse,
KW,
SeedBlk,
SeisIOBuf,
TimeSpec,
buf_to_double,
buf_to_i16,
buf_to_int,
channel_match,
check_for_gap!,
checkbuf!,
checkbuf_8!,
endtime,
fillx_u32_be!,
fillx_u32_le!,
fix_units,
is_u8_digit,
mk_t,
mktime,
stream_int,
sμ,
track_hdr!,
trunc_x!,
y2μs,
μs
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 854 | function seed_cleanup!(S::SeisData, BUF::SeisIOBuf)
trunc_x!(S)
fill!(getfield(BUF, :hdr_old), zero(UInt8))
setfield!(BUF, :r1_old, zero(Int16))
setfield!(BUF, :r2_old, zero(Int16))
return nothing
end
function parsemseed!(S::SeisData, io::IO, nx_new::Int64, nx_add::Int64, strict::Bool, v::Integer)
while !eof(io)
parserec!(S, BUF, io, nx_new, nx_add, strict, v)
end
seed_cleanup!(S, BUF)
return nothing
end
function read_mseed_file!(S::SeisData, fname::String, nx_new::Int64, nx_add::Int64, memmap::Bool, strict::Bool, v::Integer)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
fastskip(io, 6)
c = fastread(io)
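# the 7th byte of a valid mini-SEED record is the data header/quality indicator:
# 'D', 'R', 'M', or 'Q' (0x44, 0x52, 0x4d, 0x51)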
if c in (0x44, 0x52, 0x4d, 0x51)
seekstart(io)
parsemseed!(S, io, nx_new, nx_add, strict, v)
close(io)
else
close(io)
error("Invalid file type!")
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 16027 | function resp_unit_split(buf::Array{UInt8,1}, L::Int64)
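# splits a RESP units field (e.g. "M/S - Velocity in Meters Per Second") at the
# first " - " separator and returns the unit token before it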
i = 1
while i+2 < L
if buf[i] == 0x20 && buf[i+1] == 0x2d && buf[i+2] == 0x20
return String(buf[1:i-1])
end
i = i+1
end
return String(buf[1:L])
end
function parse_resp_date!(buf::Array{UInt8,1}, L::Int64, T::Array{Int16,1})
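# parses a RESP-style date (e.g. "2005,123,12:34:56.0000"); accepted delimiters are
# ',', '.', ':' (0x2c, 0x2e, 0x3a); returns typemax(Int64) if buf doesn't start with a digit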
is_u8_digit(buf[1]) || return typemax(Int64)
fill!(T, zero(Int16))
o = one(Int16)
i = zero(Int16)
j = o # counter to start position in buf
k = o # counter to y,j,h,m,s
while i ≤ L
i += o
if i > L
T[k] = buf_to_i16(buf, i-o, j)
break
elseif buf[i] in (0x2c, 0x2e, 0x3a)
T[k] = buf_to_i16(buf, i-o, j)
k += o
k > Int16(6) && break
j = i+o
end
end
return mktime(T)
end
function close_resp_channel!(S::SeisData, C::SeisChannel, ts::Int64, te::Int64, fname::String, ns::Int64)
L = length(C.resp.fs)
for f in fieldnames(MultiStageResp)
deleteat!(getfield(C.resp, f), (ns+1):L)
end
i = findid(S, C.id)
if i != 0
t0 = isempty(S.t[i]) ? ts : S.t[i][1,2]
if ts ≤ t0 < te
S.resp[i] = C.resp
S.gain[i] = C.gain
end
else
# ObsPy does this but I don't think it's safe; stage fs is not stored uniformly.
if length(C.resp.fs) > 0
fac = C.resp.fac[end]
C.fs = C.resp.fs[end] * (fac > 0 ? 1/fac : 1.0)
end
push!(S, C)
end
return nothing
end
function add_stage!(C::SeisChannel, n::Int64, tfc::UInt8)
if C.resp.stage[n] == nothing
C.resp.stage[n] =
if tfc == 0x41
PZResp64()
elseif tfc == 0x44
CoeffResp()
else
GenResp()
end
return nothing
end
end
function read_seed_resp!(S::GphysData, files::Array{String,1}, memmap::Bool, units::Bool)
buf = BUF.buf
C = SeisChannel(resp = MultiStageResp(12))
R = C.resp
blk = zero(UInt64)
hhash = zeros(UInt64, S.n)
opts = string("memmap=", memmap, ", units=", units, ")")
# Containers for parts of the response
seq_n = zero(Int64) # Stage sequence number (053F04, 054F04, 057F03, 058F03, 060F04, 061F03, 062F04)
seq_n_old = zero(Int64) # Last sequence number
d_arr = zeros(Int16, 6) # Date
nmax = zero(Int64)
# Array containers
X = Float64[]
D = Float64[]
Z = ComplexF64[]
P = ComplexF64[]
# Unit strings containers
units_out = "" # Response out units lookup (041F07, 043F07, 044F07, 053F06, 054F06, 061F07, 062F06)
for (nf, file) in enumerate(files)
# Counters, etc.
c = 0x00
i = 1
read_state = 0x00
tfc = 0x00
ts = zero(Int64) # Start date (052F22)
te = zero(Int64) # End date (052F23)
ND = zero(Int64) # Number of denominators (044F11, 054F10)
NN = zero(Int64) # Number of coefficients (041F08, 044F08, 054F07, 061F08, 062F14)
NZ = zero(Int64) # Number of zeroes (043F10, 053F09)
NP = zero(Int64) # Number of poles (043F15, 053F14)
a0 = one(Float64) # A0 normalization factor (043F08, 048F05, 053F07)
f0 = one(Float64) # Normalization frequency (043F09, 048F06, 053F08)
id = UInt8[]
units_in = "" # Response in units lookup (041F06, 043F06, 044F06, 053F05, 054F05, 061F06, 062F05)
io = memmap ? IOBuffer(Mmap.mmap(file)) : open(file, "r")
chip = false
while !eof(io)
c = fastread(io)
# comment line ------------------------------
if c == 0x23
while c != 0x0a
c = fastread(io)
end
end
# start of blockette line -------------------
if c == 0x42
#= format: %c%3i%c%2i, 'B', blockette_#, 'F', field_#
Here, I map the blockette and field numbers (along with 'F') to a UInt64.
This mapping should be unique; for any blockette blk, I expect that
String(reinterpret(UInt8, [blk]))[1:6] = bbbFff, where bbb is
blockette # and ff is field #
=#
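# Example: field "B053F07" reads the six bytes "053F07", so
#   blk = UInt64('0') | UInt64('5')<<8 | UInt64('3')<<16 | UInt64('F')<<24 | UInt64('0')<<32 | UInt64('7')<<40
#       = 0x0000373046333530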
blk = UInt64(fastread(io))
blk |= UInt64(fastread(io)) << 8
blk |= UInt64(fastread(io)) << 16
blk |= UInt64(fastread(io)) << 24
blk |= UInt64(fastread(io)) << 32
blk |= UInt64(fastread(io)) << 40
read_state = 0x01
# Coefficient parsing ==================================================
if blk in (0x0000393046313630, 0x0000393046313430)
# Numerator coefficients (061F09, 041F09)
X = Array{Float64,1}(undef, NN)
for n = 1:NN
# To first non-whitespace
if n > 1
fastskip(io, 7)
end
c = fastread(io)
while c == 0x20
c = fastread(io)
end
j = get_coeff_n(io, c, buf)
c = skip_whitespace(io, c)
# To next newline
k = 1
while c != 0x0a
buf[k] = c
k += 1
c = fastread(io)
end
store_dbl!(X, buf, k, j)
end
elseif blk in (0x0000313146333430, 0x0000303146333530, 0x0000363146333430, 0x0000353146333530)
# Complex zeros (043F11, 053F10) / Complex poles (043F16, 053F15)
if blk in (0x0000313146333430, 0x0000303146333530)
N = NZ
Z = Array{ComplexF64,1}(undef, N)
is_p = false
else
N = NP
P = Array{ComplexF64,1}(undef, N)
is_p = true
end
for n = 1:N
# coefficient number
if n > 1
fastskip(io, 10)
else
fastskip(io, 3)
end
c = skip_whitespace(io, c)
j = get_coeff_n(io, c, buf)
# real part
c = skip_whitespace(io, c)
k = 1
while c != 0x20
buf[k] = c
k += 1
c = fastread(io)
end
xr = buf_to_double(buf, k)
# imaginary part
c = skip_whitespace(io, c)
k = 1
while c != 0x20
buf[k] = c
k += 1
c = fastread(io)
end
xi = buf_to_double(buf, k)
# store
if is_p
P[j] = complex(xr, xi)
else
Z[j] = complex(xr, xi)
end
# rest irrelevant
c = to_newline(io, c)
end
elseif blk in (0x0000383046343530, 0x0000313146343530, 0x0000353146323630)
# Numerator coefficients (054F08) / Denominator coefficients (054F11) / Polynomial coefficients (B062F15)
if blk == 0x0000383046343530
N = NN
X = Array{Float64,1}(undef, N)
is_n = true
else
N = ND
D = Array{Float64,1}(undef, N)
is_n = false
end
for n = 1:N
# To first non-whitespace
if n > 1
fastskip(io, 10)
else
fastskip(io, 3)
end
c = skip_whitespace(io, c)
j = get_coeff_n(io, c, buf)
c = skip_whitespace(io, c)
# To next whitespace
k = 1
while c != 0x20
buf[k] = c
k += 1
c = fastread(io)
end
# Store
if is_n
store_dbl!(X, buf, k, j)
else
store_dbl!(D, buf, k, j)
end
c = to_newline(io, c)
end
else
# To separator
while c != 0x3a
c = fastread(io)
end
# To first non-whitespace
c = skip_whitespace(io, c)
end
end
# header info --------------------------------
if read_state == 0x01
i = 1
while c != 0x0a
buf[i] = c
i += 1
eof(io) && break
c = fastread(io)
end
i -= 1
# header parsing =======================================================
if blk in (0x0000383046333430, 0x0000373046333530)
# A0 normalization factor (043F08, 053F07)
a0 = buf_to_double(buf, i)
elseif blk == 0x0000343046323530
# Channel (052F04)
ni = min(i,3)
L = length(id)
append!(id, zeros(UInt8, ni+1))
id[L+1] = 0x2e
unsafe_copyto!(id, L+2, buf, 1, ni)
setfield!(C, :id, String(id))
# println("Starting channel ", C.id)
chip = true
elseif blk in (0x0000383046373530, 0x0000393046373430)
# Correction applied (seconds) (057F08) / Response correction (047F09)
store_dbl!(R.corr, buf, i, seq_n)
elseif blk in (0x0000353046373530, 0x0000363046373430)
# Decimation factor (057F05) / Response decimation factor (047F06)
store_int!(R.fac, buf, i, seq_n)
elseif blk in (0x0000363046373530, 0x0000373046373430)
# Decimation offset (057F06) / Response decimation offset (047F07)
store_int!(R.os, buf, i, seq_n)
elseif blk == 0x0000333246323530
# End date (052F23)
te = parse_resp_date!(buf, i, d_arr)
elseif blk in (0x0000373046373530, 0x0000383046373430)
# Estimated delay (seconds) (057F07) / Response delay (047F08)
store_dbl!(R.delay, buf, i, seq_n)
elseif blk in (0x0000353046383530, 0x0000363046383430)
# Frequency of gain (058F05) / Frequency of sensitivity (048F06)
# println("blk = ", String(reinterpret(UInt8, [blk])), ", buf = ", String(buf[1:i]))
store_dbl!(R.fg, buf, i, seq_n)
elseif blk in (0x0000343046383530, 0x0000353046383430)
# Gain (058F04) / Sensitivity (048F05)
if seq_n > 0
store_dbl!(R.gain, buf, i, seq_n)
else
C.gain = buf_to_double(buf, i)
end
elseif blk in (0x0000343046373530, 0x0000353046373430)
# Input sample rate (057F04) / Response input sample rate (047F05)
store_dbl!(R.fs, buf, i, seq_n)
elseif blk == 0x0000333046323530
# Location (052F03)
push!(id, 0x2e)
for j = 1:min(i,2)
if buf[j] != 0x3f
push!(id, buf[j])
end
end
elseif blk == 0x0000363146303530
# Network (050F16)
ni = min(i,2)
prepend!(id, zeros(UInt8, ni))
copyto!(id, 1, buf, 1, ni)
elseif blk in (0x0000393046333430, 0x0000383046333530)
# Normalization frequency (043F09, 053F08)
f0 = buf_to_double(buf, i)
elseif blk in (0x0000313146343430, 0x0000303146343530)
# Number of denominators (044F11, 054F10)
ND = buf_to_int(buf, i)
elseif blk in (0x0000383046313430, 0x0000383046343430, 0x0000373046343530, 0x0000383046313630, 0x0000343146323630)
# Number of numerators (041F08, 044F08, 054F07, 061F08) / Number of coefficients (062F14)
NN = buf_to_int(buf, i)
elseif blk in (0x0000353146333430, 0x0000343146333530)
# Number of poles (043F15, 053F14)
NP = buf_to_int(buf, i)
elseif blk in (0x0000303146333430, 0x0000393046333530)
# Number of zeroes (043F10, 053F09)
NZ = buf_to_int(buf, i)
elseif blk in (0x0000363046313430, 0x0000363046333430, 0x0000363046343430, 0x0000353046333530, 0x0000353046343530, 0x0000363046313630, 0x0000353046323630)
# Response in units lookup (041F06, 043F06, 044F06, 053F05, 054F05, 061F06, 062F05)
if units || isempty(C.units)
units_in = fix_units(resp_unit_split(buf, i))
if seq_n > 0 && units == true
C.resp.i[seq_n] = units_in
end
end
elseif blk in (0x0000373046313430, 0x0000373046333430, 0x0000373046343430, 0x0000363046333530, 0x0000363046343530, 0x0000373046313630, 0x0000363046323630)
# Response out units lookup (041F07, 043F07, 044F07, 053F06, 054F06, 061F07, 062F06)
if units
units_out = fix_units(resp_unit_split(buf, i))
if seq_n > 0
C.resp.o[seq_n] = units_out
end
end
elseif blk in (0x0000343046333530, 0x0000343046343530, 0x0000333046373530, 0x0000333046383530, 0x0000343046303630, 0x0000333046313630, 0x0000343046323630)
# Stage sequence number (053F04, 054F04, 057F03, 058F03, 060F04, 061F03, 062F04)
seq_n = buf_to_int(buf, i)
nmax = max(nmax, seq_n)
#= Here is where we dump everything to seq_n_old.
Fortunately RESP files list stage 0 last, which makes stupid tricks
like this viable.
=#
if seq_n_old != seq_n
# initialize stage seq_n
if seq_n > 0
(length(C.resp.stage) < seq_n) && append!(C.resp, MultiStageResp(6))
end
# dump to seq_n_old
if seq_n_old > 0
if isa(C.resp.stage[seq_n_old], CoeffResp)
C.resp.stage[seq_n_old].b = X
C.resp.stage[seq_n_old].a = D
X = Float64[]
D = Float64[]
elseif isa(C.resp.stage[seq_n_old], PZResp64)
C.resp.stage[seq_n_old].a0 = a0
C.resp.stage[seq_n_old].f0 = f0
C.resp.stage[seq_n_old].z = Z
C.resp.stage[seq_n_old].p = P
Z = ComplexF64[]
P = ComplexF64[]
a0 = one(Float64)
f0 = one(Float64)
end
end
seq_n_old = seq_n
end
# trigger new channel on seq_n
if blk in (0x0000343046333530, 0x0000343046343530, 0x0000343046323630)
if seq_n > 0 && tfc != 0x00
add_stage!(C, seq_n, tfc)
tfc = 0x00
end
end
elseif blk == 0x0000323246323530
# Start date (052F22)
ts = parse_resp_date!(buf, i, d_arr)
elseif blk == 0x0000333046303530
# Station (050F03)
if chip
# close_resp_channel!(S, C, ts, te, file, corr, delay, fg, fs_in, gain, fac, os, length(C.resp.stage))
close_resp_channel!(S, C, ts, te, file, nmax)
C = SeisChannel(resp = MultiStageResp(12))
R = C.resp
chip = false
seq_n = zero(Int64)
seq_n_old = zero(Int64)
tfc = 0x00
units_in = ""
nmax = zero(Int64)
end
ni = min(i,5)
id = zeros(UInt8, ni+1)
unsafe_copyto!(id, 2, buf, 1, ni)
id[1] = 0x2e
elseif blk in (0x0000353046333430, 0x0000353046343430, 0x0000333046333530, 0x0000333046343530, 0x0000333046323630)
# Response type (043F05, 044F05) / Transfer function type (053F03, 054F03, 062F03)
tfc = buf[1]
# trigger on response type
if blk in (0x0000353046333430, 0x0000353046343430)
if seq_n > 0
add_stage!(C, seq_n, tfc)
tfc = 0x00
end
end
elseif blk in (0x0000353046313430, 0x0000353046313630)
# Symmetry type (041F05, 061F05) tells us we have a digital filter
tfc = 0x44
if seq_n > 0
add_stage!(C, seq_n, tfc)
tfc = 0x00
end
end
if seq_n == 1 && isempty(C.units) && !isempty(units_in)
C.units = units_in
units_in = ""
end
read_state = 0x00
end
end
close(io)
close_resp_channel!(S, C, ts, te, file, nmax)
if nf != lastindex(files)
C = SeisChannel(resp = MultiStageResp(12))
R = C.resp
chip = false
seq_n = zero(Int64)
seq_n_old = zero(Int64)
tfc = 0x00
units_in = ""
nmax = zero(Int64)
end
track_hdr!(S, hhash, "resp", file, opts)
end
return S
end
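#= Usage sketch (hypothetical file name):
  S = SeisData()
  read_seed_resp!(S, ["RESP.XX.TEST..BHZ"], false, true)
parses each RESP file and updates the matching channel's response and gain in S,
or pushes a new channel if no match is found. =#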
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6591 | const mseed_support_data = "# Mini-SEED Support
SeisIO supports mini-SEED, the \"data-only\" extension of the SEED (Standard for
the Exchange of Earthquake Data) file format.
## Supported Blockette Types
| Blockette | Key in `:misc` |
|:----------------------------------------------|:----------------|
| [100] Sample Rate Blockette | |
| [201] Murdock Event Detection Blockette | seed_event ⁽¹⁾ |
| [300] Step Calibration Blockette | seed_calib ⁽¹⁾ |
| [310] Sine Calibration Blockette | seed_calib ⁽¹⁾ |
| [320] Pseudo-random Calibration Blockette | seed_calib ⁽¹⁾ |
| [390] Generic Calibration Blockette | seed_calib ⁽¹⁾ |
| [500] Timing Blockette | seed_timing |
| [1000] Data Only SEED Blockette | |
| [1001] Data Extension Blockette | |
| [2000] Variable Length Opaque Data Blockette | seed_opaque ⁽²⁾ |
Notes on the Table
1. Stored in `:misc` in String arrays; each blockette gets a single String in the named key, separated by a newline character (\\n).
2. Stored in `:misc[\"seed_opaque\"]`, which contains raw (UInt8) byte
vectors of all data in each packet.
## Supported Data Encodings
| Format | Data Encoding |
|---------|:------------------------------------------------------------|
| 0 | ASCII [Saved to `:misc[\"seed_ascii\"]`, not `:x`] |
| 1 | Int16 unencoded |
| 3 | Int32 unencoded |
| 4 | Float32 unencoded |
| 5 | Float64 unencoded [Converted to Float32] |
| 10 | Steim-1 |
| 11 | Steim-2 |
| 13      | GEOSCOPE multiplexed, 16-bit gain ranged, 3-bit exponent     |
| 14      | GEOSCOPE multiplexed, 16-bit gain ranged, 4-bit exponent     |
| 16 | CDSN, 16-bit gain ranged |
| 30 | SRO |
| 32 | DWWSSN gain ranged |
### Unsupported Data Encodings
These have never been encountered by SeisIO. If support is needed, please send example files.
| Format | Data Encoding |
|---------|:------------------------------------------------------------|
| 2 | Int24 unencoded |
| 12 | GEOSCOPE multiplexed, 24-bit integer |
| 15 | US National Network |
| 17 | Graefenberg, 16-bit gain ranged |
| 18 | IPG - Strasbourg, 16-bit gain ranged |
| 19 | Steim-3 |
| 31 | HGLP |
| 33 | RSTN 16-bit gain ranged |
"
const dataless_support_data = "# Dataless SEED Support
SeisIO supports reading of dataless SEED meta-data files, a popular extension of the SEED (Standard for the Exchange of Earthquake Data) file format.
## Supported Blockette Types
| Blockette |
|:------------------------------------------------------|
| [10] Volume Identifier Blockette |
| [11] Volume Station Header Index Blockette |
| [12] Volume Time Span Index Blockette |
| [31] Comment Description Blockette |
| [33] Generic Abbreviation Blockette |
| [34] Units Abbreviations Blockette |
| [41] FIR Dictionary Blockette |
| [43] Response (Poles & Zeros) Dictionary Blockette |
| [44] Response (Coefficients) Dictionary Blockette |
| [45] Response List Dictionary Blockette |
| [47] Decimation Dictionary Blockette |
| [48] Channel Sensitivity/Gain Dictionary Blockette |
| [50] Station Identifier Blockette |
| [52] Channel Identifier Blockette |
| [53] Response (Poles & Zeros) Blockette |
| [54] Response (Coefficients) Blockette |
| [57] Decimation Blockette |
| [58] Channel Sensitivity/Gain Blockette |
| [59] Channel Comment Blockette |
| [60] Response Reference Blockette |
| [61] FIR Response Blockette |
## Unsupportable Blockette Types
These blockettes will probably never be supported as
their information lies outside the scope of SeisIO. At
high verbosity (v > 2), their information is dumped to
stdout.
| Blockette |
|:------------------------------------------------------|
| [30] Data Format Dictionary Blockette |
| [32] Cited Source Dictionary Blockette |
| [51] Station Comment Blockette |
"
const broken_resp_data = "The following is a list of breaking SEED RESP issues that we've encountered in real data. Files with these issues don't read correctly into any known program (e.g., ObsPy, SeisIO, SEED C libraries).
| Network | Station(s) | Problem(s)
| :---- | :---- | :----
| CN | (broadbands) | B058F05-06 contain units; should be B053F05-06
"
"mseed_support() shall info. dump mini-SEED blockette support to stdout."
mseed_support
function mseed_support()
show(stdout, MIME("text/plain"), Markdown.parse(mseed_support_data))
return nothing
end
"dataless_support() shall info. dump dataless SEED blockette support to stdout."
function dataless_support()
show(stdout, MIME("text/plain"), Markdown.parse(dataless_support_data))
return nothing
end
"seed_support() shall info. dump ALL SEED support info to stdout."
function seed_support()
show(stdout, MIME("text/plain"), Markdown.parse(mseed_support_data))
show(stdout, MIME("text/plain"), Markdown.parse(dataless_support_data))
show(stdout, MIME("text/plain"), Markdown.parse(broken_resp_data))
return nothing
end
"resp_wont_read() shall info. dump to stdout about broken resp files."
function resp_wont_read()
show(stdout, MIME("text/plain"), Markdown.parse(broken_resp_data))
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 11101 | function find_idvec(id::Array{UInt8, 1}, ids::Array{Array{UInt8, 1}, 1})
for j in 1:length(ids)
if ids[j] == id
return j
end
end
return -1
end
function scanrec!(sid::IO,
ints::Array{Int64, 1},
ff::Array{Array{Float64, 1}, 1},
segs::Array{Array{Int64, 1}, 1},
ids::Array{Array{UInt8, 1}, 1},
fs_times::Bool,
seg_times::Bool,
v::Integer)
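# ints holds four counters per channel j: ints[4j-3] = sample count,
# ints[4j-2] = gap count, ints[4j-1] = number of unique fs values,
# ints[4j] = end time of the last sample (μs from the Unix epoch)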
# ===================================================================
u16 = getfield(BUF, :uint16_buf)
flags = getfield(BUF, :flags)
# Fixed section of data header (48 bytes)
pos = fastpos(sid)
rebuffer!(sid)
seekstart(BUF.dh_buf)
read!(BUF.dh_buf, BUF.seq)
read!(BUF.dh_buf, BUF.hdr)
u16[1] = read(BUF.dh_buf, UInt16)
u16[2] = read(BUF.dh_buf, UInt16)
hh = read(BUF.dh_buf, UInt8)
mm = read(BUF.dh_buf, UInt8)
ss = read(BUF.dh_buf, UInt8)
skip(BUF.dh_buf, 1)
u16[3] = read(BUF.dh_buf, UInt16)
BUF.n = read(BUF.dh_buf, UInt16)
BUF.r1 = read(BUF.dh_buf, Int16)
BUF.r2 = read(BUF.dh_buf, Int16)
read!(BUF.dh_buf, flags)
BUF.tc = read(BUF.dh_buf, Int32)
u16[4] = read(BUF.dh_buf, UInt16)
u16[5] = read(BUF.dh_buf, UInt16)
if getfield(BUF, :swap) == true
hdrswap!(BUF)
end
# ==================================================================
# Post-read header processing
# Check for correct byte order
yy = u16[1]
jj = u16[2]
if (jj > 0x0200 || ((jj == 0x0000 || jj == 0x0100) &&
(yy > 0x0907 || yy < 0x707)) || yy>0x0bb8)
setfield!(BUF, :swap, !BUF.swap)
if ((BUF.swap == true) && (BUF.wo == 0x01))
BUF.xs = true
end
hdrswap!(BUF)
end
# Number of data points
n = getfield(BUF, :n)
# Output
if v > 2
println(stdout, String(copy(BUF.seq)), " ", String(copy(BUF.hdr)), ", fs = ", 1.0/BUF.dt, ", n = ", n)
end
# ==================================================================
# Channel handling
# Index to channel ID in ids
j = find_idvec(BUF.hdr, ids)
if j == -1
append!(ints, zeros(Int64, 4))
push!(ids, copy(BUF.hdr))
j = length(ids)
if fs_times
push!(ff, Float64[])
end
if seg_times
push!(segs, zeros(Int64, 2))
end
ints[4j-1] = one(Int64)
if v > 0
printstyled("New channel; SEED header = ", String(copy(BUF.hdr)), "\n", color=:red, bold=true)
end
elseif v == 3
println("j = ", j)
end
# Check for fs changes
if (BUF.r1 != BUF.r1_old) || (BUF.r2 != BUF.r2_old)
update_dt!(BUF)
if fs_times
push!(ff[j], 1.0/BUF.dt)
push!(ff[j], zero(Float64))
push!(ff[j], zero(Float64))
end
if ints[4j] != 0
ints[4j-1] += 1
end
end
# Get fs
fs = 1.0/getfield(BUF, :dt)
# ===================================================================
# Parse blockettes
# u16[5] = position within record
# u16[6] = number of bytes to skip to next blockette
nsk = u16[4] - 0x0030
u16[6] = u16[5] - 0x0030
nblk = flags[4]
v > 1 && println(string("Number of Blockettes = ", nblk))
v > 2 && println(stdout, "Relative position in record = ", u16[5], " B from begin, ", u16[6], " B to next blockette, ", nsk, " B to data")
@inbounds for i in 0x01:nblk
fastskip(sid, u16[6])
bt = fastread(sid, UInt16)
u16[6] = fastread(sid, UInt16)
if getfield(BUF, :swap) == true
bt = bswap(bt)
u16[6] = bswap(u16[6])
end
# Special handling for certain time, fs corrections
if bt == 0x0064 # [100] Sample Rate Blockette
BUF.dt = 1.0 / Float64(BUF.swap ? ntoh(fastread(sid, Float32)) : fastread(sid, Float32))
setfield!(BUF, :Δ, round(Int64, sμ*BUF.dt))
# must still be parsed (nearly) in full ... could skip fmt
elseif bt == 0x03e8 # [1000] Data Only SEED Blockette
BUF.fmt = fastread(sid)
BUF.wo = fastread(sid)
lx = fastread(sid)
fastskip(sid, 1)
BUF.nx = 2^lx
BUF.xs = ((BUF.swap == true) && (BUF.wo == 0x01))
elseif bt == 0x03e9 # [1001] Data Extension Blockette
fastskip(sid, 1)
BUF.tc += signed(fastread(sid))
end
# Get current position relative to record begin
u16[5] = UInt16(fastpos(sid) - pos)
# Update bytes to next blockette or bytes to data
if i < nblk
u16[6] = u16[6] - u16[5]
else
nsk = u16[4] - u16[5]
end
if v > 1
printstyled(string("Position = ", fastpos(sid), ", Blockette [", bt, "]\n"), color=:light_yellow)
println(stdout, "Relative position in record = ", u16[5], " B from begin, ", u16[6], " B to next blockette, ", nsk, " B to data")
end
end
# Skip to data section
if nsk > 0x0000
fastskip(sid, nsk)
if (v > 2)
println("Skipped ", nsk, " B")
printstyled(string("Position = ", fastpos(sid), "\n"), color=:light_green)
end
end
# ===================================================================
# Skip data
nb = getfield(BUF, :nx) - u16[4]
v > 1 && println("Skipping ", nb, " B")
fastskip(sid, nb)
# Check for time correction
is_tc = flags[2] >> 1 & 0x01
tc = getfield(BUF, :tc)
if is_tc == false && tc != zero(Int32)
δt = Int64(tc)*100
else
δt = zero(Int64)
end
# Sample rate in μs
Δ = getfield(BUF, :Δ)
# Start time of this record
τ = seed_time(u16, hh, mm, ss, δt)
# if seg_times, check to update start_time
if seg_times
if segs[j][end-1] == 0
segs[j][end-1] = τ
end
end
# check for gap
if ints[4j] != 0
gap = τ - ints[4j] - Δ
# gap found
if abs(gap) > div(Δ, 2)
ints[4j-2] += 1
if seg_times
append!(segs[j], zeros(Int64, 2))
segs[j][end-1] = τ
end
end
elseif seg_times
segs[j][end-1] = τ
end
# Set end time of current segment
ints[4j] = τ + (n-1)*Δ
# if seg_times tracked, always update segment end time
if seg_times
segs[j][end] = ints[4j]
end
v > 2 && printstyled(string("Position = ", fastpos(sid), "\n"), color=:light_green)
# Append number of samples to ints[4j-3]
ints[4j-3] = ints[4j-3] + getfield(BUF, :n)
# Logging if fs_times == true
if fs_times
if ff[j][end-1] == 0.0
ff[j][end-1] = τ*1.0e-6
end
ff[j][end] = ints[4j]*1.0e-6
end
if v > 2
println("ints[", 4j-3, "]:ints[", 4j, "] = ", ints[4j-3:4j])
end
# Done
return nothing
end
# These are nearly identical to parse_mseed, parse_mseed_file
function scan_seed!(io::IO,
ints::Array{Int64, 1},
ff::Array{Array{Float64, 1}, 1},
segs::Array{Array{Int64, 1}, 1},
ids::Array{Array{UInt8, 1}, 1},
fs_times::Bool,
seg_times::Bool,
v::Integer)
while !eof(io)
scanrec!(io, ints, ff, segs, ids, fs_times, seg_times, v)
end
fill!(getfield(BUF, :hdr_old), zero(UInt8))
setfield!(BUF, :r1_old, zero(Int16))
setfield!(BUF, :r2_old, zero(Int16))
return nothing
end
"""
soh = scan_seed(fname::String[, KWs])
Scan seed file `fname` and report properties in human-readable string array `soh`.
### General Keywords
* quiet (Bool): `true` to only return compact summary strings (no stdout)
* memmap (Bool): `true` to use memory mapping
* v (Integer): `v > 0` increases scan verbosity
### Output Keywords
These are all Booleans; `false` excludes from scan.
* `npts`: Number of samples per channel (default: `true`)
* `ngaps`: Number of time gaps per channel (default: `true`)
* `nfs`: Number of unique fs values per channel (default: `true`)
* `seg_times`: Exact gap times (default: `false`)
* `fs_times`: Exact times of fs changes (default: `false`)
!!! caution
Rarely, the number of gaps reported is off-by-one from `read_data`.
"""
function scan_seed(fname::String;
fs_times::Bool = false,
seg_times::Bool = false,
memmap::Bool = false,
nfs::Bool = true,
ngaps::Bool = true,
npts::Bool = true,
quiet::Bool = false,
v::Integer = 0
)
#=
TO DO:
* segment time tracking
=#
# [npts ngaps nfs end_time]
ints = Int64[]
# [[fs_1, te_1, fs_2, te_2, ...]_1, ...]
ff = Array{Array{Float64, 1}, 1}(undef, 0)
ids = Array{Array{UInt8, 1}, 1}(undef, 0)
segs = Array{Array{Int64, 1}, 1}(undef, 0)
io = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
fastskip(io, 6)
c = fastread(io)
if c in (0x44, 0x52, 0x4d, 0x51)
seekstart(io)
scan_seed!(io, ints, ff, segs, ids, fs_times, seg_times, v)
close(io)
else
close(io)
error("Invalid file type!")
end
# ================================================================
# Screen output
nc = length(ids)
# -----------------------------------------------------------------
# Compact output
soh = Array{String, 1}(undef, nc)
for j in 1:nc
copyto!(BUF.hdr, ids[j])
update_hdr!(BUF)
soh[j] = string(BUF.id_str, ", ",
"nx = ", ints[4j-3], ", ",
"ngaps = ", ints[4j-2], ", ",
"nfs = ", ints[4j-1])
end
quiet && (return soh)
# -----------------------------------------------------------------
# Basic counters
println("\n", lpad("CHANNEL", 15), " ",
npts ? string("| ", lpad("N PTS", 11), " ") : "",
ngaps ? "| N GAPS " : "",
nfs ? "| N FS " : "",
"\n", "-"^16,
npts ? string("+", "-"^13) : "",
ngaps ? string("+", "-"^8) : "",
nfs ? string("+", "-"^5) : "",
)
for j in 1:nc
copyto!(BUF.hdr, ids[j])
update_hdr!(BUF)
println(lpad(BUF.id_str, 15), " ",
npts ? string("| ", lpad(ints[4j-3], 11), " ") : "",
ngaps ? string("| ", lpad(ints[4j-2], 6), " ") : "",
nfs ? string("| ", lpad(ints[4j-1], 4), " ") : "",
)
end
println("")
# -----------------------------------------------------------------
# Detailed tracking
if fs_times
# Detailed tracking strings
for j in 1:nc
copyto!(BUF.hdr, ids[j])
update_hdr!(BUF)
println(BUF.id_str * " fs tracking:\n\n" *
lpad("TIME WINDOW", 50) * " | " * "FS\n" * "-"^51 * "+" * "---")
nf = div(length(ff[j]), 3)
for i in 1:nf
println(stdout, rpad(u2d(ff[j][3i-1]), 23),
" -- ",
rpad(u2d(ff[j][3i]), 23),
" | ",
repr(ff[j][3i-2], context=:compact=>true)
)
end
println("")
end
end
if seg_times
# Detailed tracking strings
for j in 1:nc
copyto!(BUF.hdr, ids[j])
update_hdr!(BUF)
println(BUF.id_str * " segment times:\n\n")
ns = div(length(segs[j]), 2)
for i in 1:ns
println(stdout, rpad(u2d(segs[j][2i-1]*1.0e-6), 23),
" -- ",
rpad(u2d(segs[j][2i]*1.0e-6), 23)
)
end
println("")
end
end
return soh
end
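# Usage sketch (hypothetical file name):
#   soh = scan_seed("data.mseed", quiet=true)
# returns one summary string per channel ("ID, nx = ..., ngaps = ..., nfs = ...")
# without printing the tables to stdout.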
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6398 | abstract type SUDSStruct end
# STATIONCOMP: Generic station component information
mutable struct StationComp <: SUDSStruct
az ::Int16 # component azimuth, N°E
inc ::Int16 # component angle of incidence from vertical
lat ::Float64 # latitude, N = +
lon ::Float64 # longitude, E = +
ele ::Float32 # elevation in meters
codes ::Array{UInt8,1}
# 1 d=dam, n=nuclear plant, v=vault, b=buried, s=surface, etc.
# 2 annotated comment code
# 3 type device data recorded on
# 4 rock type: i=igneous, m=metamorphic, s=sedimentary
# 5:6 code for type of rock
# 7 p=permafrost, etc.
# 8 sensor type: d=displacement, v=velocity, a=acceleration, t=time code
# 9 data type: see suds_decode.jl
# 10 data units: d=digital counts, v=mV, n=nanometers (nm/s or nm/s2
# 11 polarity: n=normal, r=reversed
# 12 status: d=dead, g=good
gain ::Array{Float32,1} # 1 maximum gain of the amplifier
# 2 abs val at which clipping begins
# 3 conversion to millivolts: mV/count
a2d ::Array{Int16,1} # 1 a2d channel number
# 2 gain of analog-to-digital converter
t_corr ::Array{Float32,1} # 1 clock correction in seconds
# 2 seismological station delay
StationComp() = new(zero(Int16),
zero(Int16),
zero(Float64),
zero(Float64),
zero(Float32),
zeros(UInt8, 12),
zeros(Float32, 3),
zeros(Int16, 2),
zeros(Float32, 2)
)
end
#= DESCRIPTRACE: Descriptive information about a seismic trace.
Normally followed by waveform data =#
mutable struct TraceHdr <: SUDSStruct
net ::UInt16
n_ch ::Int16
desc ::UInt8
ns ::Int32
TraceHdr() = new( 0x0000,
zero(Int16),
0x00,
zero(Int32)
)
end
# FEATURE: Observed phase arrival time, amplitude, and period
mutable struct SudsPhase <: SUDSStruct
pc ::UInt16
onset ::UInt8
fm ::UInt8
snr ::Int16
gr ::Int16
amp ::Float32
SudsPhase() = new(0x0000,
0x00,
0x00,
zero(Int16),
zero(Int16),
0.0f0)
end
# ORIGIN: Information about a specific solution for a given event
mutable struct SudsEvtHdr <: SUDSStruct
evno ::Int32
auth ::Int16
chars ::Array{UInt8,1}
reg ::Int32
ot ::Float64
lat ::Float64
lon ::Float64
floats ::Array{Float32,1}
model ::Array{UInt8, 1}
gap ::Int16
d_min ::Float32
shorts ::Array{Int16,1}
mag ::Array{Float32,1}
SudsEvtHdr() = new( zero(Int32),
zero(Int16),
zeros(UInt8, 6),
zero(Int32),
zero(Float64),
zero(Float64),
zero(Float64),
zeros(Float32, 4),
zeros(UInt8, 6),
zero(Int16),
zero(Float32),
zeros(Int16, 8),
zeros(Float32, 3)
)
end
# CHANSET
mutable struct ChanSet <: SUDSStruct
typ ::Int16 # CHANSET
n ::Int16
sta ::Array{UInt8,1}
tu ::Int32
td ::Int32
inst ::Int32 # CHANSETENTRY
stream ::Int16
chno ::Int16
ChanSet() = new(zero(Int16),
zero(Int16),
zeros(UInt8, 9),
zero(Int32),
zero(Int32),
zero(Int32),
zero(Int16),
zero(Int16))
end
mutable struct SUDSBuf
# struct_tag
sid::Int16
nbs::Int32
nbx::Int32
nx::Int32
nz::Int32
# staident
hdr ::Array{UInt8,1}
id ::Array{UInt8,1}
id_str ::String
# common variables in many structures
data_type ::UInt8 # data type code
sync_code ::UInt8 # sync code
irig ::Bool # whether the time correction is to "IRIG"
fs ::Float32 # fs
rc ::Float32 # rc
t_f64 ::Float64 # Begin time, phase time
t_f32 ::Float32 # Pick time (τ)
t_i32 ::Int32 # Effective time, time picked (!= pick time)
t_i16 ::Int16 # Local time correction in minutes
tc ::Float64 # Time correction
# Placeholders
nx_new ::Int64
nx_add ::Int64
# too trivial for its own sub-buffer
comm_i ::Array{Int16,1} # Numeric indices associated with comments
comm_s ::String
# sub-buffers
S05::StationComp
T::TraceHdr
P::SudsPhase
H::SudsEvtHdr
C::ChanSet
function SUDSBuf()
new(
zero(Int16), # sid
zero(Int32), # nbs
zero(Int32), # nbx
zero(Int32), # nx
zero(Int32), # nz
zeros(UInt8, 12), # hdr
zeros(UInt8, 13), # id
"", # id_str
0x00, # data_type
0x00, # sync_code
false, # irig
0.0f0, # fs
0.0f0, # rc
zero(Float64), # t_f64
zero(Float32), # t_f32
zero(Int32), # t_i32
zero(Int16), # t_i16
zero(Float64), # tc
KW.nx_new, # nx_new
KW.nx_add, # nx_add
zeros(Int16, 2), # comm_i
"", # comm_s
# structural buffers Code
StationComp(), # 5
TraceHdr(), # 6-7
SudsPhase(), # 10
SudsEvtHdr(), # 14
ChanSet(), # 32--33
)
end
end
const SB = SUDSBuf()
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1211 | SUDS_fmt = FormatDesc(
"SUDS (Seismic Universal Data System)",
"\"suds\"",
"U.S. Geological Survey and University of Alaska, Fairbanks, Alaska, USA",
"http://www.iris.edu/pub/programs/sel/sun/SUDS.2.6_tar.Z",
"Banfill Software Engineering <[email protected]>",
HistVec(),
["uses SEED-like data structures, some of which are followed by data",
"each packet has a structure identifier, a structure, and possibly data",
"documentation contains many inconsistences and errors",
"WIN-SUDS won't run on 64-bit systems, even in legacy mode",
"PC-SUDS software requires a DOS emulator",
"developed by Peter Ward, USGS, Menlo Park, CA, USA"
],
["US Geological Survey (USGS)",
"USGS Volcano Disaster Assistance Program (VDAP)",
"Alaska Volcano Observatory (AVO)",
"Observatorio Vulcanológico y Sismológico de Costa Rica (OVISCORI)",
"volcano monitoring"],
["https://pubs.usgs.gov/of/1989/0188/report.pdf",
"https://banfill.net/suds/PC-SUDS.pdf",
"https://banfill.net/suds/Win-SUDS.pdf",
"docs/Formats/suds_man.pdf"],
0xfd
)
SUDS_fmt.ver = [ FmtVer(2.6, "1994-05-11", false) ,
FmtVer(1.41, "1989-03-29", nothing)
]
formats["suds"] = SUDS_fmt
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 155 | import SeisIO: BUF,
KW,
check_for_gap!,
checkbuf!,
checkbuf_8!,
sμ,
μs
import SeisIO.Formats: formats, FmtVer, FormatDesc, HistVec
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 7628 | function read_suds(fname::String;
full::Bool=false,
memmap::Bool=false,
v::Integer=KW.v,
)
sid = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
S = SeisData()
# Tracking channels and indices
xc = Array{UnitRange{Int64}, 1}(undef, 32) # Array of arrays of channel indices
xn = Array{Int64,1}(undef, 32) # Array of lengths
xt = Array{Int64,1}(undef, 32) # Array of times
xz = Array{Int64,1}(undef, 32) # Array of segment sizes
xnet = Array{UInt16,1}(undef, 32) # Array of IDs mapped to UInt16
xi = 1 # index into BUF.x
xj = 0 # index into xc, xn
nx = 0
fs = 0.0
fs_last = 0.0
# File read ================================================================
# Parse all structures
cnt = 0
while !eof(sid)
cnt += 1
read_struct_tag!(sid, v)
# (struct_id, struct_len, nb) = read_struct_tag(sid, v)
# Attempt to skip unsupported structures
if SB.sid in unsupported
nsk = SB.nbs + SB.nbx
fastskip(sid, nsk)
(v > 1) && @warn(string("SUDS struct #", SB.sid, " unsupported; skipped ", nsk, " B"))
continue
end
v > 0 && println("reading structure code ", SB.sid)
# 6: MUXDATA ------------------------------------------------------------
if SB.sid == Int16(6)
# (net, data_type, ts, loctime, Nc, fs, data_type, nz) = read_6!(S, sid, v, full)
read_6!(S, sid, v, full)
t0 = 1000000*round(Int64, SB.t_f64)
(v > 2) && println("nz = ", SB.nz)
if fs_last == 0.0
for i in 1:SB.T.n_ch
if S.fs[i] == 0.0
S.fs[i] = SB.fs
end
S.src[i] = fname
end
fs_last = SB.fs
elseif fs_last != SB.fs
@warn(string("fs changes in structure ", cnt))
end
else
# 5: STATIONCOMP ------------------------------------------------------
if SB.sid == Int16(5)
i = read_5!(S, sid, v, full)
t0 = 1000000*Int64(SB.t_i32)
# 7: DESCRIPTRACE -----------------------------------------------------
elseif SB.sid == Int16(7)
read_7!(S, sid, v, full)
t0 = 1000000*round(Int64, SB.t_f64)
j = findid(SB.id_str, S)
if j > 0
(S.fs[j] == 0.0) && (S.fs[j] = Float64(SB.fs))
isempty(S.src[j]) && (S.src[j] = fname)
end
# 20: COMMENT ---------------------------------------------------------
elseif SB.sid == Int16(20)
read_20!(S, sid, v, full)
t0 = 0
continue
# 30: TIMECORRECTION ---------------------------------------------------
elseif SB.sid == Int16(30)
read_30!(S, sid, v, full)
SB.data_type = 0x00
t0 = 0
# 32: CHANSET ----------------------------------------------------------
elseif SB.sid == Int16(32)
read_32!(S, sid, v, full)
SB.data_type = 0x00
t0 = 0
continue
# 25-29 are skipped unless logged -------------------------------------
elseif SB.sid in 25:29 || SB.sid == Int16(31)
if v > 1
getfield(SUDS, Symbol(string("read_", SB.sid, "!")))(S, sid, v, full)
SB.data_type = 0x00
t0 = 0
else
nsk = SB.nbs + SB.nbx
fastskip(sid, nsk)
SB.data_type = 0x00
t0 = 0
continue
end
# ANYTHING ELSE -------------------------------------------------------
else
getfield(SUDS, Symbol(string("read_", SB.sid, "!")))(S, sid, v, full)
SB.data_type = 0x00
t0 = 0
end
SB.nz = zero(Int32)
end
# Parse data
if SB.nbx > 0
# read and reinterpret data
(v > 2) && println(stdout, "reading ", SB.nbx, " B")
checkbuf_8!(BUF.buf, SB.nbx)
fast_readbytes!(sid, BUF.buf, SB.nbx)
(y, sz) = suds_decode(BUF.buf, SB.data_type)
nx = div(SB.nbx, sz)
# Increment xj
xj += 1
checkbuf!(BUF.x, xi+nx)
if xj > length(xn)
L = length(xc)
resize!(xc, L+32)
resize!(xn, L+32)
resize!(xt, L+32)
resize!(xz, L+32)
resize!(xnet, L+32)
end
copyto!(BUF.x, xi, SB.data_type == 0x63 ? real.(y) : y, 1, nx)
# Store channel indices to xc
if SB.sid == Int16(6)
xc[xj] = 1:SB.T.n_ch
xnet[xj] = SB.T.net
elseif SB.sid == Int16(7)
j = findid(SB.id_str, S)
xc[xj] = j:j
xnet[xj] = 0x0000
end
# Store start indices to xn and per-segment sizes to xz. increment xi
xn[xj] = xi
xt[xj] = t0
xz[xj] = SB.nz == zero(Int32) ? nx : SB.nz
xi += nx
end
# if structure is a time correction, flush the suds buffers and write to S
if SB.sid == Int16(30)
# First identify which start times get corrected
net = 0x0000
tc = round(Int64, SB.tc*sμ)
if SB.irig
net = reinterpret(UInt16, SB.id[1:2])[1]
for j = 1:xj
if xnet[j] == net
xt[j] += tc
end
end
end
flush_suds!(S, xc, xn, xt, xz, xj, v)
# Adjust fs after flushing buffers, else fictitious time gaps appear
if SB.irig
net_str = String(copy(SB.id[1:2]))
if SB.rc != 0.0f0
for i = 1:S.n
if (S.id[i][1:2] == net_str) && (S.fs[i] != SB.fs + SB.rc)
(v > 2) && println("Adjusting S.fs[", i, "]")
S.fs[i] += SB.rc
end
end
end
end
xi = 1
xj = 0
SB.irig = false
end
end
flush_suds!(S, xc, xn, xt, xz, xj, v)
resize!(BUF.buf, 65535)
resize!(BUF.x, 65535)
return S
end
read_suds!(
S::GphysData,
fname::String;
full::Bool=false,
memmap::Bool=false,
v::Integer=KW.v,
) = (U = read_suds(fname, full=full, memmap=memmap, v=v); append!(S,U))
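#= Usage sketch (hypothetical file names):
  S = read_suds("event.sud")          # parse a SUDS file into a new SeisData object
  read_suds!(S, "event2.sud", v=1)    # append another file to an existing object
=#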
function readsudsevt(fname::String;
full::Bool=false,
memmap::Bool=false,
v::Integer=KW.v,
)
TD = read_suds(fname, full=full, memmap=memmap, v=v)
src_auth = get(auth, Int32(SB.H.auth), "")
m = SUDS.SB.H.shorts[6]
# use a distinct local name so the global `mag_scale` tuple isn't shadowed
msc = try
mag_scale[m]
catch
replace(join(Char.(reinterpret(UInt8, [m]))), "\0" => "")
end
# Generate empty event structs
Mag = EQMag(val = SB.H.mag[1],
nst = Int64(SB.H.shorts[7]),
scale = msc,
src = src_auth
)
# Create loc
Loc = EQLoc(lat = SB.H.lat,
lon = SB.H.lon,
dep = Float64(SB.H.floats[1]),
dx = Float64(SB.H.floats[2]),
dy = Float64(SB.H.floats[2]),
dz = Float64(SB.H.floats[3]),
rms = Float64(SB.H.floats[4]),
gap = Float64(SB.H.gap),
dmin = Float64(SB.H.d_min),
nst = Int64(SB.H.shorts[1]),
flags = SB.H.chars[6] in (0x47, 0x4e, 0x53, 0x65, 0x66) ? 0x20 : 0x00,
src = get(loc_prog, Char(SB.H.chars[6]), "Unknown location program")
)
# Create header
filename = abspath(fname)
H = SeisHdr(id = string(SB.H.evno),
loc = Loc,
mag = Mag,
ot = u2d(SB.H.ot),
src = filename
)
H.misc["auth"] = src_auth
H.misc["reg"] = SB.H.reg
H.misc["model"] = String(copy(SB.H.model))
note!(H, "+source ¦ " * filename)
# Create source
R = SeisSrc(src = filename)
note!(R, "+source ¦ " * filename)
# Create event container
Ev = SeisEvent(hdr = H, data = TD, source = R)
return Ev
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1528 | function flush_suds!(S::GphysData,
xc::Array{UnitRange{Int64},1},
xn::Array{Int64,1},
xt::Array{Int64,1},
xz::Array{Int64,1},
xj::Int64,
v::Integer)
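# xc: channel index ranges per buffered segment; xn: start offsets into BUF.x;
# xt: segment start times (μs); xz: per-segment sample counts; xj: number of
# buffered segments to flush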
(xj == 0) && return nothing
# Post-read ================================================================
resize!(xc, xj)
resize!(xn, xj)
resize!(xt, xj)
resize!(xz, xj)
if v > 1
println("Done processing: ")
println("Processed xj = ", xj, " segments")
println("Segment ranges = ", xc)
println("Segment starts = ", xn)
println("Segment start times = ", xt)
println("Channel segment lengths = ", xz)
end
# Determine length of each array in S.x
Lx = zeros(Int64, S.n)
for j = 1:xj
nz = xz[j]
for i in xc[j]
Lx[i] += nz
end
end
# Data assignment ==========================================================
# Initialize arrays in S.x and counters sxi
sxi = ones(Int64, S.n)
for i = 1:S.n
if isempty(S.x[i])
S.x[i] = Array{Float32,1}(undef, Lx[i])
else
lxi = length(S.x[i])
sxi[i] = lxi + 1
resize!(S.x[i], lxi + Lx[i])
end
end
# Loop again over xn; this time, copy each segment over
for j = 1:xj
xs = xn[j]
si = xs
for i in xc[j]
t0 = xt[j]
nz = xz[j]
copyto!(S.x[i], sxi[i], BUF.x, si, nz)
si += nz
sxi[i] += nz
# determine start time of channel
if isempty(S.t[i])
S.t[i] = Int64[1 t0; nz 0]
else
check_for_gap!(S, i, t0, nz, v)
end
end
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 14374 | # Code lists begin on page 322 of the SUDS manual
const unsupported = Int16.((3, 4, 8, 9, 11, 12, 13, 15, 16, 17, 18, 19, 21, 22, 23, 24))
const disp_only = Int16.((20, 25, 26, 27, 28, 29, 31))
const suds_codes = Dict{Int64, String}(
1 => "STATIONID",
2 => "STRUCTTAG",
3 => "PC_TERMINATOR",
4 => "EQUIPMENT",
5 => "STATIONCOMP",
6 => "MUXDATA",
7 => "DESCRIPTRACE",
8 => "LOCTRACE",
9 => "PC_CALIBRATION",
10 => "FEATURE",
11 => "RESIDUAL",
12 => "PC_EVENT",
13 => "EVDESCR",
14 => "ORIGIN",
15 => "ERROR",
16 => "FOCALMECH",
17 => "MOMENT",
18 => "VELMODEL",
19 => "LAYERS",
20 => "PC_COMMENT",
21 => "PROFILE",
22 => "SHOTGATHER",
23 => "CALIB",
24 => "COMPLEX",
25 => "TRIGGERS",
26 => "TRIGSETTING",
27 => "EVENTSETTING",
28 => "DETECTOR",
29 => "ATODINFO",
30 => "TIMECORRECTION",
32 => "CHANSET",
31 => "INSTRUMENT",
33 => "CHANSETENTRY"
)
const sensor_types = Dict{UInt8, String}(
0x42 => "K" , # 'B' = "bolometer"
0x43 => "{unknown}" , # 'C' = "local clock"
0x48 => "%" , # 'H' = "humidity"
0x50 => "Pa" , # 'P' = "pressure sensor"
0x52 => "{unknown}" , # 'R' = "rainfall"
0x53 => "m/m" , # 'S' = "linear strain meter"
0x54 => "K" , # 'T' = "temperature sensor"
0x56 => "m3/m3" , # 'V' = "volumetric strain meter"
0x57 => "{unknown}" , # 'W' = "wind"
0x61 => "m/s2" , # 'a' = "accelerometer"
0x63 => "m" , # 'c' = "creep meter"
0x64 => "m" , # 'd' = "displacement sensor"
0x67 => "cm/s2" , # 'g' = "gravimeter"
0x69 => "degrees" , # 'i' = "tilt meter/inclinometer"
0x6d => "T" , # 'm' = "magnetic field"
0x72 => "{radon}" , # 'r' = "radon sensor"
0x73 => "{unknown}" , # 's' = "satellite time code"
0x74 => "m" , # 't' = "tidal meter"
0x76 => "m/s" , # 'v' = "velocity seismometer"
0x77 => "N.m2" , # 'w' = "torsion"
0x78 => "{unknown}" , # 'x' = "experimental"
)
const data_types = Dict{Int64, String}(
-33 => "PAD4", -32 => "PAD2", -31 => "PAD1 ", -30 => "INT3", -29 => "IDXPTR ",
-28 => "YESNO", -27 => "UCHAR ", -26 => "UINT2", -25 => "UINT4 ",
-24 => "MEMPTR", -23 => "CHRPTR", -22 => "GENPTR", -21 => "CODESTR ",
-20 => "INT2TM", -19 => "LIST", -18 => "LATIT", -17 => "LONGIT ",
-16 => "MS i TIME", -15 => "ST i TIME", -14 => "FLOAT8", -13 => "FLOAT4 ",
-12 => "AUTHOR", -11 => "DOMAIN", -10 => "REFERS2", -9 => "LABEL",
-8 => "CODE4", -7 => "CODE2 ", -6 => "CODE1", -5 => "FIXED", -4 => "INT4",
-3 => "INT2 ", -2 => "STRING", -1 => "CHAR", 2 => "structtag",
3 => "pc i terminator", 4 => "equipment", 5 => "stationcomp", 6 => "muxdata",
7 => "descriptrace", 8 => "loctrace", 9 => "pc i calibration",
10 => "feature", 11 => "residual", 12 => "pc i event", 13 => "evdescr",
14 => "origin", 15 => "error", 16 => "focalmech", 17 => "moment",
18 => "velmodel", 19 => "layers", 20 => "pc i comment", 23 => "calib",
25 => "triggers", 26 => "trigsetting", 27 => "eventsetting", 28 => "detector",
29 => "atodinfo", 30 => "timecorrection", 31 => "instrument", 32 => "chanset",
33 => "chansetentry", 104 => "sig i path i cmp", 105 => "signal i path",
106 => "mux i waveform", 107 => "waveform", 108 => "data i group",
109 => "response", 110 => "pick", 111 => "pick i residual", 112 => "event",
113 => "signif i event", 114 => "solution", 115 => "solution i err",
116 => "focal i mech", 118 => "vel i model", 119 => "vel i layer i data",
123 => "resp i pz i data", 125 => "lsa i detection", 126 => "lsa i setting",
130 => "clock i rate", 131 => "recorder", 200 => "variable i info",
201 => "structure i info", 202 => "member i info", 203 => "stream",
204 => "file i index", 205 => "code i list", 206 => "gui i default",
211 => "comment", 212 => "structure i tag", 213 => "terminator",
214 => "code i data", 300 => "site", 301 => "spectra",
302 => "sig i cmp i data", 303 => "resp i fap i data",
304 => "lsa i set i data", 305 => "resp i cfs i data",
306 => "sig i path i data", 307 => "magnitude", 308 => "processing",
309 => "polarity", 310 => "ssam i setup", 311 => "ssam i output",
312 => "map i element", 313 => "seismometer", 314 => "resp i fir i data",
315 => "filter", 316 => "sig i path i ass", 317 => "ssam i band i data",
318 => "beam i data", 319 => "resp i sen i data", 320 => "calibration",
321 => "source", 322 => "user i vars", 323 => "service",
324 => "recorder i ass", 325 => "seismo i ass", 326 => "coordinate i sys"
)
const pick_types = Dict{Int16, String}(
0 => "NOT GIVEN",
1 => "WINDOW",
2 => "F FINIS",
3 => "X MAXIMUM AMPLITUDE",
4 => "INCREASE GAIN STEP",
5 => "DECREASE GAIN STEP",
50 => "P",
51 => "P",
52 => "P∗",
53 => "PP",
54 => "PPP",
55 => "PPPP",
56 => "PPS",
57 => "PG",
58 => "PN",
59 => "PDIFFRACTED",
60 => "PCP",
61 => "PCPPKP",
62 => "PCS",
63 => "PP",
64 => "PPP",
65 => "PKP",
66 => "PKPPKP",
67 => "PKPPKS",
68 => "PKPSKS",
69 => "PKS",
70 => "PPKS",
71 => "PKKP",
72 => "PKKS",
73 => "PCPPKP",
74 => "PCSPKP",
100 => "S",
101 => "S",
102 => "S∗",
103 => "SS",
104 => "SSS",
105 => "SSSS",
106 => "SG",
107 => "SN",
108 => "SCS",
109 => "SPCS",
110 => "SS",
111 => "SSS",
112 => "SSSS",
113 => "SSCS",
114 => "SCSPKP",
115 => "SCP",
116 => "SKS",
117 => "SKKS",
118 => "SKKKS",
119 => "SKSSKS",
120 => "SKP",
121 => "SKKP",
122 => "SKKKP",
201 => "LG",
202 => "LR",
203 => "LR2",
204 => "LR3",
205 => "LR4",
206 => "LQ",
207 => "LQ2",
208 => "LQ3",
209 => "LQ4",
301 => "T")
# Instrument codes
# 0 = "not specified"
# 1 = "sp usgs"
# 2 = "sp wwssn"
# 3 = "lp wwssn"
# 4 = "sp dwwssn"
# 5 = "lp dwwssn"
# 6 = "hglp lamont"
# 7 = "lp hglp lamont"
# 8 = "sp sro"
# 9 = "lp sro"
# 10 = "sp asro"
# 11 = "lp asro"
# 12 = "sp rstn"
# 13 = "lp rstn"
# 14 = "sp uofa U of alaska"
# 15 = "STS-1/UVBB"
# 16 = "STS-1/VBB"
# 17 = "STS-2"
# 18 = "FBA-23"
# 19 = "Wilcoxin "
# 50 = "USGS cassette"
# 51 = "GEOS"
# 52 = "EDA"
# 53 = "Sprengnether refraction"
# 54 = "Teledyne refraction"
# 55 = "Kinemetrics refraction"
# 300 = "amplifier"
# 301 = "amp/vco"
# 302 = "filter"
# 303 = "summing amp"
# 304 = "transmitter"
# 305 = "receiver"
# 306 = "antenna"
# 307 = "battery"
# 308 = "solar cell"
# 309 = "discriminator"
# 310 = "discr. rack"
# 311 = "paper recorder"
# 312 = "film recorder"
# 313 = "smoked glass recorder"
# 314 = "atod converter"
# 315 = "computer"
# 316 = "clock"
# 317 = "time receiver"
# 318 = "magnetic tape"
# 319 = "magnetic disk"
# 320 = "optical disk"
# ampunits
# 'd' = "digital counts",
# 'm' = "millimeters on develocorder" ,
# 'n' = "nanometers (/sec or /sec/sec)",
# 'v' = "millivolts",
const auth = Dict{Int32, String}(
0 => "none: not given" ,
1 => "temp: Temporary, for testing purposes",
2 => "suds: Internal to SUDS",
101 => "calnet usgs menlo park, ca",
102 => "alaska net usgs menlo park, ca" ,
103 => "katmai net usgs menlo park, ca",
104 => "scalnet usgs pasadena, ca.",
120 => "shumagin net lamont palisades,ny",
10000 => "gsmen: US Geological Survey, Menlo Park, CA",
10001 => "suds: testing of suds at the USGS, Menlo Park, CA" ,
10002 =>"calnt: network porcessing group, USGS, Menlo Park, CA",
10005 => "5day: 5 day recorders US Geological Survey, Menlo Park, CA" ,
10006 => "geos: GEOS recorders US Geological Survey, Menlo Park, CA",
10007 => "cent: centipede recorders US Geological Survey, Menlo Park, CA" ,
10008 => "citgs: CIT stations maintained by USGS, Menlo Park, CA",
10009 => "lllgs: LLL stations maintained by USGS, Menlo Park, CA" ,
10010 => "dwrgs: LLL stations maintained by USGS, Menlo Park, CA",
10011 => "unrgs: UNR stations maintained by USGS, Menlo Park, CA",
10012 => "yel: Yellowstone Park, Wyoming, maintained by USGS, Menlo Park, CA",
10500 => "RTP: main rtp, USGS, Menlo Park",
10501 => "PRTP: prototype rtp, USGS, Menlo Park",
10502 => "MRTP: motorola rtp, USGS, Menlo Park" ,
10503 => "TUST1: CUSP Tustin A/D #1",
10504 => "TUST2: CUSP Tustin A/D #2" ,
10505 => "ECLIP: CUSP Eclipse digitizer",
10506 => "CVAX: CUSP-VAX/750 digitizer" ,
10507 => "HPARK: Haliburton digital, Parkfield",
10520 => "CITT1: Tustin #1, Pasadena",
10521 => "CITT2: Tustin #2, Pasadena",
10522 => "CITN3: 11/34 online, Pasadena" ,
10523 => "CITS3: 11/34 online, Pasadena",
10524 => "CITD1: Nova/Eclipse, Pasadena" ,
10525 => "CITF: VAX, Pasadena",
10526 => "CITH: hand timed in Pasadena" ,
11000 => "Daiss, Charles, USGS, Menlo Park, CA",
11001 => "Oppenheimer, Dave, USGS, Menlo Park, CA" ,
11002 => "Eaton, Jerry, USGS, Menlo Park, CA",
15000 => "gspas: US Geological Survey, Pasadena, CA" ,
15001 => "tergp: TERRAscope, US Geological Survey, Pasadena, CA",
20000 => "uofa: Geophysical Institute, University of Alaska, College, AK" ,
30000 => "uofw: Geophysics, University of Washington, WA",
40000 => "ldgo: Lamont Doherty Geological Observatory, Palisades, NY" ,
50000 => "iris: IRIS Consortium, Seattle Data Center, WA",
51000 => "gsn: Global Seismographic Network, USGS, Albuquerque, NM" ,
52000 => "asro: Abbreviated Seismic Research Observatories",
53000 => "passc: PASSCAL Program, IRIS",
60000 => "lll: Lawrence Livermore Labs, Livermore, CA",
70000 => "lbl: Lawrence Berkeley Labs, U. C. Berkeley, CA" ,
80000 => "lanl: Los Alamos National Labs, Los Alamos, NM",
90000 => "stl: St. Louis University, St. Louis, MO",
100000 => "ucsd: University of California, San Diego and SCRIPPS",
110000 => "ucb: University of California, Berkeley, CA",
120000 => "ucsb: University of California, Santa Barbara, CA",
130000 => "ucsc: University of California, Santa Cruz, CA",
140000 => "usc: University of Southern California, Los Angeles, CA",
150000 => "cit: California Institute of Technology, Pasadena, CA",
150001 => "terct: TERRAscope network, California Institute of Technology, Pasadena, CA",
160000 => "nnunr: Northern Nevada net, University of Nevada, Reno, NV",
160001 => "snunr: Southern Nevada net, University of Nevada, Reno, NV",
170000 => "utah: University of Utah, Salt Lake City, UT",
180000 => "msu: Memphis State University, Memphis, TN",
180000 => "msu: Memphis State University, Memphis, TN" ,
181010 => "sanju: PANDA experiment in SAN JUAN, Argentina",
181011 => "jujuy: PANDA experiment in JUJUY, Argentina",
181020 => "newma: PANDA experiment in NEW MADRID, TN",
181030 => "arken: PANDA experiment in AK",
181040 => "hawii: PANDA experiment in HAWII",
181050 => "palmn: PANDA experiment in PALMERSTON NORTH, New Zealand" ,
181051 => "taran: PANDA2 experiment in mountain TARAMAKI, New Zealand",
181060 => "taiwa: PANDA2 experiment in Taiwan",
187000 => "archj: ARCH Johnston, professor, director of research",
187001 => "jmch: Jer-Ming CHiu, professor",
187002 => "wych: Wai-Ying CHung, associate research professor",
187003 => "hjdo: H.James DOrman, executive director",
187004 => "mell: Michael ELLis, associate professor",
187005 => "Josep: JOSE Pujol, associate professor",
187006 => "paulr: PAUL Rydelek, assistant research professor",
187007 => "robsm: ROBert SMalley, assistant research professor" ,
187008 => "paulb: PAUL Bodin, assistant professor",
187009 => "eusc: EUgene SChweig, adjunct professor, USGS geologist" ,
187010 => "johng: JOHN Geomberg, adjunct professor, USGS geophysicist",
187011 => "scda: SCott DAvis, USGS guest researcher",
187500 => "jimbo: JIM BOllwerk, seismic networks engineer",
187501 => "stepb: STEPhen Brewer, ceri seismic networks director" ,
187502 => "cchiu: Christy CHIU, research associate II",
187503 => "michf: MICHael Frohme, director of computing",
188000 => "zrli: ZhaoRen LI, graduate research assistant",
188001 => "kcch: Kou-Cheng Chen, graduate research assistant" ,
189000 => "group: data processing GROUP in ceri",
190000 => "aftac: AFTAC Center for Seismic Studies, Alexandria, VA" ,
200000 => "uhhil: University of Hawaii, Hilo, HA",
210000 => "uhhon: University of Hawaii, Honolulu, HA" ,
220000 => "mit: Massachusetts Institute of Technology, Cambridge, MA",
230000 => "dtm: Department of Terrestrial Magnetism, Washington, DC" ,
240000 => "vpi: Virginia Polytechnic Institute, Blacksburg, VA",
250000 => "anu: Australian National University",
260000 => "gsgol: US Geological Survey, Golden, CO",
260001 => "nngsg: Northern Nevada network, US Geological Survey, Golden, CO" ,
260002 => "sngsg: Southern Nevada network, US Geological Survey, Golden, CO",
270000 => "bmr: Bureau of Mineral Resources",
280000 => "cands: Canadian Digital Seismic Network",
290000 => "cdsn: China Digital Seismic Network",
300000 => "cdmg: California Division Mines-Geology, Sacramento, CA",
310000 => "pge: Pacific Gas and Electric/Woodward-Clyde, CA",
315001 => "unoiv: Union Oil, Imperial Valley, CA",
315002 => "unoml: Union Oil, Medicine Lake",
320000 => "terra: Terra Corporation, Mendocino, CA",
330000 => "cadwr: California Division of Water Resources",
340000 => "gikar: Geophysical Institute, Karlsruhe, Germany",
350000 => "gfz: GeoForschungsZentrum, Potsdam, Germany",
360000 => "cnrir: CNR-IRS, Milan, Italy",
370000 => "gsc: Geological Survey of Canada, Ottawa, Canada" ,
380000 => "ind: industry",
385000 => "geot: Geotech, Garland, Texas",
390000 => "nano: Nanometrics, Kanata, Ontario, Canada",
395000 => "lenn: Lennartz Electronic, Tubingen, Germany",
400000 => "kine: Kinemetrics, Pasadena, CA",
405000 => "snl: Sandia National Laboratories, Albuquerque, NM",
410000 => "cices: CICESE, Ensenada, Mexico",
415000 => "nmt: New Mexico Inst Mining and Tech, Soccorro, NM"
)
const mag_scale = ("coda", "tau", "xmag", "ml", "mb", "ms", "mw")
# const mag_type = Dict{Char, String}(
# 'A' => "average coda and amplitude",
# 'S' => "Msz",
# 'a' => "amplitude",
# 'b' => "Mb",
# 'c' => "coda",
# 'l' => "Ml",
# 'm' => "moment",
# 's' => "Ms",
# 'w' => "Mw"
# )
const loc_prog = Dict{Char, String}(
'7' => "Hypo-71",
'l' => "HypoLayer",
'e' => "HypoEllipse",
'i' => "HypoInverse", # Also given as 'h' in SAC readsuds documentation
'r' => "relp",
'u' => "Uhrhammer",
'c' => "Centroid",
'h' => "HypoInverse",
'v' => "Velest"
)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1422 | # trace data character codes
# /* s = 12 bit unsigned stored as short int, 0 to 4095, */
# /* q = 12 bit signed stored as short int, -2048 to 2047, */
# /* u = 16 bit unsigned stored as short int, 0 to 65535 */
# /* i = 16 bit signed stored as short int, -32767 to 32767, */
# /* 2 = 24 bit signed integer stored as long, */
# /* l = 32 bit signed integer stored as long, */
# /* r = 12 bit data, 4 lsb time stored as short int, */
# /* f = float (32 bit IEEE real), */
# /* d = double (64 bit IEEE real), */
# /* c = complex, */
# /* v = vector, */
# /* t = tensor */
function suds_decode(x::Array{UInt8,1}, code::UInt8)
if code in (0x69, 0x71, 0x73, 0x75) # 'i', 'q', 's', 'u'
y = reinterpret(Int16, x)
s = 2
elseif code == 0x32 || code == 0x6c # '2', 'l'
y = reinterpret(Int32, x)
s = 4
elseif code == 0x63 # 'c'
y = reinterpret(Complex{Float32}, x)
s = 8
elseif code == 0x64 # 'd'
y = reinterpret(Float64, x)
s = 8
elseif code == 0x66 # 'f'
y = reinterpret(Float32, x)
s = 4
else
error(string("no decoder for trace data code ", code, "!"))
end
return y,s
end
# No decoders for (and no idea what to do with)
# 0x72 # 'r', would need a win32-style bits parser
# 0x74 # 't', not a bits type, not defined in documentation
# 0x76 # 'v', not a bits type, not defined in documentation
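#= Usage sketch (illustrative, not from the original docs; assumes a little-endian host):
     raw = UInt8[0x00, 0x00, 0x80, 0x3f, 0x00, 0x00, 0x00, 0x40]
     y, nb = suds_decode(raw, 0x66)   # 'f' => Float32; y == Float32[1.0, 2.0], nb == 4
=#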
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 16782 | function read_struct_tag!(io::IO, v::Integer=0)
s = fastread(io)
s == 0x53 || (close(io); error("damaged or scrambled SUDS file; can't continue."))
m = fastread(io)
SB.sid = fastread(io, Int16)
SB.nbs = fastread(io, Int32)
SB.nbx = fastread(io, Int32)
(v > 0) && println("suds_structtag: machine code = ", Char(m),
", ID = ", SB.sid,
", struct size = ", SB.nbs, " B",
", data size = ", SB.nbx, " B",
)
return nothing
end
function staident!(io::IO, v::Integer=0)
fastread!(io, SB.hdr)
fill!(SB.id, 0x00)
# Set ID string
j = 1
for i = 1:2
if SB.hdr[i] != 0x00
SB.id[j] = SB.hdr[i]
j += 1
end
end
SB.id[j] = 0x2e
j += 1
for i = 5:9
if SB.hdr[i] != 0x00
SB.id[j] = SB.hdr[i]
j += 1
end
end
SB.id[j] = 0x2e
SB.id[j+1] = 0x2e
SB.id[j+2] = SB.hdr[10]
j += 2
setfield!(SB, :id_str, unsafe_string(pointer(getfield(SB, :id)), j))
# Print new ID to STDOUT if v > 1
(v > 1) && println( "id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]))
return nothing
end
function read_chansetentry!(S::GphysData, io::IO, v::Integer)
SB.C.inst = fastread(io, Int32)
SB.C.stream = fastread(io, Int16)
SB.C.chno = fastread(io, Int16)
staident!(io, 0)
if v > 2
println("id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
", inst = ", SB.C.inst,
", stream = ", SB.C.stream,
", chno = ", SB.C.chno)
end
return nothing
end
# # STAT_IDENT: Station identification
# read_1!(S::GphysData, io::IO, v::Integer, full::Bool) = staident!(io, v)
#
# # STRUCTTAG: Structure to identify structures when archived together
# read_2!(S::GphysData, io::IO, v::Integer, full::Bool) = read_struct_tag!(io)
# STATIONCOMP: Generic station component information
function read_5!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
SB.S05.az = fastread(io, Int16) # azimuth, N°E
SB.S05.inc = fastread(io, Int16) # incidence, from vertical
SB.S05.lat = fastread(io, Float64) # latitude, N = +
SB.S05.lon = fastread(io, Float64) # longitude, E = +
SB.S05.ele = fastread(io, Float32) # elevation, meters
fastread!(io, SB.S05.codes)
fastread!(io, SB.S05.gain)
fastread!(io, SB.S05.a2d)
SB.t_i32 = fastread(io, Int32) # date/time values became effective
fastread!(io, SB.S05.t_corr)
# Post-read processing
SB.data_type = SB.S05.codes[9]
sensor_type = SB.S05.codes[8]
# Does this station exist in S?
i = findid(SB.id_str, S)
if i == 0
loc = GeoLoc( "",
SB.S05.lat,
SB.S05.lon,
Float64(SB.S05.ele),
0.0,
Float64(SB.S05.az),
Float64(SB.S05.inc)
)
misc = if full
Dict{String, Any}(
"ic" => Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
"enclosure" => Char(SB.S05.codes[1]),
"annotation" => Char(SB.S05.codes[2]),
"recorder" => Char(SB.S05.codes[3]),
"rockclass" => Char(SB.S05.codes[4]),
"rocktype" => Int16(SB.S05.codes[6]) << 8 | Int16(SB.S05.codes[5]),
"sitecondition" => Char(SB.S05.codes[7]),
"sensor_type" => Char(sensor_type),
"data_type" => Char(SB.data_type),
"data_units" => Char(SB.S05.codes[10]),
"polarity" => Char(SB.S05.codes[11]),
"st_status" => Char(SB.S05.codes[12]),
"max_gain" => SB.S05.gain[1],
"clip_value" => SB.S05.gain[2],
"con_mvolts" => SB.S05.gain[3],
"a2d_ch" => SB.S05.a2d[1],
"a2d_gain" => SB.S05.a2d[2],
"dt_eff" => SB.t_i32,
"clock_corr" => SB.S05.t_corr[1],
"sta_delay" => SB.S05.t_corr[2]
)
else
Dict{String, Any}(
"data_type" => Char(SB.data_type),
"sensor_type" => Char(sensor_type),
)
end
push!(S, SeisChannel( id = SB.id_str,
loc = loc,
misc = misc,
gain = Float64(SB.S05.gain[1]),
units = sensor_types[sensor_type]
)
)
i = S.n
end
return i
end
# MUXDATA: Header for (possibly) multiplexed data
function read_6!(S::GphysData, io::IO, v::Integer, full::Bool)
SB.T.net = fastread(io, UInt16)
fastskip(io, 2)
SB.t_f64 = fastread(io, Float64)
SB.t_i16 = fastread(io, Int16)
SB.T.n_ch = fastread(io, Int16)
SB.fs = fastread(io, Float32)
SB.data_type = fastread(io)
fastskip(io, 3)
SB.T.ns = fastread(io, Int32)
SB.nz = fastread(io, Int32)
if v > 1
println("net = ", Char(SB.T.net & 0x00ff), Char(SB.T.net >> 8),
", btime = ", u2d(SB.t_f64),
", loctime = ", SB.t_i16,
", n = ", SB.T.n_ch,
", fs = ", SB.fs,
", data_type = ", Char(SB.data_type),
", n sweeps = ", SB.T.ns,
", nx = ", SB.nz
)
end
# return (net, data_type, btime, loctime, numchans, fs, data_type, nx)
return nothing
end
#= DESCRIPTRACE: Descriptive information about a seismic trace.
Normally followed by waveform data =#
function read_7!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
SB.t_f64 = fastread(io, Float64)
SB.t_i16 = fastread(io, Int16)
SB.data_type = fastread(io)
SB.T.desc = fastread(io)
fastskip(io, 4)
SB.nz = fastread(io, Int32)
SB.fs = fastread(io, Float32)
fastskip(io, 16)
SB.t_f64 += fastread(io, Float64)
SB.rc = fastread(io, Float32)
if v > 2
tl = SB.t_i16
println("id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
", begin = ", u2d(SB.t_f64 + tl*60),
" (GMT ", tl < 0 ? "-" : "+", div(tl, 60), ")",
", data_type = ", Char(SB.data_type),
", nx = ", SB.nx,
", fs = ", SB.fs + SB.rc)
end
# return (id, data_type, nx, Float64(fs+rc), btime+tc)
return nothing
end
# FEATURE: Observed phase arrival time, amplitude, and period
function read_10!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
SB.P.pc = fastread(io, Int16)
SB.P.onset = fastread(io)
SB.P.fm = fastread(io)
SB.P.snr = fastread(io, Int16)
fastskip(io, 4)
SB.P.gr = fastread(io, Int16)
SB.t_f64 = fastread(io, Float64)
SB.P.amp = fastread(io, Float32)
SB.t_f32 = fastread(io, Float32)
SB.t_i32 = fastread(io, Int32)
fastskip(io, 4)
phase = pick_types[SB.P.pc]
(v > 1) && println(
"id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
", phase: ", phase, Char(SB.P.onset), Char(SB.P.fm),
", t = ", u2d(SB.t_f64),
", A = ", SB.P.amp,
", τ = ", SB.t_f32,
", SNR = ", SB.P.snr,
", gain range = ", SB.P.gr)
if isa(S, EventTraceData)
i = findid(SB.id_str, S)
if i == 0
push!(S, SeisChannel(id = SB.id_str))
i = S.n
end
S.pha[i][phase] = SeisPha(amp = Float64(SB.P.amp),
tt = SB.t_f64,
pol = Char(SB.P.fm),
)
end
return nothing
end
# ORIGIN: Information about a specific solution for a given event
function read_14!(S::GphysData, io::IO, v::Integer, full::Bool)
SB.H.evno = fastread(io, Int32)
SB.H.auth = fastread(io, Int16)
fastread!(io, SB.H.chars)
SB.H.reg = fastread(io, Int32)
SB.H.ot = fastread(io, Float64)
SB.H.lat = fastread(io, Float64)
SB.H.lon = fastread(io, Float64)
fastread!(io, SB.H.floats)
fastread!(io, SB.H.model)
SB.H.gap = fastread(io, Int16)
SB.H.d_min = fastread(io, Float32)
fastread!(io, SB.H.shorts)
fastread!(io, SB.H.mag)
SB.t_i32 = fastread(io, Int32)
if v > 1
println("evno = ", SB.H.evno,
", auth = ", SB.H.auth,
", codes = ", map(Char, SB.H.chars),
", reg = ", SB.H.reg,
", ot = ", u2d(SB.H.ot),
", origin = ", SB.H.lat, "N, ", SB.H.lon, "E, z ", SB.H.floats[1], " km",
", δx = ", SB.H.floats[2], " km",
", δz = ", SB.H.floats[3], " km",
", rms = ", SB.H.floats[4],
", model = ", String(copy(SB.H.model)),
", Δ = ", SB.H.gap, "∘",
", d_min = ", SB.H.d_min, " km",
", shorts = ", SB.H.shorts,
", mag = ", SB.H.mag,
", t_eff = ", u2d(Float64(SB.t_i32))
)
end
return nothing
end
# COMMENT: Comment tag to be followed by the bytes of comment
function read_20!(S::GphysData, io::IO, v::Integer, full::Bool)
fastread!(io, SB.comm_i)
L = fastread(io, Int16)
fastskip(io, 2)
if v > 1
println("structure ref. ID = ", SB.comm_i[1],
", item in struct ID = ", SB.comm_i[2],
", L = ", L)
end
SB.comm_s = String(copy(fastread(io, L)))
(v > 2) && (printstyled("comment: \n", color=:green); println(SB.comm_s); printstyled("--\n", color=:green))
return nothing
end
# TRIGGERS: Earthquake detector trigger statistics
function read_25!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
shorts = fastread(io, Int16, 6)
trig_time = fastread(io, Float64)
if v > 1
println("id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
", shorts = ", shorts,
", btime = ", u2d(trig_time))
end
return nothing
end
# TRIGSETTING: Settings for earthquake trigger system
function read_26!(S::GphysData, io::IO, v::Integer, full::Bool)
net = fastread(io, UInt8, 4)
btime = fastread(io, Float64)
shorts = fastread(io, Int16, 6)
t_sweep = fastread(io, Float32)
t_aper = fastread(io, Float32)
alg = fastread(io)
fastskip(io, 3)
if v > 1
println("net = ", String(copy(net)),
", btime = ", u2d(btime),
", shorts = ", shorts,
", t_sweep = ", t_sweep,
", t_aperture = ", t_aper,
", alg = ", Char(alg))
end
return nothing
end
# EVENTSETTING: Settings for earthquake trigger system
function read_27!(S::GphysData, io::IO, v::Integer, full::Bool)
net = fastread(io, UInt8, 4)
btime = fastread(io, Float64)
shorts = fastread(io, Int16, 4)
dur_min = fastread(io, Float32)
dur_max = fastread(io, Float32)
alg = fastread(io)
fastskip(io, 3)
if v > 1
println("net = ", String(copy(net)),
", btime = ", u2d(btime),
", shorts = ", shorts,
", dur_min = ", dur_min,
", dur_max = ", dur_max,
", alg = ", Char(alg))
end
return nothing
end
# DETECTOR
function read_28!(S::GphysData, io::IO, v::Integer, full::Bool)
alg = fastread(io)
event_type = fastread(io)
net_node_id = fastread(io, UInt8, 10)
version = fastread(io, Float32)
event_num = fastread(io, Int32)
fastskip(io, 4)
if v > 1
println("algorithm = ", Char(alg),
", event_type = ", Char(event_type),
", net_node_id = ", String(net_node_id),
", version = ", version,
", event_num = ", event_num
)
end
return nothing
end
# ATODINFO
function read_29!(S::GphysData, io::IO, v::Integer, full::Bool)
io_addr = fastread(io, Int16)
dev_id = fastread(io, Int16)
dev_flags = fastread(io, UInt16)
ext_bufs = fastread(io, Int16)
ext_mux = fastread(io, Int16)
timing_src = fastread(io)
trig_src = fastread(io)
if v > 1
println("io_addr = ", io_addr,
", dev_id = ", dev_id,
", dev_flags = ", bitstring(dev_flags),
", ext_bufs = ", ext_bufs,
", ext_mux = ", ext_mux,
", timing_src = ", timing_src,
", trig_src = ", trig_src
)
end
return nothing
end
# TIMECORRECTION: Time correction information
function read_30!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
SB.tc = fastread(io, Float64)
SB.rc = fastread(io, Float32)
SB.sync_code = Char(fastread(io))
fastskip(io,1)
SB.t_i32 = fastread(io, Int32)
fastskip(io, 2)
if v > 1
println("tc = ", SB.tc,
", rc = ", SB.rc,
", sync_code = ", SB.sync_code,
", t_eff = ", SB.t_i32)
end
# Check: is the second part of ID "IRIG" (i.e., a local time clock)?
SB.irig = false
id_b = SB.id
i1 = findfirst(SB.id.==0x2e)+1
i2 = findnext(SB.id.==0x2e, i1+1)-1
if SB.id[i1:i2] == [0x49, 0x52, 0x49, 0x47] # "IRIG"
SB.irig = true
end
# return (round(Int64, tc*sμ), rc, t_eff*1000000, SB.id_str, netflg)
return nothing
end
#= INSTRUMENT: Instrument hardware settings, mainly PADS related
added by R. Banfill, Jan 1991 =#
function read_31!(S::GphysData, io::IO, v::Integer, full::Bool)
staident!(io, 0)
serial = fastread(io, Int16)
ncmp = fastread(io, Int16)
chno = fastread(io, Int16)
sens_type = fastread(io)
data_type = fastread(io)
void_samp = fastread(io, Int32)
floats = fastread(io, Float32, 10)
t_eff = fastread(io, Int32)
pre_evt = fastread(io, Float32)
trig_num = fastread(io, Int16)
study = fastread(io, 6)
sn_serial = fastread(io, Int16)
if v > 1
println("id = ", SB.id_str,
", inst code = ", Int16(SB.hdr[12]) << 8 | Int16(SB.hdr[11]),
", serial = ", serial,
", ncmp = ", ncmp,
", chno = ", chno,
", sens_type = ", Char(sens_type),
", data_type = ", Char(data_type),
", void_samp = ", void_samp,
", floats = ", floats,
", t_eff = ", u2d(t_eff),
", pre_evt = ", pre_evt,
", trig_num = ", trig_num,
", study = ", String(study),
", sn_serial = ", sn_serial
)
end
return nothing
end
# CHANSET: Associate stations and components with sets...???
function read_32!(S::GphysData, io::IO, v::Integer, full::Bool)
SB.C.typ = fastread(io, Int16)
SB.C.n = fastread(io, Int16)
fastread!(io, SB.C.sta)
fastskip(io, 1)
SB.C.tu = fastread(io, Int32)
SB.C.td = fastread(io, Int32)
if v > 1
println("type = ", SB.C.typ,
", n = ", SB.C.n,
", sta = ", String(copy(SB.C.sta)),
", t_up = ", u2d(SB.C.tu),
", t_dn = ", u2d(SB.C.td)
)
(v > 2) && printstyled("channel entries:\n", color=:green)
end
for i = 1:SB.C.n
read_chansetentry!(S, io, v)
end
return nothing
end
"""
suds_support()
Dump info to STDOUT on support for each SUDS structure type.
* **Green** structures are fully supported and read into memory.
* **Yellow** structures can be dumped to stdout by invoking *read_data("suds", ...)* with high verbosity (v=2).
* **Red** structures are unsupported and have not been seen in available test data.
"""
function suds_support()
println("\nCurrent support for SUDS structures\n")
printstyled("CODE STRUCTURE \n", color=:green, bold=true)
printstyled("==== ========= \n", color=:green, bold=true)
for i in 1:33
str = lpad(i, 4) * " " * suds_codes[i] * "\n"
if i in unsupported
printstyled(str, color=:red)
elseif i in disp_only
printstyled(str, color=11)
else
printstyled(str, color=:green, bold=true)
end
end
printstyled("\n(supported = ")
printstyled("GREEN", color=:green, bold=true)
printstyled(", logging only = ")
printstyled("YELLOW", color=11)
printstyled(", unsupported = ")
printstyled("RED", color=:red)
printstyled(")\n")
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1229 | #AuxiliaryData/{...}/{...}/{TAG}
function asdf_getaux( hdf_out::String )
if isfile(hdf_out)
io = h5open(hdf_out, "r+")
fmt = read(attrs(io)["file_format"])
(fmt == "ASDF") || (close(io); error("invalid ASDF file!"))
if has(io, "AuxiliaryData")
aux = io["AuxiliaryData"]
else
aux = g_create(io, "AuxiliaryData")
end
else
io = h5open(hdf_out, "cw")
attrs(io)["file_format"] = "ASDF"
attrs(io)["file_format_version"] = "1.0.2"
aux = g_create(io, "AuxiliaryData")
end
return io, aux
end
"""
asdf_waux(hdf_out, path, X)
Write `X` to AuxiliaryData/path in `hdf_out`. If an object already exists at
AuxiliaryData/path, it will be deleted and overwritten with `X`.
"""
function asdf_waux(hdf_out::String, path::String, X::Union{HDF5Type,HDF5Array})
# Correct leading /
while startswith(path, "/")
path = path[nextind(path, 1):lastindex(path)]
end
# Correct paths that start with AuxiliaryData
startswith(path, "AuxiliaryData") && (path = String(split(path, "/", limit=2, keepempty=true)[2]))
io, aux = asdf_getaux(hdf_out)
# Remove existing object if it exists
has(aux, path) && o_delete(aux, path)
aux[path] = X
close(io)
return nothing
end
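#= Usage sketch (the file name and AuxiliaryData path are hypothetical):
     asdf_waux("example.h5", "CrossCorrelation/UW.TDH/ccf_0", rand(Float32, 2^12))
   Calling it again with the same path simply replaces the stored array.
=#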
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4496 | # TO DO:
# As soon as someone sends me a file with waveform traces that have the
# attributes event_id, magnitude_id, focal_mechanism_id, I can trivially
# output SeisEvents. No one has done that yet.
@doc """
(H,R) = asdf_rqml(fpat::String)
Read QuakeML (QML) from ASDF file(s) matching file string pattern `fpat`. Returns:
* `H`, Array{SeisHdr,1}
* `R`, Array{SeisSrc,1}
""" asdf_rqml
function asdf_rqml(io::HDF5File)
EvCat = Array{SeisHdr,1}()
EvSrc = Array{SeisSrc,1}()
if has(io, "QuakeML")
qml = parse_string(String(UInt8.(read(io["QuakeML"]))))
event_xml!(EvCat, EvSrc, qml)
free(qml)
end
return EvCat, EvSrc
end
function asdf_rqml(fpat::String)
files = safe_isfile(fpat) ? [fpat] : ls(fpat)
EvCat = Array{SeisHdr,1}()
EvSrc = Array{SeisSrc,1}()
for file in files
io = h5open(file, "r")
if has(io, "QuakeML")
qml = parse_string(String(UInt8.(read(io["QuakeML"]))))
event_xml!(EvCat, EvSrc, qml)
free(qml)
end
close(io)
end
return(EvCat, EvSrc)
end
function asdf_wqml!(hdf::HDF5File, HDR::Array{SeisHdr,1}, SRC::Array{SeisSrc,1},
ovr::Bool, v::Integer)
hq = Int8[]
io = IOBuffer()
H0 = SeisHdr[]
R0 = SeisSrc[]
if has(hdf, "QuakeML")
if ovr
(v > 0) && println("ovewriting QuakeML...")
new_qml!(io)
else
hq = read(hdf["QuakeML"])
# check whether we can append directly
nq = length(hq)
si = nq-29
test_str = String(UInt8.(hq[si:nq]))
if test_str == "</eventParameters>\n</quakeml>\n"
# behavior for SeisIO-compatible QuakeML
deleteat!(hq, si:nq)
append!(hq, ones(Int8, 4).*Int8(32))
else
# behavior for other QuakeML
qml = parse_string(String(UInt8.(hq)))
event_xml!(H0, R0, qml)
free(qml)
new_qml!(io)
hq = Int8[]
GC.gc()
end
end
# delete hdf["QuakeML"]. I fear one can't resize HDF5 arrays in-place.
# One certainly can't in the Julia HDF5 interface...
o_delete(hdf, "QuakeML")
else
new_qml!(io)
end
if isempty(H0) && isempty(R0)
write_qml!(io, HDR, SRC, v)
else
append!(H0, HDR)
append!(R0, SRC)
write_qml!(io, H0, R0, v)
end
buf = vcat(hq, take!(io))
hdf["QuakeML"] = buf
close(io)
return nothing
end
@doc """
asdf_wqml(fname, SHDR::Array{SeisHdr,1}, SSRC::Array{SeisSrc,1}[, KWs])
asdf_wqml(fname, H::SeisHdr, R::SeisSrc[, KWs])
Write QuakeML (QML) to "QuakeML/" dataset in ASDF file `fname` from `SHDR` and
`SSRC`.
asdf_wqml(fname, evt::SeisEvent[, KWs])
asdf_wqml(fname, evt::Array{SeisEvent,1}[, KWs])
As above, for the `:hdr` and `:source` fields of `evt`.
|KW | Type | Default | Meaning |
|:--- |:--- |:--- |:--- |
| ovr | Bool | false | overwrite QML in existing ASDF file? [^1] |
| v | Integer | 0 | verbosity |
[^1] By default, data are appended to the existing contents of "QuakeML/".
!!! warning
To write data from `R ∈ SSRC`, it must be true that `R.eid == H.id` for some `H ∈ SHDR`.
See also: `write_qml`
""" asdf_wqml
function asdf_wqml(hdf_out::String, HDR::Array{SeisHdr,1}, SRC::Array{SeisSrc,1};
ovr::Bool=false,
v::Integer=0)
if isfile(hdf_out)
hdf = h5open(hdf_out, "r+")
fmt = read(attrs(hdf)["file_format"])
(fmt == "ASDF") || (close(hdf); error("invalid ASDF file!"))
else
hdf = h5open(hdf_out, "cw")
attrs(hdf)["file_format"] = "ASDF"
attrs(hdf)["file_format_version"] = "1.0.2"
end
asdf_wqml!(hdf, HDR, SRC, ovr, v)
close(hdf)
# logging
opts = string(", ovr=\"", ovr, "\", v=", v)
for h in HDR
fwrite_note_quake!(h, "asdf_wqml", hdf_out, opts)
end
for r in SRC
fwrite_note_quake!(r, "asdf_wqml", hdf_out, opts)
end
return nothing
end
asdf_wqml(hdf_out::String, H::SeisHdr, R::SeisSrc; ovr::Bool=false, v::Integer=0) = asdf_wqml(hdf_out, [H], [R], ovr=ovr, v=v)
asdf_wqml(hdf_out::String, W::SeisEvent; ovr::Bool=false, v::Integer=0) = asdf_wqml(hdf_out, [W.hdr], [W.source], ovr=ovr, v=v)
function asdf_wqml(hdf_out::String, events::Array{SeisEvent,1};
ovr::Bool=false,
v::Integer=0)
N = length(events)
H = Array{SeisHdr, 1}(undef, N)
R = Array{SeisSrc, 1}(undef, N)
for i in 1:N
H[i] = events[i].hdr
R[i] = events[i].source
end
asdf_wqml(hdf_out, H, R, ovr=ovr, v=v)
return nothing
end
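#= Usage sketch (file name is hypothetical; randSeisEvent is from SeisIO.RandSeis):
     W = randSeisEvent()
     asdf_wqml("example.h5", W.hdr, W.source)   # append one event's QuakeML
     H, R = asdf_rqml("example.h5")             # read headers and source info back
=#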
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1861 | #=
HDF5 TYPES SUPPORTED
signed and unsigned integers of 8, 16, 32, and 64 bits, Float32, Float64;
Complex versions of these numeric types
Arrays of these numeric types (including complex versions)
ASCIIString and UTF8String; and Arrays of these two string types.
Note: can't really do String support for interoperability reasons.
=#
const HDF5Type = Union{ Float32, Float64,
Complex{Float32}, Complex{Float64},
UInt8, UInt16, UInt32, UInt64,
Complex{UInt8}, Complex{UInt16},
Complex{UInt32}, Complex{UInt64},
Int8, Int16, Int32, Int64,
Complex{Int8}, Complex{Int16},
Complex{Int32}, Complex{Int64} }
const HDF5Array = Union{Array{Float64, N},
Array{Float32, N},
Array{Complex{Float64}, N},
Array{Complex{Float32}, N},
Array{UInt8, N},
Array{UInt16, N},
Array{UInt32, N},
Array{UInt64, N},
Array{Complex{UInt8}, N},
Array{Complex{UInt16}, N},
Array{Complex{UInt32}, N},
Array{Complex{UInt64}, N},
Array{Int8, N},
Array{Int16, N},
Array{Int32, N},
Array{Int64, N},
Array{Complex{Int8}, N},
Array{Complex{Int16}, N},
Array{Complex{Int32}, N},
Array{Complex{Int64}, N}} where N
# These are adapted from unix2datetime.(1.0e-9.*[typemin(Int64), typemax(Int64)])
const unset_s = "1677-09-21T00:12:44"
const unset_t = "2262-04-11T23:47:16"
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 157 | export asdf_rqml,
asdf_waux,
asdf_wqml,
read_asdf_evt,
read_hdf5,
read_hdf5!,
scan_hdf5,
write_hdf5
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 442 | function get_trace_bounds(ts::Int64, te::Int64, t0::Int64, t1::Int64, Δ::Int64, nx::Int64)
i0 = 1
i1 = nx
while t0 < ts
(i0 >= i1) && break
t0 += Δ
i0 += 1
end
while t1 > te
(i1 <= i0) && break
t1 -= Δ
i1 -= 1
end
return i0, i1, t0
end
function get_trace_bound(t0::Int64, ts::Int64, Δ::Int64, nx::Int64)
i0 = 1
while ts < t0
(i0 >= nx) && break
ts += Δ
i0 += 1
end
return i0
end
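#= Worked example (illustrative): a 10-sample, 1 Hz trace spanning t0 = 0 ns to
   t1 = 9_000_000_000 ns, clipped to a request window [2.5 s, 6.5 s]:
     get_trace_bounds(2_500_000_000, 6_500_000_000, 0, 9_000_000_000, 1_000_000_000, 10)
   returns (4, 7, 3_000_000_000): read samples 4:7, starting at t = 3 s.
=#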
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1159 | using SeisIO, SeisIO.RandSeis
# equivalence:
# FDSN wildcard RegEx
# ? => .
# * => .*
function id_to_regex(cid::Array{UInt8,1})
replace!(cid, 0x2e=>0x5c, 0x3f=>0x2e)
L = length(cid)
i = L
while i > 0
# replace: '*' => '.*'
if cid[i] == 0x2a
splice!(cid, i:i-1, 0x2e)
i -= 1
# now replace: '\' => '\.'
elseif cid[i] == 0x5c
if i == L
push!(cid, 0x2e)
else
splice!(cid, i:i, [0x5c, 0x2e])
end
end
i -= 1
end
return Regex(String(cid))
end
id_to_regex(id::AbstractString) = id_to_regex(copy(codeunits(id)))
function netsta_to_regex(id::AbstractString)
cid = copy(codeunits(id))
i = 0
j = 0
L = length(cid)
while i < L
i += 1
if cid[i] == 0x2e
j += 1
end
if j == 2
deleteat!(cid, i:L)
break
end
end
return id_to_regex(cid)
end
function id_match(id::AbstractString, S::GphysData)
j = findid(id, S.id)
j > 0 && return [j]
idr = id_to_regex(id)
chans = Int64[]
for (j, cid) in enumerate(S.id)
if occursin(idr, cid)
push!(chans, j)
end
end
return chans
end
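#= Usage sketch (IDs are hypothetical):
     S = randSeisData(3)
     S.id[1] = "UW.TDH..EHZ"
     id_match("UW.???..EH?", S)   # channel indices whose IDs match the FDSN-style pattern
=#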
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 355 | import LightXML: free, parse_string
import SeisIO: ChanSpec,
KW,
TimeSpec,
check_for_gap!,
dtconst,
endtime,
fastread,
fwrite_note!,
mk_t,
mk_xml!,
parsetimewin,
read_station_xml!,
split_id,
sxml_mergehdr!,
sμ,
t_win,
trunc_x!,
x_inds,
μs
import SeisIO.Quake: event_xml!,
fwrite_note_quake!,
new_qml!,
write_qml!
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 412 | function load_data!(A::Array{T,1}, dset::HDF5Dataset, src_ind::AbstractRange{Int}, dest_ind::AbstractRange{Int}) where T
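# Read the hyperslab dset[src_ind] directly into the view A[dest_ind], avoiding
# an intermediate full-trace allocation.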
dsel_id = HDF5.hyperslab(dset, src_ind)
V = view(A, dest_ind)
memtype = HDF5.datatype(A)
memspace = HDF5.dataspace(V)
HDF5.h5d_read(dset.id, memtype.id, memspace.id, dsel_id, dset.xfer, V)
HDF5.close(memtype)
HDF5.close(memspace)
HDF5.h5s_close(dsel_id)
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4598 | #= List of Variables
tr trace dataset
wv waveform dataset
sta station dataset
idₛ station id
idₜ trace id
stas list of station names
traces list of trace names
trₛ names of traces in sta
idⱼ S[j] :id
iⱼ S[j] first index in :x to write
jⱼ S[j] last index in :x to write
lxⱼ S[j] value of :t[end,1]
ntⱼ S[j] number of times in :t[:,1]
nⱼ S[j] length(:x)
tⱼ S[j] :t
eᵣ request window end time [ns from epoch]
nᵣ request window length in samples
sᵣ request window start time [ns from epoch]
eₜ trace end time [ns from epoch]
fs trace sampling frequency [Hz]
iₜ trace first index to read
jₜ trace last index to read
nₜ trace total number of samples
sₜ trace start time [ns from epoch]
nₒ overlap window number of samples
sₒ overlap window start time [ns from epoch]
j index to channel in S
v verbosity
Δ sampling interval [ns]
because I managed to confuse even myself with the bookkeeping, and that's not
happened since coding SeisIO began
=#
function read_asdf!( S::GphysData,
hdf::String,
id::String,
s::TimeSpec,
t::TimeSpec,
msr::Bool,
v::Integer )
SX = SeisData() # for XML
idr = isa(id, String) ? id_to_regex(id) : id
(v > 2) && println("Reading IDs that match ", idr)
if typeof(s) == String && typeof(t) == String
d0 = s
d1 = t
sᵣ = DateTime(s).instant.periods.value*1000 - dtconst
eᵣ = DateTime(t).instant.periods.value*1000 - dtconst
else
(d0, d1) = parsetimewin(s, t)
sᵣ = DateTime(d0).instant.periods.value*1000 - dtconst
eᵣ = DateTime(d1).instant.periods.value*1000 - dtconst
end
sᵣ *= 1000
eᵣ *= 1000
Δ = 0
fs = 0.0
# this nesting is a mess
netsta = netsta_to_regex(id)
idr = id_to_regex(id)
f = h5open(hdf, "r")
wv = f["Waveforms"]
stas = names(wv)
sort!(stas)
(v > 2) && println("Net.sta found: ", stas)
for idₛ in stas
if occursin(netsta, idₛ)
sta = wv[idₛ]
traces = names(sta)
sort!(traces)
(v > 2) && println("Traces found: ", traces)
for idₜ in traces
if idₜ == "StationXML"
sxml = String(UInt8.(read(sta[idₜ])))
read_station_xml!(SX, sxml, d0, d1, msr, v)
elseif occursin(idr, idₜ)
tr = sta[idₜ]
nₜ = length(tr)
sₜ = read(tr["starttime"])
fs = read(tr["sampling_rate"])
# convert fs to sampling interval in ns
Δ = round(Int64, 1.0e9/fs)
eₜ = sₜ + (nₜ-1)*Δ
(v > 2) && println("sₜ = ", sₜ,"; eₜ = ", eₜ, "; sᵣ = ", sᵣ, "; eᵣ = ", eᵣ)
if (sᵣ ≤ eₜ) && (eᵣ ≥ sₜ)
lxⱼ = 0
iₜ, jₜ, sₒ = get_trace_bounds(sᵣ, eᵣ, sₜ, eₜ, Δ, nₜ)
nₒ = jₜ-iₜ+1
idⱼ = String(split(idₜ, "_", limit=2, keepempty=true)[1])
j = findid(idⱼ, S.id)
(v > 2) && println("idⱼ = ", idⱼ, " (found at index in S = ", j, ")")
nᵣ = div(eᵣ-sᵣ, Δ)+1
if j == 0
T = eltype(tr)
push!(S, SeisChannel(id = idⱼ,
fs = fs,
x = Array{T,1}(undef, nᵣ)))
j = S.n
if has(tr, "event_id")
S.misc[j]["event_id"] = read(tr["event_id"])
end
else
tⱼ = getindex(getfield(S, :t), j)
ntⱼ = div(lastindex(tⱼ), 2)
nⱼ = lastindex(S.x[j])
if ntⱼ > 0
lxⱼ = getindex(tⱼ, ntⱼ)
check_for_gap!(S, j, div(sₒ, 1000), nₒ, v)
end
if lxⱼ + nₒ > nⱼ
resize!(S.x[j], lxⱼ + max(nₒ, nᵣ))
end
if S.fs[j] == 0.0
S.fs[j] = fs
end
end
if lxⱼ == 0
tⱼ = mk_t(nₒ, div(sₒ, 1000))
setindex!(getfield(S, :t), tⱼ, j)
end
iⱼ = lxⱼ+1
jⱼ = iⱼ+nₒ-1
load_data!(S.x[j], tr, iₜ:jₜ, iⱼ:jⱼ)
end
HDF5.h5d_close(tr)
end
end
HDF5.h5g_close(sta)
end
end
# merge in the XML that we read
sxml_mergehdr!(S, SX, false, true, v)
trunc_x!(S)
# Done
HDF5.h5g_close(wv)
close(f)
return S
end
function read_asdf(hdf::String, id::Union{String,Regex}, s::TimeSpec, t::TimeSpec, msr::Bool, v::Integer)
S = SeisData()
read_asdf!(S, hdf, id, s, t, msr, v)
return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3525 | function get_evt_range!(filestr::String, idr::Regex, t::Array{Int64,1})
f = h5open(filestr, "r")
D = get_datasets(f)
for i in D
has(i, "event_id") || continue
evid = read(i["event_id"])
if occursin(idr, evid)
t0 = read(i["starttime"])
fs = read(i["sampling_rate"])
nx = length(i)
# convert fs to sampling interval in ns
Δ = round(Int64, 1.0e9/fs)
t1 = t0 + (nx-1)*Δ
t[1] = min(t[1], t0-Δ)
t[2] = max(t[2], t1+Δ)
end
end
close(f)
return nothing
end
@doc """
EventCat = read_asdf_evt(filestr, event_id::Union{String, Regex}[, KWs])
Read data in seismic HDF5 format with ids matching `event_id` from files
matching pattern `filestr`. Returns an array of SeisEvent structures.
EventCat = read_asdf_evt(filestr[, KWs])
Read data in seismic HDF5 format from files matching pattern `filestr` into
SeisEvent structures. Matches any event ID in any matching file.
|KW | Type | Default | Meaning |
|:--- |:--- |:--- |:--- |
| msr | Bool | true | read full (MultiStageResp) instrument resp? |
| v | Integer | 0 | verbosity |
See also: `TimeSpec`, `parsetimewin`, `read_data`, `read_hdf5`
""" read_asdf_evt
function read_asdf_evt(filestr::String, event_id::Union{String, Regex};
msr ::Bool = true, # read multistage response?
v ::Integer = KW.v # verbosity
)
one_file = safe_isfile(filestr)
S = SeisData()
# Regex ID
idr = isa(event_id, String) ? id_to_regex(event_id) : event_id
# Time range to read
t = zeros(Int64, 2)
t[1] = typemax(Int64)
t[2] = typemin(Int64)
if one_file
# Time range
get_evt_range!(filestr, idr, t)
t_start = string(u2d(t[1]*1.0e-9))
t_end = string(u2d(t[2]*1.0e-9))
(v > 0) && println("read range: ", t_start, " -- ", t_end)
# Read data
append!(S, read_asdf(filestr, "*", t_start, t_end, msr, v))
else
files = ls(filestr)
# Time range
for fname in files
get_evt_range!(fname, idr, t)
end
t_start = string(u2d(t[1]*1.0e-9))
t_end = string(u2d(t[2]*1.0e-9))
(v > 0) && println("read range: ", t_start, " -- ", t_end)
# Read data
for fname in files
(v > 0) && println("reading from ", fname)
append!(S, read_asdf(fname, "*", t_start, t_end, msr, v))
end
end
# List of event IDs for each channel
ns = S.n
sid = Array{String,1}(undef, ns)
for i in 1:ns
sid[i] = get(S.misc[i], "event_id", "")
end
(H,R) = asdf_rqml(filestr)
# List of event IDs for each SeisHdr
nh = length(H)
hid = Array{String,1}(undef, nh)
inds = Int64[]
sizehint!(inds, nh)
fill!(hid, "")
for i in 1:nh
evid = H[i].id
hid[i] = evid
if occursin(idr, evid)
push!(inds, i)
end
end
# Form event catalog by matching IDs
EC = Array{SeisEvent,1}(undef, length(inds))
for (k,j) in enumerate(inds)
chans = Int64[]
for i in 1:S.n
if sid[i] == hid[j]
push!(chans, i)
end
end
EC[k] = SeisEvent(hdr = H[j], source = R[j], data = S[chans])
end
return EC
end
read_asdf_evt(filestr::String;
msr ::Bool = true, # read multistage response?
v ::Integer = KW.v # verbosity
) = read_asdf_evt(filestr, "", msr=msr, v=v)
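#= Usage sketch (file name and event ID are hypothetical):
     EC = read_asdf_evt("events.h5", "quakeml:local/evt0001")
     length(EC)   # number of matching SeisEvent structures
=#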
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2779 | @doc """
S = read_hdf5(filestr, s::TimeSpec, t::TimeSpec[, keywords])
read_hdf5!(S, filestr, s::TimeSpec, t::TimeSpec[, keywords])
Read data in seismic HDF5 format from files matching pattern `filestr`.
`s`, `t` are required arguments but can be any Type ∈ (DateTime, Real, String);
type `?TimeSpec` for more information about how these are interpreted.
|KW | Type | Default | Meaning |
|:--- |:--- |:--- |:--- |
| id | String | "*" | id pattern, formated nn.sss.ll.ccc |
| | | | (net.sta.loc.cha); FDSN wildcards [^1] |
| msr | Bool | true | read full (MultiStageResp) instrument resp? |
| v | Integer | 0 | verbosity |
[^1] A question mark ('?') is a wildcard for a single character; an asterisk ('*') is a wildcard for zero or more characters
See also: `TimeSpec`, `parsetimewin`, `read_data`
""" read_hdf5!
function read_hdf5!(S::GphysData, fpat::String, s::TimeSpec, t::TimeSpec;
fmt ::String = "asdf", # data format
id ::Union{String, Regex} = "*", # id string
msr ::Bool = true, # read multistage response?
v ::Integer = KW.v # verbosity
)
N = S.n
filestr = abspath(fpat)
one_file = safe_isfile(filestr)
if fmt == "asdf"
if one_file
append!(S, read_asdf(filestr, id, s, t, msr, v))
else
files = ls(filestr)
for fname in files
append!(S, read_asdf(fname, id, s, t, msr, v))
end
end
else
error("Unknown file format (possibly NYI)!")
end
new_chan_src = view(S.src, N+1:S.n)
fill!(new_chan_src, filestr)
note!(S, N+1:S.n, string( " ¦ +source ¦ read_hdf5!(S, ",
"\"", fmt, "\", ",
"\"", s, "\", ",
"\"", t, "\", ",
"fmt=\"", fmt, "\", ",
"id=\"", id, "\", ",
"msr=", msr, ", ",
"v=", KW.v, ")")
)
return nothing
end
@doc (@doc read_hdf5!)
function read_hdf5(filestr::String, s::TimeSpec, t::TimeSpec;
fmt ::String = "asdf", # data format
id ::Union{String, Regex} = "*", # id string
msr ::Bool = true, # read multistage response?
v ::Integer = KW.v # verbosity
)
S = SeisData()
read_hdf5!(S, filestr, s, t,
fmt = fmt,
id = id,
msr = msr,
v = v
)
return S
end
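#= Usage sketch (file name and channel ID are hypothetical):
     S = read_hdf5("2019-07-07.h5", "2019-07-07T00:00:00", "2019-07-08T00:00:00",
                   id="CI.SDD..HHZ")
=#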
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 413 | function save_data!(A::Array{T,1}, dset::HDF5Dataset, src_ind::AbstractRange{Int}, dest_ind::AbstractRange{Int}) where T
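# Write the view A[dest_ind] directly into the hyperslab dset[src_ind]; the
# write-side counterpart of load_data!.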
dsel_id = HDF5.hyperslab(dset, src_ind)
V = view(A, dest_ind)
memtype = HDF5.datatype(A)
memspace = HDF5.dataspace(V)
HDF5.h5d_write(dset.id, memtype.id, memspace.id, dsel_id, dset.xfer, V)
HDF5.close(memtype)
HDF5.close(memspace)
HDF5.h5s_close(dsel_id)
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 840 | """
scan_hdf5(hdf)
Scan HDF5 archive `hdf` and return station names with waveform data contained
therein as a list of Strings formatted "nn.sssss" (network.station).
scan_hdf5(hdf, level="trace")
Scan HDF5 archive `hdf` and return channel names with waveform data contained
therein as a list of Strings formatted "nn.sssss.ll.ccc" (network.station.location.channel).
"""
function scan_hdf5( hdf::String;
fmt::String="asdf",
level::String="station")
f = h5open(hdf, "r")
if fmt =="asdf"
if level == "station"
str = names(f["Waveforms"])
elseif level == "trace"
D = get_datasets(f)
str = String[]
for i in D
push!(str, name(i))
end
else
error("unsupported level!")
end
else
error("unknown format or NYI!")
end
close(f)
unique!(str)
return str
end
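#= Usage sketch (file name is hypothetical):
     scan_hdf5("example.h5")                   # e.g. ["CI.SDD", "UW.TDH"]
     scan_hdf5("example.h5", level="trace")    # full trace paths within each station
=#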
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 10120 | function mk_netsta(S::GphysData)
netsta = Array{String,1}(undef, S.n)
cha = Array{String,1}(undef, S.n)
for i in 1:S.n
id = split_id(S.id[i])
netsta[i] = id[1]*"."*id[2]
cha[i] = lowercase(id[4])
end
nsid = unique(netsta)
return netsta, cha, nsid
end
function asdf_wsxml(xbuf::IOBuffer, S::GphysData, chans::Array{Int64,1}, sta::HDF5Group)
seekstart(xbuf)
mk_xml!(xbuf, S, chans)
sta["StationXML"] = take!(xbuf)
return nothing
end
function asdf_mktrace(S::GphysData, xml_buf::IO, chan_numbers::Array{Int64,1}, wav::HDF5Group, ts::Array{Int64,1}, te::Array{Int64,1}, len::Int64, v::Integer, tag::String)
nc = length(chan_numbers)
netsta, cha, nsid = mk_netsta(S)
trace_names = Array{Array{String,1},1}(undef, nc)
(v>2) && println("traces to create = channels ", chan_numbers)
# Build trace_names
d0 = zeros(Int64, nc)
for (i,j) in enumerate(chan_numbers)
trace_names[i] = String[]
id = S.id[j]
cc = isempty(tag) ? cha[j] : tag
# divide t0 and t1 by the time length using div to get d0, d1
d0[i] = len*(div(ts[i], len))
d1 = len*(div(te[i], len))
Δ = round(Int64, 1.0e9/S.fs[j])
# range of times is then d0:d1
for d in d0[i]:len:d1
# ...but we must end Δ ns *before* d1 to prevent a one-sample overlap
s0 = string(u2d(d/1000000000))
s1 = string(u2d((d+len-Δ)/1000000000))
# create string like CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_
chan_str = join([id, s0, s1, cc], "__")
push!(trace_names[i], chan_str)
end
(v > 2) && println("trace_names[", i, "] = ", trace_names[i])
end
# Check for trace names in each ns
chans = falses(nc)
for j in 1:length(nsid)
id = nsid[j]
fill!(chans, false)
for (j,i) in enumerate(chan_numbers)
if (netsta[i] == id) && (S.fs[i] != 0.0)
chans[j] = true
end
end
(maximum(chans) == false) && continue
# does net.sta exist?
if has(wav, id)
sta = wav[id]
else
# Create Waveforms/net.sta
(v > 0) && println("creating Waveforms/", id)
sta = g_create(wav, id)
# Write StationXML to sta
asdf_wsxml(xml_buf, S, chan_numbers[chans], sta)
end
# Create Waveforms/net.sta/chan_str
for i = 1:length(chans)
if chans[i]
j = chan_numbers[i]
(v > 2) && println("S.id[", j, "] = ", S.id[j], ", trace_names[", i, "] = ", trace_names[i])
T = eltype(S.x[j])
fs = S.fs[j]
Δ = round(Int64, 1.0e9/fs)
nx = div(len, Δ)
for k = 1:length(trace_names[i])
chan_str = trace_names[i][k]
# is there a trace with the corresponding string?
if has(sta, chan_str) == false
(v > 1) && println("creating trace: Waveforms/", id, "/", chan_str)
sta[chan_str] = ones(T, nx).*T(NaN)
# create a trace of all NaNs
attrs(sta[chan_str])["sampling_rate"] = fs
attrs(sta[chan_str])["starttime"] = d0[i] + (k-1)*len
end
end
end
end
end
return nothing
end
function asdf_write_chan(S::GphysData, sta::HDF5Group, i::Int64, tag::String, eid::String, v::Integer)
fs = S.fs[i]
tx = S.t[i]
t = t_win(tx, fs)
xi = x_inds(tx)
n_seg = size(t, 1)
(n_seg == 0) && return
for k = 1:n_seg
t0 = t[k,1]
t1 = t[k,2]
s0 = string(u2d(div(t0, 1000000)))
s1 = string(u2d(div(t1, 1000000)))
# create string like CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_
chan_str = join([S.id[i], s0, s1, tag], "__")
if has(sta, chan_str)
(v > 0) && println("incrementing tag of ", chan_str)
j = 0x2f
while has(sta, chan_str)
j += 0x01
tag1 = String(vcat(UInt8.(codeunits(tag)), j))
chan_str = join([S.id[i], s0, s1, tag1], "__")
end
end
sta[chan_str] = S.x[i][xi[k,1]:xi[k,2]]
# set dictionary attributes
D = attrs(sta[chan_str])
D["sampling_rate"] = fs
D["starttime"] = t0*1000
if !isempty(eid)
D["event_id"] = eid
end
end
return nothing
end
function write_asdf( hdf_out::String,
S::GphysData,
chan_numbers::Array{Int64,1},
add::Bool,
evid::String,
ovr::Bool,
len::Period,
tag::String,
v::Integer)
# "add" implies "ovr"
if add == true
ovr = true
end
# Precheck for degenerate time structs, time shift
if ovr
for i in chan_numbers
if (size(S.t[i],1) < 2) && (S.fs[i] != 0.0)
error(string(S.id[i] *
": malformed :t; can't write with ovr=true."))
end
# ensure each channel starts on an exact sample
t0 = S.t[i][1,2]
Δ = round(Int64, sμ/S.fs[i])
t1 = div(t0,Δ)*Δ
δ = t0-t1
S.t[i][1,2] = t1
S.misc[i]["tc"] = δ
end
end
xml_buf = IOBuffer()
netsta, cha, nsid = mk_netsta(S)
if isfile(hdf_out)
io = h5open(hdf_out, "r+")
fmt = read(attrs(io)["file_format"])
(fmt == "ASDF") || (close(io); error("invalid ASDF file!"))
if has(io, "Waveforms")
wav = io["Waveforms"]
else
wav = g_create(io, "Waveforms")
end
else
io = h5open(hdf_out, "cw")
attrs(io)["file_format"] = "ASDF"
attrs(io)["file_format_version"] = "1.0.2"
wav = g_create(io, "Waveforms")
end
# =======================================================================
# Add "empty" traces of all NaNs
if add
nc = length(chan_numbers)
ts = zeros(Int64, nc)
te = zeros(Int64, nc)
for (i,j) in enumerate(chan_numbers)
ts[i] = S.t[j][1,2]*1000
te[i] = endtime(S.t[j], S.fs[j])*1000
end
p = convert(Nanosecond, len).value
asdf_mktrace(S, xml_buf, chan_numbers, wav, ts, te, p, v, tag)
end
# write channels to net.sta waveform groups
for j in 1:length(nsid)
(v > 0) && println("writing ", nsid)
id = nsid[j]
chans = Int64[]
for i in chan_numbers
if (netsta[i] == id) && (S.fs[i] != 0.0)
push!(chans, i)
end
end
nc = length(chans)
# does net.sta exist?
if has(wav, id)
sta = wav[id]
if ovr
# ====================================================================
# overwrite StationXML
(v > 1) && println("merging XML")
# read old XML
SX = SeisData()
sxml = String(UInt8.(read(sta["StationXML"])))
read_station_xml!(SX, sxml, "0001-01-01T00:00:00", "9999-12-31T23:59:59", true, v)
# merge S headers into SX, overwriting SX
SM = SeisData(length(chan_numbers))
for f in (:id, :name, :loc, :fs, :gain, :resp, :units)
setfield!(SM, f, deepcopy(getindex(getfield(S, f), chan_numbers)))
end
sxml_mergehdr!(SX, SM, false, true, v)
# remake channel list; SX ordering differs from S
cc = Int64[]
for i in 1:SX.n
idx = split_id(SX.id[i])
ns = idx[1]*"."*idx[2]
if ns == id
push!(cc, i)
end
end
o_delete(sta, "StationXML")
asdf_wsxml(xml_buf, SX, cc, sta)
# ==================================================================
# overwrite trace data
trace_ids = S.id[chans]
t = Array{Array{Int64,2},1}(undef,nc)
for (i,j) in enumerate(chans)
t[i] = t_win(S.t[j], S.fs[j])
broadcast!(*, t[i], t[i], 1000)
end
(v > 2) && println("t = ", t)
# loop over trace waveforms using id, start time, end time
for n in names(sta)
(n == "StationXML") && continue
cha_id = String(split(n, "_", limit=2, keepempty=true)[1])
x = sta[n]
(v > 1) && println("checking ", n)
nx = length(x)
t0 = read(x["starttime"])
fs = read(x["sampling_rate"])
(v > 2) && println("t0 = ", t0, "; fs = ", fs, "; nx = ", nx)
# convert fs to sampling interval in ns
Δ = round(Int64, 1.0e9/fs)
t1 = t0 + (nx-1)*Δ
for i in 1:length(chans)
(trace_ids[i] == cha_id) || continue
nk = size(t[i], 1)
for k in 1:nk
(v > 2) && println("segment k = ", k, "/", nk)
# overlap
ts = t[i][k,1]
te = t[i][k,2]
if (ts ≤ t1) && (te ≥ t0)
# set channel index j in S
j = chans[i]
(v > 2) && println("j = ", j)
# check for fs mismatch
trace_fs = S.fs[j]
(trace_fs == fs) || (@warn(string("Can't write ", S.id[j], "; fs mismatch!")); continue)
lx = div(te-ts, Δ)+1
# determine start, end indices in x that are overwritten
(v > 2) && println("ts = ", ts, "; te = ", te, "; t0 = ", t0, "; t1 = ", t1, "; nx = ", nx, "; lx = ", lx)
i0, i1, t2 = get_trace_bounds(ts, te, t0, t1, Δ, nx)
# determine start, end indices in X to copy
si = get_trace_bound(t0, ts, Δ, lx)
ei = si + min(i1-i0, lx-1)
# overwrite
if v > 2
println("writing ", si, ":", ei, " to ", i0, ":", i1)
end
save_data!(S.x[j], x, i0:i1, si:ei)
end
end
end
end
else
for i in chans
asdf_write_chan(S, sta, i, cha[i], evid, v)
end
end
elseif ovr == false
# Create Waveforms/net.sta
sta = g_create(wav, id)
# Create Waveforms/net.sta/chan_str
for i in chans
asdf_write_chan(S, sta, i, isempty(tag) ? cha[i] : tag, evid, v)
end
# Write StationXML to sta
asdf_wsxml(xml_buf, S, chans, sta)
end
end
close(xml_buf)
close(io)
# Correct :t
if ovr
for i in chan_numbers
δ = get(S.misc[i], "tc", 0)
t0 = S.t[i][1,2]
S.t[i][1,2] = t0+δ
delete!(S.misc[i], "tc")
end
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5381 | @doc """
write_hdf5( hdf_out::String, S::GphysData[, KWs] )
Write data to file `hdf_out` from structure `S` in a seismic HDF5 format.
write_hdf5( hdf_out::String, W::SeisEvent[, KWs] )
Write data to file `hdf_out` from structure `W` in a seismic HDF5 format.
If the format doesn't record event header and source info, only `W.data` is
stored.
## Keywords
#### GphysData
|KW | Type | Default | Meaning |
|:--- |:--- |:--- |:--- |
| add | Bool | false | Add new traces to file as needed? |
| chans | ChanSpec | 1:S.n | Channels to write to file |
| len | Period | Day(1) | Length of new traces added to file |
| ovr | Bool | false | Overwrite data in existing traces? |
| tag | String | "" | Tag for trace names in ASDF volumes |
| v | Integer | 0 | verbosity |
#### SeisEvent
|KW | Type | Default | Meaning |
|:--- |:--- |:--- |:--- |
| chans | ChanSpec | 1:S.data.n| Channels to write to file |
| tag | String | "" | Tag for trace names in ASDF volumes |
| v | Integer | 0 | verbosity |
## Write Methods
### Add (add = true)
This KW determines the start and end times of all data in `chans`, and
initializes new traces (filled with NaNs) of length = `len`.
#### ASDF behavior
Mode `add=true` follows these steps in this order:
1. Determine times of all data in `S[chans]` and all traces in "Waveforms/".
1. If data lie outside existing trace bounds, new traces are initialized.
1. For each segment in `S[chans]`:
+ Merge the header data in `S[chans]` into the relevant station XML.
+ Overwrite part of the relevant trace in `Waveforms/`.
Thus, unless `len` exactly matches the time boundaries of each segment in `S`,
the traces created will be intentionally larger.
### Overwrite (ovr = true)
If `ovr=true` is specified, but `add=false`, `write_hdf5` *only* overwrites
*existing* data in `hdf_out`.
* No new trace data objects are created in `hdf_out`.
* No new file is created. If `hdf_out` doesn't exist, nothing happens.
* If no traces in `hdf_out` overlap segments in `S`, `hdf_out` isn't modified.
* In ASDF format, station XML is merged in channels that are partly overwritten.
!!! warning
`add=true`/`ovr=true` changes `:t` on file to begin at an exact sample time.
See also: `read_hdf5`
""" write_hdf5
function write_hdf5(file::String, S::GphysData;
chans ::ChanSpec = Int64[], # channels
fmt ::String = "asdf", # data format
add ::Bool = false, # add traces
ovr ::Bool = false, # overwrite trace data
len ::Period = Day(1), # length of added traces
tag ::String = "", # trace tag (ASDF)
v ::Integer = KW.v # verbosity
)
chans = mkchans(chans, S, keepirr=false)
if fmt == "asdf"
# write_asdf(file, S, chans, add=add, len=len, ovr=ovr, tag=tag, v=v)
write_asdf(file, S, chans, add, "", ovr, len, tag, v)
else
error("Unknown file format (possibly NYI)!")
end
# log write operation
opts = string(", chans=", chans,
", add=", add,
", fmt=\"", fmt,
"\", len=", len,
", ovr=", ovr,
", tag=\"", tag,
"\", v=", v)
for i in chans
fwrite_note!(S, i, "write_hdf5", file, opts)
end
return nothing
end
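#= Usage sketch (file name is hypothetical; randSeisData is from SeisIO.RandSeis):
     S = randSeisData(3)
     write_hdf5("example.h5", S)                        # create; one trace per segment
     write_hdf5("example.h5", S, add=true, ovr=true)    # pre-allocate day-long traces, then overwrite in place
=#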
function write_hdf5(file::String, C::GphysChannel;
add ::Bool = false, # add traces
fmt ::String = "asdf", # data format
len ::Period = Day(1), # length of added traces
ovr ::Bool = false, # overwrite trace data
tag ::String = "", # trace tag (ASDF)
v ::Integer = KW.v # verbosity
)
S = SeisData(C)
write_hdf5(file, S, fmt=fmt, ovr=ovr, tag=tag, v=v)
opts = string(", add=", add,
", fmt=\"", fmt,
"\", len=", len,
", ovr=", ovr,
", tag=\"", tag,
"\", v=", v)
fwrite_note!(C, "write_hdf5", file, opts)
return nothing
end
function write_hdf5(file::String, W::SeisEvent;
chans ::ChanSpec = Int64[], # channels
fmt ::String = "asdf", # data format
tag ::String = "", # trace tag (ASDF)
v ::Integer = KW.v # verbosity
)
S = getfield(W, :data)
chans = mkchans(chans, S, keepirr=false)
if fmt == "asdf"
H = getfield(W, :hdr)
R = getfield(W, :source)
# write_asdf(file, S, chans, evid=H.id, tag=tag, v=v)
write_asdf(file, S, chans, false, H.id, false, Day(1), tag, v)
asdf_wqml(file, [H], [R], v=v)
else
error("Unknown file format (possibly NYI)!")
end
# logging
opts = string(".data, chans=", chans,
", fmt=\"", fmt,
", tag=\"", tag,
"\", v=", v)
for i in chans
fwrite_note!(W.data, i, "write_hdf5", file, opts)
end
return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 878 | UW_fmt = FormatDesc(
"UW (University of Washington)",
"\"uw\"",
"University of Washington, Seattle, WA, USA",
"(a tarball that predates the World Wide Web)",
"Steve Malone, Prof. Emeritus, [email protected]",
HistVec(),
["event archival format consisting of a data file and an ASCII pick file",
"the first line of a pick file is an event summary called an ACARD",
"station file (human-maintained) needed for instrument locations",
"developed by R. Crosson & son at UW, late 1970s",
"maintained by R. Crosson, P. Lombard, and S. Malone through 2002"],
["Pacific Northwest Seismic Network",
"Cascades Volcano Observatory",
"volcano monitoring (northwestern United States)"],
["docs/uwdfif.pdf"],
0xff
)
UW_fmt.ver = [ FmtVer(2, "1996-06-02", false),
FmtVer(1, "ca. 1978", nothing)
]
formats["uw"] = UW_fmt
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 261 | import SeisIO: BUF,
KW,
add_chan!,
checkbuf!,
checkbuf_8!,
dtconst,
fastread,
fastseekend,
fillx_i16_be!,
fillx_i32_be!,
mk_t!,
sμ,
μs
import SeisIO.Quake: unsafe_convert
import SeisIO.Formats: formats,
FmtVer,
FormatDesc,
HistVec
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6974 | function uwdf!( S::GphysData,
fname::String,
full::Bool,
memmap::Bool,
strict::Bool,
v::Integer)
D = Dict{String,Any}()
# Open data file
fid = memmap ? IOBuffer(Mmap.mmap(fname)) : open(fname, "r")
# Process main header
N = Int64(bswap(fastread(fid, Int16)))
mast_fs = bswap(fastread(fid, Int32))
mast_lmin = bswap(fastread(fid, Int32))
mast_lsec = bswap(fastread(fid, Int32))
mast_nx = bswap(fastread(fid, Int32))
fastskip(fid, 24)
extra = fastread(fid, 10)
fastskip(fid, 80)
if v > 2
println("mast_header:")
println("N = ", N)
println("mast_fs = ", mast_fs)
println("mast_lmin = ", mast_lmin)
println("mast_lsec = ", mast_lsec)
println("mast_nx = ", mast_nx)
println("extra = ", Char.(extra))
end
# Seek EOF to get number of structures
fastseekend(fid)
fastskip(fid, -4)
nstructs = bswap(fastread(fid, Int32))
v>0 && println(stdout, "nstructs = ", nstructs)
structs_os = (-12*nstructs)-4
tc_os = 0
v>1 && println(stdout, "structs_os = ", structs_os)
# Set version of UW seismic data file (char may be empty, leave code as-is!)
uw2::Bool = extra[3] == 0x32 ? true : false
chno = Array{Int32, 1}(undef, N)
corr = Array{Int32, 1}(undef, N)
# Read in UW2 data structures from record end
if uw2
fastseekend(fid)
fastskip(fid, structs_os)
for j = 1:nstructs
structtag = fastread(fid)
fastskip(fid, 3)
M = bswap(fastread(fid, Int32))
byteoffset = bswap(fastread(fid, Int32))
if structtag == 0x43 # 'C'
N = Int64(M)
elseif structtag == 0x54 # 'T'
fpos = fastpos(fid)
fastseek(fid, byteoffset)
chno = Array{Int32, 1}(undef, M)
corr = Array{Int32, 1}(undef, M)
n = 0
@inbounds while n < M
n += 1
chno[n] = fastread(fid, Int32)
corr[n] = fastread(fid, Int32)
end
chno .= (bswap.(chno) .+ 1)
corr .= bswap.(corr)
tc_os = -8*M
fastseek(fid, fpos)
end
end
end
v>0 && println(stdout, "Processing ", N , " channels.")
# Write time corrections
timecorr = zeros(Int64, N)
if length(chno) > 0
for n = 1:N
# corr is in μs
timecorr[chno[n]] = Int64(corr[n])
end
end
# Read UW2 channel headers ========================================
if uw2
fastseekend(fid)
fastskip(fid, -56*N + structs_os + tc_os)
I32 = Array{Int32, 2}(undef, 5, N) # chlen, offset, lmin, lsec (μs), fs, unused: expan1
I16 = Array{Int16, 2}(undef, 3, N) # lta, trig, bias unused: fill
U8 = Array{UInt8, 2}(undef, 24, N) # name(8), tmp(4), compflg(4), chid(4), expan2(4)
i = 0
@inbounds while i < N
i = i + 1
I32[1,i] = bswap(fastread(fid, Int32))
I32[2,i] = bswap(fastread(fid, Int32))
I32[3,i] = bswap(fastread(fid, Int32))
I32[4,i] = bswap(fastread(fid, Int32))
I32[5,i] = bswap(fastread(fid, Int32))
if full == true
fastskip(fid, 4)
I16[1,i] = bswap(fastread(fid, Int16))
I16[2,i] = bswap(fastread(fid, Int16))
I16[3,i] = bswap(fastread(fid, Int16))
fastskip(fid, 2)
else
fastskip(fid, 12)
end
j = 0
while j < 24
j = j + 1
U8[j,i] = fastread(fid)
end
end
# Parse U8 --------------------------------------------
# rows 01:08 channel name
# rows 09:12 format code
# rows 13:16 compflg(4)
# rows 17:20 chid
# rows 21:24 expan2
if full == true
j = 0
while j < N
j = j + 1
i = 16
while i < 24
i = i + 1
if getindex(U8, i, j) == 0x00
setindex!(U8, 0x20, i, j)
end
end
end
end
fastseek(fid, getindex(I32, 2, 1))
buf = getfield(BUF, :buf)
checkbuf_8!(buf, 4*maximum(I32[1,:]))
id = BUF.id
id[1] = 0x55
id[2] = 0x57
id[3] = 0x2e
i = 0
os = 0
@inbounds while i < N
i += 1
fastskip(fid, os)
nx = getindex(I32, 1, i)
# Generate ID
j = 3
k = 1
while j < 8 && k < 9
c = getindex(U8, k, i)
if c != 0x00
j += 1
id[j] = c
end
k += 1
end
id[j+1] = 0x2e
id[j+2] = 0x2e
j += 2
J = j+3
k = 13
while j < J && k < 17
c = getindex(U8, k, i)
if c != 0x00
j += 1
id[j] = c
end
k += 1
end
# Save to SeisChannel
C = SeisChannel()
setfield!(C, :id, unsafe_string(pointer(id), j))
setfield!(C, :fs, Float64(getindex(I32, 5, i))*1.0e-3)
setfield!(C, :units, "m/s")
if full == true
D = getfield(C, :misc)
D["lta"] = I16[1,i]
D["trig"] = I16[2,i]
D["bias"] = I16[3,i]
D["chid"] = String(getindex(U8, 17:20, i))
D["expan2"] = String(getindex(U8, 21:24, i))
if i == 1
D["mast_fs"] = mast_fs*1.0f-3
D["mast_lmin"] = mast_lmin
D["mast_lsec"] = mast_lsec
D["mast_nx"] = mast_nx
D["extra"] = String(extra)
# Go back to main header; grab what we skipped
p = fastpos(fid)
fastseek(fid, 18) # we have the first few fields already
D["mast_tape_no"] = bswap(fastread(fid, Int16))
D["mast_event_no"] = bswap(fastread(fid, Int16))
D["flags"] = bswap.(fastread!(fid, Array{Int16, 1}(undef, 10)))
fastskip(fid, 10) # we have "extra" already
comment = fastread(fid, 80)
D["comment"] = String(comment[comment.!=0x00])
# Return to where we were
fastseek(fid, p)
end
end
# Generate T
t = Array{Int64,2}(undef,2,2)
ch_time = 60000000*Int64(getindex(I32, 3, i)) +
Int64(getindex(I32, 4, i)) +
getindex(timecorr, i) -
11676096000000000
mk_t!(C, nx, ch_time)
# Generate X
x = Array{Float32,1}(undef, nx)
fmt = getindex(U8, 9, i)
if fmt == 0x53
fast_readbytes!(fid, buf, 2*nx)
fillx_i16_be!(x, buf, nx, 0)
elseif fmt == 0x4c
fast_readbytes!(fid, buf, 4*nx)
fillx_i32_be!(x, buf, nx, 0)
else
fast_readbytes!(fid, buf, 4*nx)
x .= bswap.(reinterpret(Float32, buf))[1:nx]
end
setfield!(C, :x, x)
# Push to SeisData
add_chan!(S, C, strict)
if i < N
os = getindex(I32, 2, i+1) - fastpos(fid)
end
end
end
close(fid)
return nothing
end
function uwdf(fname::String, full::Bool, memmap::Bool, strict::Bool, v::Integer)
S = SeisData()
uwdf!(S, fname, full, memmap, strict, v)
return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3260 | @doc """
Ev = readuwevt(fpat)
Read UW-format event data with file pattern stub `fpat` into SeisEvent `Ev`. `fpat` can be a datafile name, a pickfile name, or a stub:
* A datafile name must end in 'W'
* A pickfile name must end in a lowercase letter (a-z except w) and should describe a single event.
* A filename stub must be complete except for the last letter, e.g. "99062109485".
* Wild cards for multi-file read are not supported by `readuwevt` because the data format is strictly event-oriented.
""" readuwevt
function readuwevt(filename::String; v::Integer=KW.v, full::Bool=false)
df = String("")
pf = String("")
# Identify pickfile and datafile
if Sys.iswindows() == false
filename = relpath(filename)
end
ec = UInt8(filename[end])
lc = vcat(collect(UInt8, 0x61:0x76), 0x78, 0x79, 0x7a) # skip 'w'
if Base.in(ec, lc)
pf = filename
df = filename[1:end-1]*"W"
else
if ec == 0x57
df = filename
pfstub = filename[1:end-1]
else
df = filename*"W"
safe_isfile(df) || error("Invalid filename stub (no corresponding data file)!")
pfstub = filename
end
pf = pfstub * "\0"
for i in lc
pf = string(pfstub, Char(i))
if safe_isfile(pf)
break
end
end
end
# Datafile + pickfile read wrappers
if safe_isfile(df)
# Datafile read wrapper
v>0 && println(stdout, "Reading datafile ", df)
W = SeisEvent()
S = uwdf(df, full, false, false, v)
setfield!(W, :data, unsafe_convert(EventTraceData, S))
v>0 && println(stdout, "Done reading data file.")
# Pickfile read wrapper
if safe_isfile(pf)
v>0 && println(stdout, "Reading pickfile ", pf)
uwpf!(W, pf, v=v)
v>0 && println(stdout, "Done reading pick file.")
# Move event keys to event header dict
hdr = getfield(W, :hdr)
data = getfield(W, :data)
klist = ("extra", "flags", "mast_event_no", "mast_fs", "mast_lmin", "mast_lsec", "mast_nx", "mast_tape_no")
D_data = getindex(getfield(data, :misc), 1)
D_hdr = getfield(hdr, :misc)
for k in klist
if haskey(D_data, k)
D_hdr[k] = D_data[k]
delete!(D_data, k)
end
end
D_hdr["comment_df"] = get(D_data, "comment", "")
delete!(D_data, "comment")
# Convert all phase arrival times to travel times
δt = μs*(rem(hdr.ot.instant.periods.value*1000 - dtconst, 60000000))
for i = 1:data.n
D = getindex(getfield(data, :pha), i)
for p in keys(D)
pha = get(D, p, SeisPha())
tt = getfield(pha, :tt) - δt
(tt < 0.0) && (tt = mod(tt, 60))
setfield!(pha, :tt, tt)
end
end
#= Note: use of "mod" above corrects for the (annoyingly frequent) case
where file begin time and origin time have a different minute value.
=#
else
v>0 && println(stdout, "Skipping pickfile (not found or not given)")
end
# Pickfile only
else
(hdr, source) = uwpf(pf, v=v)
W = SeisEvent(hdr = hdr, source = source)
end
# Set event name if unset
if isempty(W.hdr.id)
evid = String((split(safe_isfile(pf) ? pf : df, r"[/\\]")[end])[1:end-1])
W.hdr.id = evid
W.source.eid = evid
end
return W
end
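# Usage sketch (added comment; file stub borrowed from the docstring above):
#
#   W = readuwevt("99062109485", v=1, full=true)
#
# fills W.hdr and W.source from the pickfile and W.data from the datafile.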
function nextline(pf::IO, c::Char)
eof(pf) && return "-1"
s = "\0"
while s[1] != c
eof(pf) && return "-1"
s = chomp(readline(pf))
end
return s
end
@doc """
H, R = uwpf(pf[, v])
Read UW-format seismic pick file `pf` into SeisHdr object `H`, with seismic
source description (focal mechanism) returned in SeisSrc object `R`.
uwpf!(W, pf[, v::Integer=KW.v])
Read UW-format seismic pick info from pickfile `pf` into SeisEvent object `W`.
Overwrites W.source and W.hdr with pickfile information. Keyword `v` controls
verbosity.
!!! caution
    Reader has no safety check to guarantee that `pf` describes the same event as the data in `W`.
""" uwpf
function uwpf(pickfile::String; v::Integer=KW.v)
# Initialize variables that will fill SeisHdr structure
D = Dict{String, Any}()
MAG = -5.0f0
ID = ""
OT = zero(Float64)
loc = zeros(Float64, 12)
sig = ""
locflags = Array{Char, 1}(undef,8)
R = SeisSrc()
fill!(locflags, '0')
# Read begins
pf = open(pickfile, "r")
# ========================================================================
# Acard line
A = nextline(pf, 'A')
(v > 1) && println(stdout, A)
c = 0
if length(A) == 75 || length(A) == 12
y = zero(Int8)
c = 1900
else
y = Int8(2)
end
D["type"] = getindex(A, 2)
# ACard indices
#
# Crosson's notes:
# Type, Year, Month, Day, Hour, Min, Sec, LatDeg, NS, Latmin*100, LongDeg, EW, Lonmin*100, Depth, Fix, Magnitude, Numsta, numphase, Gap, Mindelta, RMS, ERR, Q1, Q2, Velmodel
# ATYYYYMMDDHHMM SS.SS LLNMMMM LLLWMMMM DD.DD* M.M NN/0NN GGG DD R.RR EE.EQQ VV
# AF200206291436 4.79 45N2009 121W4118 6.20* 4.5 41/041 37 9 0.27 0.1BB O0
#
# Sec,Lat,Lon,Dep,Fix,Mag,Nst,Nph,Gap, d0,RMS,ERR, Q, mod
si = Int8[13, 19, 27, 36, 42, 43, 47, 51, 54, 58, 61, 66, 71, 74] .+ y
ei = Int8[18, 26, 35, 41, 42, 46, 49, 53, 57, 60, 65, 70, 72, 75] .+ y
L = length(si)
  # Parse rest of Acard line
ah = Array{String,1}(undef, L)
for i = 1:L
setindex!(ah, getindex(A, getindex(si, i):getindex(ei, i)), i)
end
v > 2 && println("ah = ", ah)
# origin time, event depth, and magnitude
OT = d2u(DateTime(string(parse(Int64, A[3:4+y]) + c)*A[5+y:12+y],
"yyyymmddHHMM")) + parse(Float64, getindex(ah, 1))
evla = getindex(ah, 2)
evlo = getindex(ah, 3)
loc[3] = parse(Float64, getindex(ah, 4)) # depth :dep
locflags[3] = (getindex(ah, 5) == "F") ? '1' : '0'
MAG = parse(Float32, getindex(ah, 6))
nst = parse(Int64, getindex(ah, 7))
D["numpha"] = parse(Int64, getindex(ah, 8))
loc[10] = parse(Float64, getindex(ah, 9)) # gap :gap
loc[11] = parse(Float64, getindex(ah, 10)) # min distance :dmin
loc[9] = parse(Float64, getindex(ah, 11)) # rms pick error :rms
loc[8] = parse(Float64, getindex(ah, 12)) # standard error :se
D["qual"] = getindex(ah, 13)
D["vmod"] = getindex(ah, 14)
# Convert lat and lon to decimal degrees
loc[1] = (parse(Float64, evla[1:3]) +
parse(Float64, evla[5:6])/60.0 +
parse(Float64, evla[7:8])/6000.0) * (evla[4] == 'S' ? -1.0 : 1.0)
loc[2] = (parse(Float64, evlo[1:4]) +
parse(Float64, evlo[6:7])/60.0 +
parse(Float64, evlo[8:9])/6000.0) * (evlo[5] == 'W' ? -1.0 : 1.0)
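  # Worked example (added comment), using the sample A-card above:
  #   lat "45N2009"  ->   45 + 20/60 +  9/6000  ≈  45.3348 (N positive)
  #   lon "121W4118" -> -(121 + 41/60 + 18/6000) ≈ -121.6863 (W negative)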
# ========================================================================
# Error line
seekstart(pf)
eline = nextline(pf, 'E')
if eline != "-1"
# E O0 0.27 0.022 0.281 0.281 141.30 38 Z 0.05 0.05 0.11 0.01 4.50 0.000.03
# Effectively: 10x MeanRMS SDabout0 SDaboutMean SSWRES NDFR FIXXYZT SDx SDy SDz SDt Mag 5x MeanUncert
# 10x f6.3 f6.3 f6.3 f8.2 i4 a4 f5.2 f5.2 f5.2 f5.2 f5.2 5x f4.2
eline_keys = String["MeanRMS", "SDabout0", "SDaboutMean", "SSWRES", "NDFR", "FIXXYZT", "SDx", "SDy", "SDz", "SDt", "Mag", "MeanUncert"]
si = Int8[ 11, 17, 23, 29, 37, 42, 46, 51, 56, 61, 66, 76]
ei = Int8[ 16, 22, 28, 36, 40, 45, 50, 55, 60, 65, 70, 79]
j = 0
while j < length(eline_keys)
j = j + 1
a = getindex(si, j)
b = getindex(ei, j)
s = getindex(eline, a:b)
if j == 6
(s[1] == 'X') && (locflags[1] = '1')
(s[2] == 'Y') && (locflags[2] = '1')
(s[3] == 'Z') && (locflags[3] = '1')
(s[4] == 'T') && (locflags[4] = '1')
elseif j == 7
loc[4] = parse(Float64, s)
elseif j == 8
loc[5] = parse(Float64, s)
elseif j == 9
loc[6] = parse(Float64, s)
elseif j == 10
if s != "*****"
loc[7] = parse(Float64, s)
end
elseif !isempty(s)
k = getindex(eline_keys, j)
D[k] = parse(j == 5 ? Int32 : Float32, s)
end
end
sig = "1σ"
end
LOC = EQLoc(loc..., nst, parse(UInt8, join(locflags), base=2), "", "hypocenter", "", "SPONG")
# ========================================================================
# Focal mechanism line(s)
  #= Note: planes F and G azimuth and incidence are NOT in N°E. They're
  measured clockwise from the (N°E) azimuth of the dip vector, because R.
  Crosson wanted to be a unique special snowflake.
  The axes copied to Hdr.axes are therefore P, T, with the last field of
  the 3-tuple set to 0.0.
=#
seekstart(pf)
mline = nextline(pf,'M')
m = 0
PAX = Array{Float64,2}(undef, 2 ,2)
NP = Array{Float64,2}(undef, 2, 2)
if mline != "-1"
# Convert first mechanism line
M = split(mline)
setindex!(NP, parse(Float64, getindex(M, 3)), 1)
setindex!(NP, parse(Float64, getindex(M, 4)), 2)
setindex!(NP, parse(Float64, getindex(M, 6)), 3)
setindex!(NP, parse(Float64, getindex(M, 7)), 4)
# Order here is T, P
setindex!(PAX, parse(Float64, getindex(M, 18)), 1)
setindex!(PAX, parse(Float64, getindex(M, 19)), 2)
setindex!(PAX, parse(Float64, getindex(M, 15)), 3)
setindex!(PAX, parse(Float64, getindex(M, 16)), 4)
setfield!(R, :pax, PAX)
setfield!(R, :planes, NP)
pf_str = abspath(pickfile)
setfield!(R, :src, pf_str)
note!(R, "+source ¦ " * pf_str)
# Add rest to a dictionary
mech_lines = Array{String, 1}(undef, 0)
mline = nextline(pf,'M')
while mline != "-1"
m += 1
push!(mech_lines, mline)
mline = nextline(pf,'M')
end
R.gap = getindex(loc, 10)
if !isempty(mech_lines)
R.misc["mech_lines"] = mech_lines
end
v>0 && println(stdout, "Processed ", m, " focal mechanism lines.")
note!(R, string("planes are arranged [θ₁ θ₂; ϕ₁ ϕ₂] but θ₁, θ₂ are NOT oriented N°E. ",
"They're measured clockwise from the N°E azimuth of the dip vector."))
note!(R, string("gap is copied from the A-card line; this likely ",
"under-represents the true focal mechanism gap."))
end
# ========================================================================
# Comment lines
seekstart(pf)
m = 0
cline = nextline(pf,'C')
if cline != "-1"
D["comment"] = Array{String, 1}(undef, 0)
while cline != "-1"
m = m + 1
L = lastindex(cline)
if occursin("NEAR", cline)
D["loc_name"] = getindex(cline, 8:L)
elseif occursin("EVENT ID", cline)
ID = getindex(cline, 13:L)
elseif occursin("LOCATED BY", cline)
setfield!(LOC, :src, getfield(LOC, :src) * "; " * getindex(cline, 3:L))
else
push!(D["comment"], getindex(cline, 3:L))
end
cline = nextline(pf,'C')
end
v > 0 && println(stdout, "Processed ", m, " comment lines.")
end
# ========================================================================
# Done reading
close(pf)
H = SeisHdr()
src_str = abspath(pickfile)
setfield!(H, :loc, LOC)
setfield!(H, :src, src_str)
note!(H, "+source ¦ " * src_str)
if MAG != -5.0f0
setfield!(H, :mag, EQMag(val = MAG, scale = "Md", src = "SPONG"))
end
if OT != zero(Float64)
setfield!(H, :ot, u2d(OT))
end
if isempty(ID) == false
setfield!(H, :id, string(ID))
setfield!(R, :eid, string(ID))
end
if isempty(D) == false
setfield!(H, :misc, D)
end
return H, R
end
@doc (@doc uwpf)
function uwpf!(S::SeisEvent, pickfile::String; v::Integer=KW.v)
(H, R) = uwpf(pickfile, v=v)
N = getfield(getfield(S, :data), :n)
ID = getfield(getfield(S, :data), :id)
PHA = getfield(getfield(S, :data), :pha)
cha = Array{String,1}(undef, N)
for i = 1:N
id = split(getindex(ID, i), ".", keepempty=true)
setindex!(cha, string(".", getindex(id, 2), ".", getindex(id, 4)), i)
end
v > 2 && println("cha = ", cha)
# Pick lines (requires reopening/rereading file)
m = 0
ndur = 0
npol = 0
pf = open(pickfile, "r")
pick_line = nextline(pf, '.')
while pick_line != "-1"
v>1 && println(stdout, "pick_line = ", pick_line)
m += 1
pdat = split(pick_line, "(")
pick_cha = getindex(pdat, 1)
pcat = PhaseCat()
dur = 0.0
# Parse picks
for j = 2:length(pdat)
pha = split(getindex(pdat, j))
if getindex(pha, 1) == "P"
v > 2 && println(pha)
pol = getindex(pha, 3)[1]
if pol != '_'
npol = npol + 1
end
pcat[getindex(pha,2)] = SeisPha(
0.0, # amp
0.0, # d
0.0, # inc
parse(Float64, pha[7][1:end-1]), # res
0.0, # rp
0.0, # ta
parse(Float64, pha[4]), # tt --> relative to file begin; fixed below
parse(Float64, pha[6]), # unc
pol, # polarity
pha[5][1] # quality
)
elseif getindex(pha, 1) == "D"
dur = parse(Float64, getindex(pha,2)[1:end-1])
end
end
# Assign to the correct channel
for i = 1:N
if startswith(pick_cha, getindex(cha, i))
for p in keys(pcat)
PHA[i][p] = pcat[p]
end
if dur > 0.0
ndur = ndur + 1
S.data.misc[i]["dur"] = dur
end
break
end
end
pick_line = nextline(pf, '.')
end
v>0 && println(stdout, "Processed ", m, " pick lines.")
close(pf)
# Set ndur, npol
setfield!(getfield(H, :mag), :nst, ndur)
setfield!(R, :npol, npol)
# Place hdr, source
setfield!(S, :hdr, H)
setfield!(S, :source, R)
# Done
return S
end
export GphysChannel
abstract type GphysChannel end
function isequal(C::T, D::T) where {T<:GphysChannel}
q::Bool = true
F = fieldnames(T)
for f in F
if f != :notes
q = min(q, getfield(C,f) == getfield(D,f))
end
end
return q
end
==(C::T, D::T) where {T<:GphysChannel} = isequal(C,D)
in(s::String, C::GphysChannel) = getfield(C, :id)==s
@doc (@doc namestrip)
namestrip!(C::T) where {T<:GphysChannel} = setfield!(C, :name, namestrip(getfield(C, :name)))
export GphysData, findid, findchan, prune, prune!, pull
abstract type GphysData end
"""
    findchan(id::Union{Regex,String}, S::GphysData)
Get all channel indices `i` in `S` such that `id` occurs in `S.id[i]`.
"""
findchan(r::Union{Regex,String}, S::GphysData) = findall([occursin(r, i) for i in getfield(S, :id)])
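# Usage sketch (added comment; ids are hypothetical): with
# S.id == ["UW.SEP..EHZ", "UW.HYPO..EHZ"],
#   findchan("EHZ", S)  returns [1, 2]
#   findchan(r"SEP", S) returns [1]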
# Extract
"""
T = pull(S::SeisData, id::String)
Extract the first channel with id=`id` from `S` and return it as a new SeisChannel structure. The corresponding channel in `S` is deleted.
    T = pull(S::SeisData, i::Union{Integer, UnitRange, Array{Int64,1}})
Extract channel(s) `i` from `S`, deleting them from `S`. Returns a SeisChannel for an Integer `i`, otherwise a SeisData.
"""
function pull(S::T, s::String) where {T<:GphysData}
i = findid(S, s)
U = deepcopy(getindex(S, i))
deleteat!(S, i)
return U
end
function pull(S::T, J::UnitRange) where {T<:GphysData}
U = deepcopy(getindex(S, J))
deleteat!(S, J)
return U
end
function pull(S::T, J::Array{Int64,1}) where {T<:GphysData}
U = deepcopy(getindex(S, J))
deleteat!(S, J)
return U
end
function pull(S::T, i::Integer) where {T<:GphysData}
C = deepcopy(getindex(S, i))
deleteat!(S, i)
return C
end
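# Usage sketch (added comment; id is hypothetical):
#
#   C = pull(S, "UW.SEP..EHZ")   # single channel, removed from S
#   U = pull(S, 1:2)             # former channels 1-2 of S, removed from S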
# ============================================================================
# Indexing, searching, iteration, size
# s = S[j] returns a SeisChannel struct
# s = S[i:j] returns a SeisData struct
# S[i:j].foo = bar won't work
lastindex(S::T) where {T<:GphysData} = getfield(S, :n)
firstindex(S::T) where {T<:GphysData} = 1
length(S::T) where {T<:GphysData} = S.n
in(s::String, S::GphysData) = in(s, getfield(S, :id))
function getindex(S::T, J::Array{Int,1}) where {T<:GphysData}
n = getfield(S, :n)
U = T()
F = fieldnames(T)
# ([(getfield(S, f))[J] = getfield(U, f) for f in datafields])
for f in F
if (f in unindexed_fields) == false
setfield!(U, f, getindex(getfield(S, f), J))
end
end
setfield!(U, :n, lastindex(J))
return U
end
getindex(S::GphysData, J::UnitRange) = getindex(S, collect(J))
function setindex!(S::T, U::T, J::Array{Int,1}) where {T<:GphysData}
typeof(S) == typeof(U) || throw(MethodError)
length(J) == U.n || throw(BoundsError)
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
setindex!(getfield(S, f), getfield(U, f), J)
end
end
# ([(getfield(S, f))[J] = getfield(U, f) for f in datafields])
return nothing
end
setindex!(S::GphysData, U::GphysData, J::UnitRange) = setindex!(S, U, collect(J))
@doc """
sort!(S::SeisData, [rev=false])
In-place sort of channels in object S by `S.id`. Specify `rev=true` to reverse the sort order.
sort(S::SeisData, [rev=false])
Sort channels in object S by `S.id`. Specify `rev=true` to reverse the sort order.
""" sort!
function sort!(S::T; rev=false::Bool) where {T<:GphysData}
j = sortperm(getfield(S, :id), rev=rev)
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
setfield!(S, f, getfield(S,f)[j])
end
end
return nothing
end
@doc (@doc sort!)
function sort(S::T; rev=false::Bool) where {T<:GphysData}
U = deepcopy(S)
sort!(U, rev=rev)
return U
end
isempty(S::T) where {T<:GphysData} = (S.n == 0) ? true : minimum([isempty(getfield(S,f)) for f in fieldnames(T)])
function isequal(S::T, U::T) where {T<:GphysData}
q = true
F = fieldnames(T)
for f in F
if f != :notes
q = min(q, getfield(S,f) == getfield(U,f))
end
end
return q
end
==(S::T, U::T) where {T<:GphysData} = isequal(S,U)
# Append, add, delete, sort
function append!(S::T, U::T) where {T<:GphysData}
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
append!(getfield(S, f), getfield(U, f))
end
end
S.n += U.n
return nothing
end
# ============================================================================
# deleteat!
function deleteat!(S::T, j::Int) where {T<:GphysData}
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
deleteat!(getfield(S, f), j)
end
end
S.n -= 1
return nothing
end
function deleteat!(S::T, J::Array{Int,1}) where {T<:GphysData}
sort!(J)
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
deleteat!(getfield(S, f), J)
end
end
S.n -= lastindex(J)
return nothing
end
deleteat!(S::T, K::UnitRange) where {T<:GphysData} = deleteat!(S, collect(K))
@doc """
prune!(S::SeisData)
Delete all channels from `S` that have no data (i.e., `S.x[i]` or `S.t[i]` is empty).
""" prune!
function prune!(S::GphysData)
n = getfield(S, :n)
klist = Array{Int64,1}(undef, 0)
sizehint!(klist, n)
T = getfield(S, :t)
X = getfield(S, :x)
i = 0
while i < n
i = i+1
# non-empty X with empty T should be rare
if isempty(getindex(X, i))
push!(klist, i)
elseif isempty(getindex(T, i))
push!(klist, i)
end
end
deleteat!(S, klist)
return nothing
end
@doc (@doc prune!)
prune(S::T) where {T<:GphysData} = (U = deepcopy(S); prune!(U); return U)
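# Usage sketch (added comment): after a partly-failed request, drop channels
# that returned no data.
#
#   prune!(S)       # in-place
#   U = prune(S)    # non-mutating copy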
# ============================================================================
# delete!
function delete!(S::T, s::Union{Regex,String}; exact=true::Bool) where {T<:GphysData}
if exact
i = findid(S, s)
deleteat!(S, i)
else
deleteat!(S, findchan(s,S))
end
return nothing
end
# With this convention, S+U-U = S
function delete!(S::T, U::T) where {T<:GphysData}
id = reverse(getfield(U, :id))
J = Array{Int64,1}(undef,0)
for i in id
j = findlast(S.id.==i)
(j == nothing) || push!(J, j)
end
deleteat!(S, J)
return nothing
end
-(S::T, r::Union{Regex,String}) where {T<:GphysData} = (U = deepcopy(S); delete!(U,r); return U) # By channel id regex or string
-(S::T, U::T) where {T<:GphysData} = (S2 = deepcopy(S); delete!(S2, U); return S2) # Remove all channels with IDs in one SeisData from another
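# Usage sketch (added comment; ids are hypothetical):
#
#   delete!(S, "UW.SEP..EHZ")            # remove the channel with this exact id
#   delete!(S, r"\.EHZ$", exact=false)   # remove every channel whose id matches
#   U = S - "UW.SEP..EHZ"                # non-mutating version of the first call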
# Purpose: deal with intentional overly-generous "resize!" when parsing SEED
function trunc_x!(S::GphysData)
n = getfield(S, :n)
X = getfield(S, :x)
T = getfield(S, :t)
for i = 1:n
t = getindex(T, i)
L = size(t, 1)
if L == 0
setindex!(X, Array{Float64,1}(undef, 0), i)
else
x = getindex(X, i)
nx = t[L,1]
if length(x) > nx
resize!(x, nx)
end
((t[L,1] == t[L-1,1]) && (t[L,2] == 0)) && (S.t[i] = t[1:L-1,:])
end
end
return nothing
end
function namestrip!(S::GphysData)
names = getfield(S, :name)
for (i, name) in enumerate(names)
setindex!(names, namestrip(name), i)
end
return nothing
end
export InstrumentPosition, EQLoc, GenLoc, GeoLoc, NodalLoc, UTMLoc, XYLoc
@doc """
**InstrumentPosition**
An abstract type whose subtypes (GenLoc, GeoLoc, UTMLoc, XYLoc)
describe instrument positions in different ways.
Additional structures can be added for custom types.
Matrix of structure fields and rough equivalencies:
| **GenLoc** | **GeoLoc** | **UTMLoc** | **XYLoc** | **EQLoc** | **NodalLoc** |
|:---|:---|:---|:---|:---|:---|
| datum | datum | datum | datum | datum | |
| loc | | | | | |
| | | zone | orig | | |
| | lon | E | x | lon | x |
| | lat | N | y | lat | |
| | el | el | z | | |
| | dep | dep | | dep | |
| | az | az | az | | |
| | inc | inc | inc | | |
""" InstrumentPosition
abstract type InstrumentPosition end
function showloc_full(io::IO, Loc::T) where {T<:InstrumentPosition}
F = fieldnames(T)
println(io, T, " with fields:")
for f in F
fn = lpad(String(f), 5, " ")
println(io, fn, ": ", getfield(Loc,f))
end
return nothing
end
function loctyp2code(Loc::InstrumentPosition)
T = typeof(Loc)
c = UInt8(
if T == GeoLoc
0x01
elseif T == UTMLoc
0x02
elseif T == XYLoc
0x03
elseif T == NodalLoc
0x04
else
0x00
end
)
return c
end
function code2loctyp(c::UInt8)
T::Type = (
if c == 0x01
GeoLoc
elseif c == 0x02
UTMLoc
elseif c == 0x03
XYLoc
elseif c == 0x04
NodalLoc
else
GenLoc
end
)
return T
end
"""
GenLoc
Generic instrument location with two fields:
* datum::String
* loc::Array{Float64,1}
"""
mutable struct GenLoc <: InstrumentPosition
datum::String
loc::Array{Float64,1}
GenLoc(datum::String, loc::Array{Float64,1}) = new(datum, loc)
end
GenLoc(; datum::String = "", loc::Array{Float64,1} = Float64[]) = GenLoc(datum, loc)
GenLoc(X::Array{Float64,1}) = GenLoc("", X)
getindex(x::GenLoc, i::Int64) = getindex(getfield(x, :loc), i)
setindex!(x::GenLoc, y::Float64, i::Int64) = setindex!(getfield(x, :loc), y, i)
function show(io::IO, Loc::GenLoc)
if get(io, :compact, false) == false
showloc_full(io, Loc)
else
print(io, repr(getfield(Loc, :loc), context=:compact => true))
end
return nothing
end
function write(io::IO, Loc::GenLoc)
write(io, Int64(sizeof(Loc.datum)))
write(io, Loc.datum)
write(io, Int64(length(Loc.loc)))
write(io, Loc.loc)
return nothing
end
read(io::IO, ::Type{GenLoc}) = GenLoc(String(fastread(io, fastread(io, Int64))),
read!(io, Array{Float64, 1}(undef, fastread(io, Int64))))
isempty(Loc::GenLoc) = min(isempty(Loc.datum), isempty(Loc.loc))
hash(Loc::GenLoc) = hash(Loc.datum, hash(Loc.loc))
isequal(S::GenLoc, U::GenLoc) = min(isequal(S.datum, U.datum), isequal(S.loc, U.loc))
==(S::GenLoc, U::GenLoc) = isequal(S, U)
sizeof(Loc::GenLoc) = 16 + sizeof(Loc.datum) + sizeof(Loc.loc)
"""
GeoLoc
Standard instrument location description:
* datum::String
* lat::Float64 (North is positive)
* lon::Float64 (East is positive)
* el::Float64 (above sea level is positive)
* dep::Float64
* az::Float64 (clockwise from north)
* inc::Float64 (downward from +z = 0°)
"""
mutable struct GeoLoc <: InstrumentPosition
datum::String
lat::Float64
lon::Float64
el::Float64
dep::Float64
az::Float64
inc::Float64
function GeoLoc(
datum ::String ,
lat ::Float64,
lon ::Float64,
el ::Float64,
dep ::Float64,
az ::Float64,
inc ::Float64
)
return new(datum, lat, lon, el, dep, az, inc)
end
end
GeoLoc(;
datum ::String = "",
lat ::Float64 = zero(Float64),
lon ::Float64 = zero(Float64),
el ::Float64 = zero(Float64),
dep ::Float64 = zero(Float64),
az ::Float64 = zero(Float64),
inc ::Float64 = zero(Float64)
) = GeoLoc(datum, lat, lon, el, dep, az, inc)
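# Construction sketch (added comment; coordinates are hypothetical). Keywords
# default to "" / 0.0, so only fields of interest need to be set:
#
#   loc = GeoLoc(datum = "WGS84", lat = 47.6062, lon = -122.3321, el = 56.0)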
function show(io::IO, loc::GeoLoc)
if get(io, :compact, false) == false
showloc_full(io, loc)
else
c = :compact => true
print(io, repr(getfield(loc, :lat), context=c), " N, ",
repr(getfield(loc, :lon), context=c), " E, ",
repr(getfield(loc, :el), context=c), " m")
end
return nothing
end
function write(io::IO, Loc::GeoLoc)
datum = codeunits(getfield(Loc, :datum))
L = Int64(length(datum))
write(io, L)
write(io, datum)
write(io, Loc.lat)
write(io, Loc.lon)
write(io, Loc.el)
write(io, Loc.dep)
write(io, Loc.az)
write(io, Loc.inc)
return nothing
end
read(io::IO, ::Type{GeoLoc}) = GeoLoc(
String(fastread(io, fastread(io, Int64))),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64)
)
function isempty(Loc::GeoLoc)
q::Bool = isempty(getfield(Loc, :datum))
for f in (:lat, :lon, :el, :dep, :az, :inc)
q = min(q, getfield(Loc, f) == 0.0)
end
return q
end
function hash(Loc::GeoLoc)
h = hash(getfield(Loc, :datum))
for f in (:lat, :lon, :el, :dep, :az, :inc)
h = hash(getfield(Loc, f), h)
end
return h
end
function isequal(S::GeoLoc, U::GeoLoc)
q::Bool = isequal(getfield(S, :datum), getfield(U, :datum))
if q == false
return q
else
for f in (:lat, :lon, :el, :dep, :az, :inc)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
end
==(S::GeoLoc, U::GeoLoc) = isequal(S, U)
sizeof(Loc::GeoLoc) = 104 + sizeof(getfield(Loc, :datum))
"""
UTMLoc
UTM instrument location
* datum::String
* zone::Int8
* hemi::Char (hemisphere)
* E::UInt64 (Easting, in meters)
* N::UInt64 (Northing, in meters)
* el::Float64
* dep::Float64
* az::Float64 (clockwise from north)
* inc::Float64 (downward from +z = 0°)
"""
mutable struct UTMLoc <: InstrumentPosition
datum::String
zone::Int8
hemi::Char
E::UInt64
N::UInt64
el::Float64
dep::Float64
az::Float64
inc::Float64
UTMLoc(
datum::String,
zone::Int8,
hemi::Char,
E::UInt64,
N::UInt64,
el::Float64,
dep::Float64,
az::Float64,
inc::Float64
) = new(datum, zone, hemi, E, N, el, dep, az, inc)
end
UTMLoc(; datum ::String = "",
zone ::Int8 = zero(Int8),
hemi ::Char = ' ',
E ::UInt64 = zero(UInt64),
N ::UInt64 = zero(UInt64),
el ::Float64 = zero(Float64),
dep ::Float64 = zero(Float64),
az ::Float64 = zero(Float64),
inc ::Float64 = zero(Float64)
) = UTMLoc(datum, zone, hemi, E, N, el, dep, az, inc)
function show(io::IO, loc::UTMLoc)
if get(io, :compact, false) == false
showloc_full(io, loc)
else
print(io, getfield(loc, :zone), " ",
getfield(loc, :hemi), " ",
getfield(loc, :E), " ",
getfield(loc, :N))
end
return nothing
end
function write(io::IO, Loc::UTMLoc)
write(io, sizeof(Loc.datum))
write(io, Loc.datum)
write(io, Loc.zone)
write(io, Loc.hemi)
write(io, Loc.E)
write(io, Loc.N)
write(io, Loc.el)
write(io, Loc.dep)
write(io, Loc.az)
write(io, Loc.inc)
return nothing
end
read(io::IO, ::Type{UTMLoc}) = UTMLoc(
String(fastread(io, fastread(io, Int64))),
fastread(io, Int8),
fastread(io, Char),
fastread(io, UInt64),
fastread(io, UInt64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64)
)
function isempty(Loc::UTMLoc)
q::Bool = isempty(getfield(Loc, :datum))
q = min(q, getfield(Loc, :zone) == zero(Int8))
q = min(q, getfield(Loc, :hemi) == ' ')
q = min(q, getfield(Loc, :E) == zero(UInt64))
q = min(q, getfield(Loc, :N) == zero(UInt64))
for f in (:el, :dep, :az, :inc)
q = min(q, getfield(Loc, f) == zero(Float64))
end
return q
end
function hash(Loc::UTMLoc)
h = hash(getfield(Loc, :datum))
for f in (:zone, :hemi, :E, :N, :el, :dep, :az, :inc)
h = hash(getfield(Loc, f), h)
end
return h
end
function isequal(S::UTMLoc, U::UTMLoc)
q::Bool = isequal(getfield(S, :datum), getfield(U, :datum))
if q == false
return q
else
for f in (:zone, :hemi, :E, :N, :el, :dep, :az, :inc)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
end
==(S::UTMLoc, U::UTMLoc) = isequal(S, U)
sizeof(Loc::UTMLoc) = 114 + sizeof(Loc.datum)
"""
XYLoc
Locally defined instrument position:
* datum::String
* x::Float64 (meters)
* y::Float64 (meters)
* z::Float64 (meters)
* az::Float64 (clockwise from north)
* inc::Float64 (downward from +z = 0°)
* ox::Float64 (origin, typically geographic)
* oy::Float64
* oz::Float64
"""
mutable struct XYLoc <: InstrumentPosition
datum::String
x::Float64
y::Float64
z::Float64
az::Float64
inc::Float64
ox::Float64
oy::Float64
oz::Float64
XYLoc(
datum ::String,
x ::Float64,
y ::Float64,
z ::Float64,
az ::Float64,
inc ::Float64,
ox ::Float64,
oy ::Float64,
oz ::Float64
) = new(datum, x, y, z, az, inc, ox, oy, oz)
end
XYLoc(; datum ::String = "",
x ::Float64 = zero(Float64),
y ::Float64 = zero(Float64),
z ::Float64 = zero(Float64),
az ::Float64 = zero(Float64),
inc ::Float64 = zero(Float64),
ox ::Float64 = zero(Float64),
oy ::Float64 = zero(Float64),
oz ::Float64 = zero(Float64)
) = XYLoc(datum, x, y, z, az, inc, ox, oy, oz)
function show(io::IO, loc::XYLoc)
if get(io, :compact, false) == false
showloc_full(io, loc)
else
c = :compact => true
print(io, "x ", repr(getfield(loc, :x), context=c),
", y ", repr(getfield(loc, :x), context=c),
", z ", repr(getfield(loc, :x), context=c))
end
return nothing
end
function write(io::IO, Loc::XYLoc)
write(io, sizeof(Loc.datum))
write(io, Loc.datum)
write(io, Loc.x)
write(io, Loc.y)
write(io, Loc.z)
write(io, Loc.az)
write(io, Loc.inc)
write(io, Loc.ox)
write(io, Loc.oy)
write(io, Loc.oz)
return nothing
end
read(io::IO, ::Type{XYLoc}) = XYLoc(
String(fastread(io, fastread(io, Int64))),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64)
)
function isempty(Loc::XYLoc)
q::Bool = isempty(getfield(Loc, :datum))
for f in (:x, :y, :z, :az, :inc, :ox, :oy, :oz)
q = min(q, getfield(Loc, f) == 0.0)
end
return q
end
function hash(Loc::XYLoc)
h = hash(getfield(Loc, :datum))
for f in (:x, :y, :z, :az, :inc, :ox, :oy, :oz)
h = hash(getfield(Loc, f), h)
end
return h
end
function isequal(S::XYLoc, U::XYLoc)
q::Bool = isequal(getfield(S, :datum), getfield(U, :datum))
if q == false
return q
else
for f in (:x, :y, :z, :az, :inc, :ox, :oy, :oz)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
end
==(S::XYLoc, U::XYLoc) = isequal(S, U)
sizeof(Loc::XYLoc) = 136 + sizeof(getfield(Loc, :datum))
"""
NodalLoc
Instrument position along a nodal array
* x::Float64 (meters)
* y::Float64 (meters)
* z::Float64 (meters)
"""
mutable struct NodalLoc <: InstrumentPosition
x::Float64
y::Float64
z::Float64
NodalLoc(
x ::Float64,
y ::Float64,
z ::Float64,
) = new(x,y,z)
end
NodalLoc(;
x ::Float64 = zero(Float64),
y ::Float64 = zero(Float64),
z ::Float64 = zero(Float64),
) = NodalLoc(x,y,z)
function show(io::IO, loc::NodalLoc)
if get(io, :compact, false) == false
showloc_full(io, loc)
else
c = :compact => true
print(io, "x ", repr(getfield(loc, :x), context=c),
", y ", repr(getfield(loc, :x), context=c),
", z ", repr(getfield(loc, :x), context=c))
end
return nothing
end
function write(io::IO, Loc::NodalLoc)
write(io, Loc.x)
write(io, Loc.y)
write(io, Loc.z)
return nothing
end
read(io::IO, ::Type{NodalLoc}) = NodalLoc(
fastread(io, Float64),
fastread(io, Float64),
fastread(io, Float64),
)
function isempty(Loc::NodalLoc)
q::Bool = true
for f in (:x, :y, :z)
q = min(q, getfield(Loc, f) == 0.0)
end
return q
end
function hash(Loc::NodalLoc)
h = hash(zero(UInt64))
for f in (:x, :y, :z)
h = hash(getfield(Loc, f), h)
end
return h
end
function isequal(S::NodalLoc, U::NodalLoc)
q = true
for f in (:x, :y, :z)
q = min(q, getfield(S,f) == getfield(U,f))
end
return q
end
==(S::NodalLoc, U::NodalLoc) = isequal(S, U)
sizeof(Loc::NodalLoc) = 48
const default_loc = GeoLoc()
# import Base:getindex, setindex!, show, read, write, isequal, ==, isempty, sizeof, copy, hash
export CoeffResp, InstrumentResponse, GenResp, MultiStageResp, PZResp, PZResp64
@doc """
**InstrumentResponse**
An abstract type whose subtypes (PZResp) describe instrument frequency responses.
Additional structures can be added for custom types.
""" InstrumentResponse
abstract type InstrumentResponse end
function showresp_full(io::IO, Resp::T) where {T<:InstrumentResponse}
F = fieldnames(T)
println(io, T, " with fields:")
for f in F
fn = lpad(String(f), 5, " ")
println(io, fn, ": ", getfield(Resp,f))
end
return nothing
end
function resptyp2code(Resp::Union{InstrumentResponse, Nothing})
T = typeof(Resp)
if T == PZResp
return 0x01
elseif T == PZResp64
return 0x02
elseif T == CoeffResp
return 0x03
elseif T == MultiStageResp
return 0x04
elseif T == Nothing
return 0xff
else
return 0x00
end
end
function code2resptyp(c::UInt8)
if c == 0x00
return GenResp
elseif c == 0x01
return PZResp
elseif c == 0x02
return PZResp64
elseif c == 0x03
return CoeffResp
elseif c == 0x04
return MultiStageResp
elseif c == 0xff
return Nothing
end
end
copy(R::T) where {T<:InstrumentResponse} = deepcopy(R)
"""
GenResp
Generic instrument response with two fields:
* desc::String (descriptive string)
* resp::Array{Complex{Float64},2}
"""
mutable struct GenResp <: InstrumentResponse
desc::String
resp::Array{Complex{Float64},2}
  function GenResp(S::String, X::Array{Complex{Float64},2})
return new(S, X)
end
end
# GenResp default
GenResp(;
desc::String = "",
resp::Array{Complex{Float64},2} = Array{Complex{Float64},2}(undef, 0, 0)
) = GenResp(desc, resp)
GenResp(X::Array{Complex{Float64},2}) = GenResp(desc = "", resp = X)
# How we read from file
GenResp(s::String, X::Array{T,2}, Y::Array{T,2}) where {T <: Real} = GenResp(s, complex.(Float64.(X), Float64.(Y)))
getindex(x::GenResp, i::Int64) = getindex(getfield(x, :resp), i)
getindex(x::GenResp, i::Int64, j::Int64) = getindex(getfield(x, :resp), i, j)
setindex!(x::GenResp, y::Number, i::Int64) = setindex!(getfield(x, :resp), complex(y), i)
setindex!(x::GenResp, y::Number, i::Int64, j::Int64) = setindex!(getfield(x, :resp), complex(y), i, j)
function show(io::IO, Resp::GenResp)
if get(io, :compact, false) == false
showresp_full(io, Resp)
else
resp = getfield(Resp, :resp)
M,N = size(resp)
M1 = min(M,2)
N1 = min(N,2)
print(io, "[")
for i = 1:M1
for j = 1:N1
print(io, repr(resp[i,j], context=:compact=>true))
if j == N1 && i < M1
if N > N1
print(io, " … ")
end
print(io, "; ")
elseif j == N1 && i == M1
if N > N1
print(io, " … ;")
end
if M > M1
print(io, " … ")
end
print(io, "]")
else
print(io, ", ")
end
end
end
print(io, " (")
print(io, getfield(Resp, :desc))
print(io, ")")
end
return nothing
end
function write(io::IO, R::GenResp)
write(io, Int64(sizeof(R.desc)))
write(io, getfield(R, :desc))
nr, nc = size(R.resp)
write(io, Int64(nr))
write(io, Int64(nc))
write(io, getfield(R, :resp))
return nothing
end
read(io::IO, ::Type{GenResp}) = GenResp(
String(fastread(io, fastread(io, Int64))),
read!(io, Array{Complex{Float64},2}(undef, fastread(io, Int64), fastread(io, Int64)))
)
isempty(R::GenResp) = min(isempty(R.desc), isempty(R.resp))
isequal(R1::GenResp, R2::GenResp) = min(isequal(R1.desc, R2.desc), isequal(R1.resp, R2.resp))
==(R1::GenResp, R2::GenResp) = isequal(R1, R2)
hash(R::GenResp) = hash(R.desc, hash(R.resp))
sizeof(R::GenResp) = 16 + sizeof(getfield(R, :desc)) + sizeof(getfield(R, :resp))
@doc """
    PZResp([a0 = a0, f0 = f0, p = p, z = z])
Instrument response with four fields. Optionally, fields can be set with keywords at creation.
| F | Type | Meaning
|:--- |:--- |:---- |
| a0 | Float32 | normalization constant. Equivalencies: |
| | | = DSP.jl Type `ZeroPoleGain`, field `:k` |
| | | = SEED RESP "A0 normalization factor:" |
| | | = SEED v2.4 Blockette [53], field 7 |
| | | != FDSN station XML v1.1 |
| | | <Response> |
| | | <InstrumentSensitivity> |
| | | <Value> |
| f0 | Float32 | frequency of normalization by a0; NOT |
| | | always geophone corner frequency |
| p | Array{Complex{Float32},1} | Complex poles of transfer function |
| z | Array{Complex{Float32},1} | Complex zeroes of transfer function |
    PZResp64([a0 = a0, f0 = f0, p = p, z = z])
As PZResp, but fields use Float64 precision.
PZResp(X::Array{Complex{T},2} [, rev=true])
Convert X to a PZResp64 (if `T == Float64`) or PZResp (default) object. Assumes format X = [p z], i.e., poles are in X[:,1] and zeros in X[:,2]; specify `rev=true` if the column assignments are X = [z p].
### External References
Seed v2.4 manual, http://www.fdsn.org/pdf/SEEDManual_V2.4.pdf
IRIS Resp format, https://ds.iris.edu/ds/nodes/dmc/data/formats/resp/
Julia DSP filter Types, https://juliadsp.github.io/DSP.jl/stable/filters/
See also: `resp_a0!`, `update_resp_a0!`, `DSP.ZeroPoleGain`
""" PZResp
mutable struct PZResp <: InstrumentResponse
a0::Float32
f0::Float32
p::Array{Complex{Float32},1}
z::Array{Complex{Float32},1}
function PZResp( a0::Float32,
f0::Float32,
p::Array{Complex{Float32},1},
z::Array{Complex{Float32},1} )
return new(a0, f0, p, z)
end
end
@doc (@doc PZResp)
mutable struct PZResp64 <: InstrumentResponse
a0::Float64
f0::Float64
p::Array{Complex{Float64},1}
z::Array{Complex{Float64},1}
function PZResp64(a0::Float64,
f0::Float64,
p::Array{Complex{Float64},1},
z::Array{Complex{Float64},1} )
return new(a0, f0, p, z)
end
end
# PZResp default
PZResp( ;
a0::Float32 = 1.0f0,
f0::Float32 = 1.0f0,
p::Array{Complex{Float32},1} = Array{Complex{Float32},1}(undef, 0),
z::Array{Complex{Float32},1} = Array{Complex{Float32},1}(undef, 0)
) = PZResp(a0, f0, p, z)
PZResp64( ;
a0::Float64 = 1.0,
f0::Float64 = 1.0,
p::Array{Complex{Float64},1} = Array{Complex{Float64},1}(undef, 0),
z::Array{Complex{Float64},1} = Array{Complex{Float64},1}(undef, 0)
) = PZResp64(a0, f0, p, z)
# How we read from file
# PZResp(a0::Float32, f0::Float32, pr::Array{Float32,1}, pi::Array{Float32,1},
# zr::Array{Float32,1}, zi::Array{Float32,1}) = PZResp(a0, f0, complex.(pr, pi), complex.(zr, zi))
# PZResp64(a0::Float64, f0::Float64, pr::Array{Float64,1}, pi::Array{Float64,1},
# zr::Array{Float64,1}, zi::Array{Float64,1}) = PZResp64(a0, f0, complex.(pr, pi), complex.(zr, zi))
# Convert from a 2-column complex response
function PZResp(X::Array{Complex{T},2}; rev::Bool=false) where {T <: AbstractFloat}
@assert size(X,2) == 2
if rev
p = X[:,2]
z = X[:,1]
else
p = X[:,1]
z = X[:,2]
end
if T == Float64
return PZResp64(1.0, 1.0, p, z)
else
return PZResp(1.0f0, 1.0f0, p, z)
end
end
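# Construction sketch (added comment; pole/zero values are hypothetical):
#
#   r1 = PZResp(p = Complex{Float32}[-0.15f0+0.15f0im, -0.15f0-0.15f0im],
#               z = Complex{Float32}[0.0f0+0.0f0im])
#   X  = [complex(-0.15,  0.15) complex(0.0, 0.0);
#         complex(-0.15, -0.15) complex(0.0, 0.0)]
#   r2 = PZResp(X)    # eltype is Complex{Float64}, so this returns a PZResp64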
function show(io::IO, Resp::Union{PZResp,PZResp64})
if get(io, :compact, false) == false
showresp_full(io, Resp)
else
c = :compact => true
print(io, "a0 ", repr(getfield(Resp, :a0), context=c), ", ",
"f0 ", repr(getfield(Resp, :f0), context=c), ", ",
length(getfield(Resp, :z)), "z, ",
length(getfield(Resp, :p)), "p")
end
return nothing
end
function write(io::IO, R::Union{PZResp,PZResp64})
write(io, R.a0)
write(io, R.f0)
p = getfield(R, :p)
write(io, Int64(lastindex(p)))
write(io, p)
z = getfield(R, :z)
write(io, Int64(lastindex(z)))
write(io, z)
return nothing
end
read(io::IO, ::Type{PZResp}) = PZResp(
fastread(io, Float32),
fastread(io, Float32),
read!(io, Array{Complex{Float32},1}(undef, fastread(io, Int64))),
read!(io, Array{Complex{Float32},1}(undef, fastread(io, Int64)))
)
read(io::IO, ::Type{PZResp64}) = PZResp64(
fastread(io, Float64),
fastread(io, Float64),
read!(io, Array{Complex{Float64},1}(undef, fastread(io, Int64))),
read!(io, Array{Complex{Float64},1}(undef, fastread(io, Int64)))
)
isempty(R::Union{PZResp,PZResp64}) = min(
R.a0 == one(typeof(R.a0)),
R.f0 == one(typeof(R.f0)),
isempty(getfield(R, :p)),
isempty(getfield(R, :z))
)
function isequal(R1::Union{PZResp,PZResp64}, R2::Union{PZResp,PZResp64})
q = isequal(getfield(R1, :a0), getfield(R2, :a0))
if q == true
q = min(q, isequal(getfield(R1, :f0), getfield(R2, :f0)))
q = min(q, isequal(getfield(R1, :z), getfield(R2, :z)))
q = min(q, isequal(getfield(R1, :p), getfield(R2, :p)))
end
return q
end
==(R1::Union{PZResp,PZResp64}, R2::Union{PZResp,PZResp64}) = isequal(R1, R2)
function hash(R::Union{PZResp,PZResp64})
h = hash(R.a0)
h = hash(R.f0, h)
h = hash(R.p, h)
return hash(R.z, h)
end
sizeof(R::Union{PZResp,PZResp64}) = 32 + 2*sizeof(getfield(R, :a0)) + sizeof(getfield(R, :z)) + sizeof(getfield(R, :p))
const flat_resp = PZResp(p = Complex{Float32}[complex(1.0, 1.0)], z = Complex{Float32}[2.0/Complex(1.0, -1.0)])
# Multi-Stage Instrument Responses
@doc """
CoeffResp
Coefficient response object; a response is expressed as the coefficients of the
numerator `:b` and denominator `:a`.
| F | Type | Meaning |
|:--- |:--- |:---- |
| b | Array{Float64,1} | Numerator coefficients |
| a | Array{Float64,1} | Denominator coefficients |
!!! warning
translate_resp does not work on CoeffResp
""" CoeffResp
mutable struct CoeffResp <: InstrumentResponse
b::Array{Float64,1} # Numerator coeffs
a::Array{Float64,1} # Denominator coeffs
function CoeffResp( b::Array{Float64,1},
a::Array{Float64,1} )
return new(b, a)
end
end
# CoeffResp default
CoeffResp( ;
b::Array{Float64,1} = Float64[],
a::Array{Float64,1} = Float64[],
) = CoeffResp(b, a)
function show(io::IO, Resp::CoeffResp)
if get(io, :compact, false) == false
showresp_full(io, Resp)
else
c = :compact => true
b = repr(Resp.b, context=c)
bstr = length(b) > 10 ? b[1:9] * "…" : b
a = repr(Resp.a, context=c)
astr = length(a) > 10 ? a[1:9] * "…" : a
print(io, "b = ", bstr, ", a = ", astr)
end
return nothing
end
function write(io::IO, R::CoeffResp)
b = getfield(R, :b)
write(io, Int64(lastindex(b)))
write(io, b)
a = getfield(R, :a)
write(io, Int64(lastindex(a)))
write(io, a)
return nothing
end
read(io::IO, ::Type{CoeffResp}) = CoeffResp(
read!(io, Array{Float64,1}(undef, fastread(io, Int64))),
read!(io, Array{Float64,1}(undef, fastread(io, Int64)))
)
isempty(R::CoeffResp) = min(
isempty(getfield(R, :b)),
isempty(getfield(R, :a))
)
function isequal(R1::CoeffResp, R2::CoeffResp)
q = isequal(getfield(R1, :a), getfield(R2, :a))
if q == true
q = min(q, isequal(getfield(R1, :b), getfield(R2, :b)))
end
return q
end
==(R1::CoeffResp, R2::CoeffResp) = isequal(R1, R2)
hash(R::CoeffResp) = hash(R.b, hash(R.a))
sizeof(R::CoeffResp) = 16 + sizeof(getfield(R, :b)) + sizeof(getfield(R, :a))
const RespStage = Union{CoeffResp, PZResp, PZResp64, GenResp, Nothing}
"""
MultiStageResp
Multi-stage instrument response including digitization and decimation; contains
the level of detail that only German-speaking Swiss care about. Each stage
(subfield `:stage`) is another InstrumentResponse subtype with the following
optional information indexed within each field by stage number:
| F | Type | Meaning | Units
|:--- |:--- |:---- | :---
| stage | RespStage | response transfer func |
| fs | Float64 | input sample rate | Hz
| gain | Float64 | stage gain |
| fg | Float64 | frequency of gain | Hz
| delay | Float64 | stage delay | s
| corr | Float64 | correction applied | s
| fac | Int64 | decimation factor |
| os | Int64 | decimation offset |
| i | String | units in |
| o | String | units out |
!!! warning
translate_resp only modifies the first stage of a MultiStageResp.
"""
mutable struct MultiStageResp <: InstrumentResponse
stage::Array{RespStage,1}
fs::Array{Float64,1}
gain::Array{Float64,1}
fg::Array{Float64,1}
delay::Array{Float64,1}
corr::Array{Float64,1}
fac::Array{Int64,1}
os::Array{Int64,1}
i::Array{String,1}
o::Array{String,1}
function MultiStageResp()
return new(
Array{RespStage,1}(undef, 0),
Array{Float64,1}(undef, 0),
Array{Float64,1}(undef, 0),
Array{Float64,1}(undef, 0),
Array{Float64,1}(undef, 0),
Array{Float64,1}(undef, 0),
Array{Int64,1}(undef, 0),
Array{Int64,1}(undef, 0),
String[],
String[]
)
end
function MultiStageResp(n::UInt)
Resp = new(Array{RespStage,1}(undef,n),
zeros(Float64,n),
zeros(Float64,n),
zeros(Float64,n),
zeros(Float64,n),
zeros(Float64,n),
zeros(Int64,n),
zeros(Int64,n),
Array{String,1}(undef,n),
Array{String,1}(undef,n)
)
# Fill these fields with something to prevent undefined reference errors
fill!(Resp.stage, nothing)
fill!(Resp.i, "")
fill!(Resp.o, "")
return Resp
end
end
MultiStageResp(n::Int) = n > 0 ? MultiStageResp(UInt(n)) : MultiStageResp()
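# Construction sketch (added comment): pre-allocate a three-stage response,
# then fill in individual stages and stage metadata (values are hypothetical):
#
#   R = MultiStageResp(3)     # all stages initialized to `nothing`
#   R.stage[1] = PZResp()     # analog pole-zero stage
#   R.fs[2] = 100.0           # input sample rate of stage 2, in Hz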
function append!(R1::MultiStageResp, R2::MultiStageResp)
for f in (:stage, :fs, :gain, :fg, :delay, :corr, :fac, :os, :i, :o)
append!(getfield(R1, f), getfield(R2, f))
end
return nothing
end
function show(io::IO, Resp::MultiStageResp)
println(io, "MultiStageResp (", length(Resp.stage), " stages) with fields:")
if get(io, :compact, false) == false
W = max(80, displaysize(io)[2]) - show_os
for f in (:stage, :fs, :gain, :fg, :delay, :corr, :fac, :os, :i, :o)
fn = lpad(String(f), 5, " ")
if f == :stage
i = 1
N = length(Resp.stage)
str = "stage: [" * join([
string(typeof(Resp.stage[i]), " (", repr(Resp.stage[i], context=:compact => true), ")") for i = 1:N], ", ") * "]"
else
str = fn * ": " * repr(getfield(Resp,f), context=:compact => true)
end
println(io, str_trunc(str, W))
end
else
print(io, length(Resp.stage), "-stage MultiStageResp")
end
return nothing
end
function write(io::IO, R::MultiStageResp)
N = length(R.stage)
write(io, N)
codes = zeros(UInt8, N)
for i in 1:N
codes[i] = resptyp2code(R.stage[i])
end
write(io, codes)
for i in 1:N
if codes[i] != 0xff
write(io, R.stage[i])
end
end
for f in (:fs, :gain, :fg, :delay, :corr, :fac, :os)
write(io, getfield(R, f))
end
write_string_vec(io, R.i)
write_string_vec(io, R.o)
return nothing
end
function read(io::IO, ::Type{MultiStageResp})
N = fastread(io, Int64)
codes = fastread(io, N)
A = Array{RespStage,1}(undef, N)
fill!(A, nothing)
for i = 1:N
if codes[i] != 0xff
A[i] = read(io, code2resptyp(codes[i]))
end
end
R = MultiStageResp(N)
read!(io, R.fs)
read!(io, R.gain)
read!(io, R.fg)
read!(io, R.delay)
read!(io, R.corr)
read!(io, R.fac)
read!(io, R.os)
R.i = read_string_vec(io, BUF.buf)
R.o = read_string_vec(io, BUF.buf)
R.stage = A
return R
end
isempty(R::MultiStageResp) = isempty(R.stage)
isequal(A::MultiStageResp, R::MultiStageResp) =
minimum([isequal(getfield(A, f), getfield(R,f)) for f in fieldnames(MultiStageResp)])
==(A::MultiStageResp, R::MultiStageResp) = isequal(A::MultiStageResp, R::MultiStageResp)
function hash(R::MultiStageResp)
  h = hash([hash(s) for s in R.stage])
h = hash(R.fs, h)
h = hash(R.gain, h)
h = hash(R.fg, h)
h = hash(R.delay, h)
h = hash(R.corr, h)
h = hash(R.fac, h)
h = hash(R.os, h)
h = hash(R.i, h)
h = hash(R.o, h)
return h
end
sizeof(R::MultiStageResp) = 80 + 8*length(R.i) + 8*length(R.o) +
sum([sizeof(getfield(R, f)) for f in (:fs, :gain, :fg, :delay, :corr, :fac, :os)]) +
(isempty(R.stage) ? 0 : sum([sizeof(i) for i in R.stage])) +
sum([sizeof(j) for j in R.i]) +
sum([sizeof(j) for j in R.o])
const default_resp = PZResp()
# Define default values for keyword arguments.
mutable struct SLDefs
port::Int64
gap::Int64
kai::Int64
refresh::Real
seq::String
u::String
x_on_err::Bool
SLDefs( port::Int64,
gap::Int64,
kai::Int64,
refresh::Real,
seq::String,
u::String,
x_on_err::Bool
) = new(port, gap, kai, refresh, seq, u, x_on_err)
end
mutable struct FiltDefs
fl::Float64
fh::Float64
np::Int64
rp::Int64
rs::Int64
rt::String
dm::String
FiltDefs( fl::Float64,
fh::Float64,
np::Int64,
rp::Int64,
rs::Int64,
rt::String,
dm::String) = new(fl, fh, np, rp, rs, rt, dm)
end
mutable struct KWDefs
SL::SLDefs
Filt::FiltDefs
comp::UInt8
fmt::String
full::Bool
nd::Real
n_zip::Int64
nx_add::Int64
nx_new::Int64
opts::String
prune::Bool
rad::Array{Float64,1}
reg::Array{Float64,1}
si::Bool
src::String
to::Int64
v::Integer
w::Bool
y::Bool
end
"""
SeisIO.KW
A mutable structure containing default keyword argument values in SeisIO.
Arguments that accept keywords in SeisIO.KW use the default values when a
keyword isn't specified.
### Keywords
| KW | Default | Allowed Data Types | Meaning |
|----------|:-----------|:-------------------|:-------------------------------|
| comp | 0x00 | UInt8 | compress data on write?[^1] |
| fmt | "miniseed" | String | request data format |
| full | false | Bool | read full headers? |
| n_zip | 100000 | Int64 | compress if length(x) > n_zip |
| nd | 1 | Real | number of days per subrequest |
| nx_add | 360000 | Int64 | minimum length increase of an |
| | | | undersized data array |
| nx_new | 8640000 | Int64 | number of samples allocated |
| | | | for a new data channel |
| opts | "" | String | user-specified options[^2] |
| prune | true | Bool | call prune! after get_data? |
| rad | [] | Array{Float64,1} | radius search: `[center_lat,` |
| | | | `center_lon, r_min, r_max]` |
| | | | in decimal degrees (°) |
| reg | [] | Array{Float64,1} | geographic search region: |
| | | | `[min_lat, max_lat,` |
| | | | `min_lon, max_lon,` |
| | | | `min_dep, max_dep]` |
| | | | lat, lon in degrees (°) |
| | | | dep in km with down = + |
| si | true | Bool | autofill request station info? |
| src | "IRIS" | String | data source; `?seis_www` lists |
| to | 30 | Int64 | timeout (s) for web requests |
| v | 0 | Integer | verbosity |
| w | false | Bool | write requests to disk? |
| y | false | Bool | sync after web requests? |
[^1]: If `comp == 0x00`, never compress data; if `comp == 0x01`, only compress channel `i` if `length(S.x[i]) > KW.n_zip`; if `comp == 0x02`, always compress data.
[^2]: Format like an http request string, e.g. "szsrecs=true&repo=realtime" for FDSN. String shouldn't begin with an ampersand.
### SeisIO.KW.SL
Seedlink-specific keyword default values. SeedLink also uses some general keywords.
| Name | Default | Type | Description |
|:--------|:--------|:--------|:---------------------------------- |
| gap | 3600 | Int64 | allowed time since last packet [s] [^1] |
| kai | 600 | Int64 | keepalive interval [s] |
| port | 18000 | Int64 | port number |
| refresh | 20 | Real | base refresh interval [s] |
| seq | "" | String | starting sequence no. (hex), e.g., "5BE37A" |
| u | (iris) | String | Default URL ("rtserve.iris.washington.edu") |
| x_on_err | true | Bool | exit on error? |
[^1]: A channel is considered non-transmitting (hence, excluded from the SeedLink session) if the time since last packet exceeds `gap` seconds.
### SeisIO.KW.Filt
Default keyword values for time-series filtering.
| Name | Default | Type | Description |
|:------|:--------------|:--------|:------------------------------------|
| fl | 1.0 | Float64 | lower corner frequency [Hz] [^1] |
| fh | 15.0 | Float64 | upper corner frequency [Hz] [^1] |
| np | 4 | Int64 | number of poles |
| rp | 8 | Int64 | pass-band ripple (dB) |
| rs | 30 | Int64 | stop-band ripple (dB) |
| rt | "Bandpass" | String | response type (type of filter) |
| dm | "Butterworth" | String | design mode (name of filter) |
[^1]: Remember the (counter-intuitive) convention that the lower corner frequency (fl) is used in a Highpass filter, and fh is used in a Lowpass filter. This convention is preserved in SeisIO.
"""
const KW = KWDefs(
SLDefs(18000, # port::Int64
3600, # gap::Int64
600, # kai::Int64
20.0, # refresh::Real
"", # seq::String
"rtserve.iris.washington.edu", # u::String
true ), # x_on_err::Bool
FiltDefs(1.0, # fl::Float64
15.0, # fh::Float64
4, # np::Int64
8, # rp::Int64
30, # rs::Int64
"Bandpass", # rt::String
"Butterworth" ), # dm::String
0x00, # comp::UInt8
"miniseed", # fmt::String
false, # full::Bool
1, # nd::Real
100000, # n_zip::Int64
360000, # nx_add::Int64
8640000, # nx_new::Int64
"", # opts::String
true, # prune::Bool
Float64[], # rad: Array{Float64,1}
Float64[], # reg: Array{Float64,1}
true, # si::Bool
"IRIS", # src::String
30, # to::Int64
0, # v::Integer (verbosity)
false, # w::Bool (write to disk)
false) # y::Bool (sync)
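# Usage sketch (added comment): KW is a mutable struct bound to a const, so
# session-wide defaults can be changed in place, e.g.
#
#   SeisIO.KW.v = 1            # more verbose web requests by default
#   SeisIO.KW.SL.port = 18000  # SeedLink port (this is already the default)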
export SeisChannel
@doc (@doc SeisData)
mutable struct SeisChannel <: GphysChannel
id ::String
name ::String
loc ::InstrumentPosition
fs ::Float64
gain ::Float64
resp ::InstrumentResponse
units ::String
src ::String
misc ::Dict{String,Any}
notes ::Array{String,1}
t ::Array{Int64,2}
x ::FloatArray
function SeisChannel(
id ::String,
name ::String,
loc ::InstrumentPosition,
fs ::Float64,
gain ::Float64,
resp ::InstrumentResponse,
units ::String,
src ::String,
misc ::Dict{String,Any},
notes ::Array{String,1},
t ::Array{Int64,2},
x ::FloatArray
)
return new(id, name, loc, fs, gain, resp, units, src, misc, notes, t, x)
end
end
# Are keywords type-stable now?
SeisChannel(;
id ::String = "",
name ::String = "",
loc ::InstrumentPosition = GeoLoc(),
fs ::Float64 = default_fs,
gain ::Float64 = default_gain,
resp ::InstrumentResponse = PZResp(),
units ::String = "",
src ::String = "",
misc ::Dict{String,Any} = Dict{String,Any}(),
notes ::Array{String,1} = Array{String,1}(undef, 0),
t ::Array{Int64,2} = Array{Int64,2}(undef, 0, 2),
x ::FloatArray = Array{Float32,1}(undef, 0)
) = SeisChannel(id, name, loc, fs, gain, resp, units, src, misc, notes, t, x)
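# Construction sketch (added comment; channel metadata are hypothetical):
#
#   C = SeisChannel(id = "UW.SEP..EHZ", fs = 100.0, units = "m/s",
#                   loc = GeoLoc(lat = 46.2, lon = -122.19, el = 1500.0))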
function getindex(S::SeisData, j::Int)
C = SeisChannel()
for f in datafields
setfield!(C, f, getindex(getfield(S,f), j))
end
return C
end
setindex!(S::SeisData, C::SeisChannel, j::Int) = (
[(getfield(S, f))[j] = getfield(C, f) for f in datafields];
return S)
function isempty(Ch::SeisChannel)
q::Bool = min(Ch.gain == default_gain, Ch.fs == default_fs)
if q == true
for f in (:id, :loc, :misc, :name, :notes, :resp, :src, :t, :units, :x)
q = min(q, isempty(getfield(Ch, f)))
end
end
return q
end
# ============================================================================
# Conversion and push to SeisData
function SeisData(C::SeisChannel)
S = SeisData(1)
for f in datafields
setindex!(getfield(S, f), getfield(C, f), 1)
end
return S
end
function push!(S::SeisData, C::SeisChannel)
for i in datafields
push!(getfield(S,i), getfield(C,i))
end
S.n += 1
return nothing
end
# This intentionally undercounts exotic objects in :misc (e.g. a nested Dict)
# because those objects aren't written to disk or created by SeisIO
function sizeof(C::SeisChannel)
s = 96
for f in datafields
v = getfield(C,f)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc
k = collect(keys(v))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(v)
s += sizeof(p)
if typeof(p) == Array{String,1}
s += sum([sizeof(j) for j in p])
end
end
end
end
return s
end
function write(io::IO, S::SeisChannel)
write(io, Int64(sizeof(S.id)))
write(io, S.id) # id
write(io, Int64(sizeof(S.name)))
write(io, S.name) # name
write(io, loctyp2code(S.loc))
write(io, S.loc) # loc
write(io, S.fs) # fs
write(io, S.gain) # gain
write(io, resptyp2code(S.resp))
write(io, S.resp) # resp
write(io, Int64(sizeof(S.units)))
write(io, S.units) # units
write(io, Int64(sizeof(S.src)))
write(io, S.src) # src
write_misc(io, S.misc) # misc
write_string_vec(io, S.notes) # notes
write(io, Int64(size(S.t,1)))
write(io, S.t) # t
write(io, typ2code(eltype(S.x)))
write(io, Int64(length(S.x)))
write(io, S.x) # x
return nothing
end
read(io::IO, ::Type{SeisChannel}) = SeisChannel(
String(fastread(io, fastread(io, Int64))), # id
String(fastread(io, fastread(io, Int64))), # name
read(io, code2loctyp(fastread(io))), # loc
fastread(io, Float64), # fs
fastread(io, Float64), # gain
read(io, code2resptyp(fastread(io))), # resp
String(fastread(io, fastread(io, Int64))), # units
String(fastread(io, fastread(io, Int64))), # src
read_misc(io, getfield(BUF, :buf)), # misc
read_string_vec(io, getfield(BUF, :buf)), # notes
read!(io, Array{Int64, 2}(undef, fastread(io, Int64), 2)), # t
read!(io, Array{code2typ(read(io,UInt8)), 1}(undef, fastread(io, Int64))), # x
)
convert(::Type{SeisData}, C::SeisChannel) = SeisData(C)
export SeisData
# This is type-stable for S = SeisData() but not for keyword args
@doc """
SeisData
A custom structure designed to contain the minimum necessary information for
processing univariate geophysical data.
SeisChannel
A single channel designed to contain the minimum necessary information for
processing univariate geophysical data.
## Fields
| **Field** | **Description** |
|:-------|:------ |
| :n | Number of channels [^1] |
| :c | TCP connections feeding data to this object [^1] |
| :id | Channel id. Uses NET.STA.LOC.CHA format when possible |
| :name | Freeform channel name |
| :loc | Location (position) vector; any subtype of InstrumentPosition |
| :fs | Sampling frequency in Hz; fs=0.0 for irregularly-sampled data. |
| :gain | Scalar gain |
| :resp | Instrument response; any subtype of InstrumentResponse |
| :units | String describing data units. UCUM standards are assumed. |
| :src | Freeform string describing data source. |
| :misc | Dictionary for non-critical information. |
| :notes | Timestamped notes; includes automatically-logged information. |
| :t | Matrix of time gaps in integer μs, formatted [Sample# Length] |
| :x | Time-series data |
[^1]: Not present in SeisChannel objects.
See also: `InstrumentPosition`, `PZResp`
""" SeisData
mutable struct SeisData <: GphysData
n::Int64
id::Array{String,1} # id
name::Array{String,1} # name
loc::Array{InstrumentPosition,1} # loc
fs::Array{Float64,1} # fs
gain::Array{Float64,1} # gain
resp::Array{InstrumentResponse,1} # resp
units::Array{String,1} # units
src::Array{String,1} # src
misc::Array{Dict{String,Any},1} # misc
notes::Array{Array{String,1},1} # notes
t::Array{Array{Int64,2},1} # time
x::Array{FloatArray,1} # data
c::Array{TCPSocket,1} # connections
function SeisData()
return new(0,
Array{String,1}(undef,0),
Array{String,1}(undef,0),
Array{InstrumentPosition,1}(undef,0),
Array{Float64,1}(undef,0),
Array{Float64,1}(undef,0),
Array{InstrumentResponse,1}(undef,0),
Array{String,1}(undef,0),
Array{String,1}(undef,0),
Array{Dict{String,Any},1}(undef,0),
Array{Array{String,1},1}(undef,0),
Array{Array{Int64,2},1}(undef,0),
Array{FloatArray,1}(undef,0),
Array{TCPSocket,1}(undef,0)
)
end
function SeisData( n::Int64,
id::Array{String,1} , # id
name::Array{String,1} , # name
loc::Array{InstrumentPosition,1} , # loc
fs::Array{Float64,1} , # fs
gain::Array{Float64,1} , # gain
resp::Array{InstrumentResponse,1} , # resp
units::Array{String,1} , # units
src::Array{String,1} , # src
misc::Array{Dict{String,Any},1} , # misc
notes::Array{Array{String,1},1} , # notes
t::Array{Array{Int64,2},1} , # time
x::Array{FloatArray,1})
return new(n,
id, name, loc, fs, gain, resp, units, src, misc, notes, t, x,
Array{TCPSocket,1}(undef,0)
)
end
function SeisData(n::UInt)
S = new(n,
Array{String,1}(undef,n),
Array{String,1}(undef,n),
Array{InstrumentPosition,1}(undef,n),
Array{Float64,1}(undef,n),
Array{Float64,1}(undef,n),
Array{InstrumentResponse,1}(undef,n),
Array{String,1}(undef,n),
Array{String,1}(undef,n),
Array{Dict{String,Any},1}(undef,n),
Array{Array{String,1},1}(undef,n),
Array{Array{Int64,2},1}(undef,n),
Array{FloatArray,1}(undef,n),
Array{TCPSocket,1}(undef,0)
)
# Fill these fields with something to prevent undefined reference errors
fill!(S.id, "") # id
fill!(S.name, "") # name
fill!(S.src, "") # src
fill!(S.units, "") # units
fill!(S.fs, default_fs) # fs
fill!(S.gain, default_gain) # gain
for i = 1:n
S.notes[i] = Array{String,1}(undef,0) # notes
S.misc[i] = Dict{String,Any}() # misc
S.t[i] = Array{Int64,2}(undef,0,2) # t
S.x[i] = Array{Float32,1}(undef,0) # x
S.loc[i] = GeoLoc() # loc
S.resp[i] = PZResp() # resp
end
return S
end
SeisData(n::Int) = n > 0 ? SeisData(UInt(n)) : SeisData()
end
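# Construction sketch (added comment): SeisData(3) pre-allocates three empty
# channels; channels can also be appended one at a time (ids are hypothetical):
#
#   S = SeisData(3)
#   S.id[1] = "UW.SEP..EHZ"
#   push!(S, SeisChannel(id = "UW.HYPO..EHZ"))   # S.n is now 4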
# This intentionally undercounts exotic objects in :misc (e.g. a nested Dict)
# because those objects aren't written to disk or created by SeisIO
function sizeof(S::SeisData)
s = sizeof(S.c) + 120
for f in datafields
V = getfield(S,f)
s += sizeof(V)
for i = 1:S.n
v = getindex(V, i)
s += sizeof(v)
if f == :notes
if !isempty(v)
s += sum([sizeof(j) for j in v])
end
elseif f == :misc
k = collect(keys(v))
s += sizeof(k) + 64 + sum([sizeof(j) for j in k])
for p in values(v)
s += sizeof(p)
if typeof(p) == Array{String,1}
s += sum([sizeof(j) for j in p])
end
end
end
end
end
return s
end
function read(io::IO, ::Type{SeisData})
Z = getfield(BUF, :buf)
L = getfield(BUF, :int64_buf)
# read begins ------------------------------------------------------
N = fastread(io, Int64)
checkbuf_strict!(L, 2*N)
fast_readbytes!(io, Z, 3*N)
c1 = copy(Z[1:N])
c2 = copy(Z[N+1:2*N])
y = code2typ.(getindex(Z, 2*N+1:3*N))
cmp = fastread(io)
read!(io, L)
nx = getindex(L, N+1:2*N)
if cmp == 0x01
checkbuf_8!(Z, maximum(nx))
end
return SeisData(N,
read_string_vec(io, Z),
read_string_vec(io, Z),
InstrumentPosition[read(io, code2loctyp(getindex(c1, i))) for i = 1:N],
fastread(io, Float64, N),
fastread(io, Float64, N),
InstrumentResponse[read(io, code2resptyp(getindex(c2, i))) for i = 1:N],
read_string_vec(io, Z),
read_string_vec(io, Z),
[read_misc(io, Z) for i = 1:N],
[read_string_vec(io, Z) for i = 1:N],
[read!(io, Array{Int64, 2}(undef, getindex(L, i), 2)) for i = 1:N],
FloatArray[cmp == 0x01 ?
(fast_readbytes!(io, Z, getindex(nx, i)); Blosc.decompress(getindex(y,i), Z)) :
read!(io, Array{getindex(y,i), 1}(undef, getindex(nx, i)))
for i = 1:N])
end
function write(io::IO, S::SeisData)
N = getfield(S, :n)
LOC = getfield(S, :loc)
RESP = getfield(S, :resp)
T = getfield(S, :t)
X = getfield(S, :x)
MISC = getfield(S, :misc)
NOTES = getfield(S, :notes)
cmp = false
if KW.comp != 0x00
nx_max = maximum([sizeof(getindex(X, i)) for i = 1:S.n])
if (nx_max > KW.n_zip) || (KW.comp == 0x02)
cmp = true
Z = getfield(BUF, :buf)
checkbuf_8!(Z, nx_max)
end
end
codes = Array{UInt8,1}(undef, 3*N)
L = Array{Int64,1}(undef, 2*N)
# write begins ------------------------------------------------------
write(io, N)
p = fastpos(io)
fastskip(io, 19*N+1)
write_string_vec(io, S.id) # id
write_string_vec(io, S.name) # name
i = 0 # loc
while i < N
i = i + 1
loc = getindex(LOC, i)
setindex!(codes, loctyp2code(loc), i)
write(io, loc)
end
write(io, S.fs) # fs
write(io, S.gain) # gain
i = 0 # resp
while i < N
i = i + 1
resp = getindex(RESP, i)
setindex!(codes, resptyp2code(resp), N+i)
write(io, resp)
end
write_string_vec(io, S.units) # units
write_string_vec(io, S.src) # src
for i = 1:N; write_misc(io, getindex(MISC, i)); end # misc
for i = 1:N; write_string_vec(io, getindex(NOTES, i)); end # notes
i = 0 # t
while i < N
i = i + 1
t = getindex(T, i)
setindex!(L, size(t,1), i)
write(io, t)
end
i = 0 # x
while i < N
i = i + 1
x = getindex(X, i)
nx = lastindex(x)
if cmp
l = zero(Int64)
while l == 0
l = Blosc.compress!(Z, x, level=5)
(l > zero(Int64)) && break
nx_max = nextpow(2, nx_max)
checkbuf_8!(Z, nx_max)
@warn(string("Compression ratio > 1.0 for channel ", i, "; are data OK?"))
end
xc = view(Z, 1:l)
write(io, xc)
setindex!(L, l, N+i)
else
write(io, x)
setindex!(L, nx, N+i)
end
setindex!(codes, typ2code(eltype(x)), 2*N+i)
end
q = fastpos(io)
fastseek(io, p)
write(io, codes)
write(io, cmp)
write(io, L)
fastseek(io, q)
# write ends ------------------------------------------------------
return nothing
end
abstract type SeedBlk end
# [201] Murdock Event Detection Blockette (60 bytes)
mutable struct Blk201 <: SeedBlk
sig::Array{Float32,1}
flags::UInt8
t::Array{Int32,1}
det::String
snr::Array{UInt8,1} # includes spots for lookback and pick algorithm
Blk201() = new(zeros(Float32, 3),
0x00,
Array{Int32,1}(undef,7),
"None",
zeros(UInt8,8))
end
# [500] Timing Blockette (200 bytes)
mutable struct Blk500 <: SeedBlk
vco_correction::Float32
t::Array{Int32,1}
μsec::Int8
reception_quality::Int8
exception_count::UInt32
exception_type::String
clock_model::String
clock_status::String
Blk500() = new(0.0f0,
Array{Int32,1}(undef,7),
Int8(0),
Int8(0),
0x0000,
"",
"",
"")
end
# [2000] Variable Length Opaque Data Blockette
mutable struct Blk2000 <: SeedBlk
n::UInt32
NB::UInt16
os::UInt16
flag::UInt8
hdr::Array{UInt8,1}
data::Array{UInt8,1}
Blk2000() = new(
0x00000000,
0x0000,
0x0000,
0x00,
Array{UInt8,1}(undef,0),
Array{UInt8,1}(undef,0)
)
end
# Calibration blockettes: [300], [310], [320], [390]
mutable struct BlkCalib <: SeedBlk
t::Array{Int32,1}
n::UInt8
flags::UInt8
dur1::UInt32
dur2::UInt32
amplitude::Float32
period::Float32
channel::Array{UInt8,1}
ref::UInt32
coupling::Array{UInt8,1}
rolloff::Array{UInt8,1}
noise::Array{UInt8,1}
BlkCalib() = new( Array{Int32,1}(undef,7),
0x00,
0x00,
0x00000000,
0x00000000,
zero(Float32),
zero(Float32),
Array{UInt8,1}(undef, 3),
0x00000000,
Array{UInt8,1}(undef, 12),
Array{UInt8,1}(undef, 12),
Array{UInt8,1}(undef, 8)
)
end
mutable struct SeisIOBuf
fmt::UInt8 # 0x0a
nx::UInt16 # 0x1000
wo::UInt8 # 0x01
tc::Int32 # Time correction
swap::Bool
calibs::Array{UInt16,1}
# hdr:
seq::Vector{UInt8}
hdr::Vector{UInt8}
hdr_old::Vector{UInt8}
id::Array{UInt8,1}
id_str::String
#= order of u16:
1 year
2 jdy
3 record start time
4 beginning of data
5 first blockette
6 position of next blockette (relative to record begin) =#
# Values read from file/stream
dt::Float64
r1::Int16
r2::Int16
r1_old::Int16
r2_old::Int16
Δ::Int64
xs::Bool
k::Int
n::UInt16
# Data-related
x::Array{Float32,1} # Data buffer
buf::Array{UInt8,1} # Buffer for reading UInt8 data
uint16_buf::Array{UInt16,1} # Used in SEG Y and SEED
uint32_buf::Array{UInt32,1} # Unsigned 32-bit steim-encoded data
int16_buf::Array{Int16,1} # Buffer for int16s in headers
int32_buf::Array{Int32,1} # Buffer for int32s in headers
int64_buf::Array{Int64,1} # Buffer for int64s; used in rseis/wseis
sac_fv::Array{Float32,1}
sac_iv::Array{Int32,1}
sac_cv::Array{UInt8,1}
sac_dv::Array{Float64,1}
flags::Array{UInt8,1} # SEED flags (Stored as four UInt8s)
# For parsing dates
date_buf::Array{Int32,1}
# For mini-SEED data header
dh_arr::Array{UInt8, 1}
dh_buf::Base.GenericIOBuffer{Array{UInt8,1}}
# Blockette containers
B201::Blk201
B500::Blk500
B2000::Blk2000
Calib::BlkCalib
function SeisIOBuf()
new(0xff, # fmt
0x1000, # nx
0x01, # wo
zero(Int32), # tc
false, # swap
# SEED calibiration blockettes (all use the same parser)
UInt16[0x012c, 0x0136, 0x0140, 0x0186],
# SEED header
Array{UInt8,1}(undef,8), # seq::Vector{UInt8}
Array{UInt8,1}(undef,12), # hdr::Vector{UInt8}
Array{UInt8,1}(undef,12), # hdr_old::Vector{UInt8}
Array{UInt8,1}(undef,15), # id::Array{UInt8,1}
"", # id_str::String
# computed
0.0, # dt::Float64
zero(Int16), # r1::Int16
zero(Int16), # r2::Int16
zero(Int16), # r1_old::Int16
zero(Int16), # r2_old::Int16
0, # Δ::Int64
false, # xs::Bool
0, # k::Int
0x0000, # n::UInt16
# data-related arrays
Array{Float32,1}(undef, 65535), # x::Array{Float32,1}
Array{UInt8,1}(undef, 65535), # buf::Array{UInt8,1}
Array{UInt16,1}(undef, 6), # uint16_buf::Array{UInt16,1}
Array{UInt32,1}(undef, 16384), # uint32_buf::Array{UInt32,1}
Array{Int16,1}(undef, 62), # int16_buf::Array{Int16,1}
Array{Int32,1}(undef, 100), # int32_buf::Array{Int32,1}
Array{Int64,1}(undef, 8), # int64_buf::Array{Int64,1}
Array{Float32,1}(undef, 70), # sac_fv::Array{Float32,1}
Array{Int32,1}(undef, 40), # sac_iv::Array{Int32,1}
Array{UInt8,1}(undef, 192), # sac_cv::Array{UInt8,1}
Array{Float64,1}(undef,22), # sac_dv::Array{Float64,1}
Array{UInt8,1}(undef, 4), # flags::Array{UInt8,1}
# dedicated array for parsing dates
Array{Int32,1}(undef, 7), # date_buf::Array{Int32,1}
# dedicated array for mini-SEED data header
Array{UInt8,1}(undef, 48), # dh_arr::Array{UInt8,1}
IOBuffer(), # dh_buf::Base.GenericIOBuffer{Array{UInt8,1}}
# blockette fields
Blk201(), # Blk201:: Blk201
Blk500(), # Blk500:: Blk500
Blk2000(), # Blk2000:: Blk2000
BlkCalib() # Calib:: BlkCalib
)
end
end
const BUF = SeisIOBuf()
BUF.dh_buf = IOBuffer(BUF.dh_arr)
showtail(b::Bool) = b ? "…" : ""
ngaps(t::Array{Int64,2}) = max(0, size(t,1)-2 + (t[end,2] == 0 ? 0 : 1))
function str_trunc(str::String, W::Int64)
i = 0
j = firstindex(str)
for c in eachindex(str)
i += 1
if i > W-2
return str[1:j] * "… "
end
j = c
end
return(rpad(str, W))
end
function show_str(io::IO, S::Array{String,1}, w::Int, N::Int64)
for i = 1:N
print(io, str_trunc(S[i], w))
end
println(io, showtail(N<length(S)))
return nothing
end
function show_t(io::IO, T::Array{Array{Int64,2},1}, w::Int, N::Int64, fs::Array{Float64,1})
for i = 1:N
if isempty(T[i])
s = ""
elseif fs[i] == 0.0
s = string(timestamp(T[i][1,2]*μs), " (", size(T[i], 1), " vals)")
else
s = string(timestamp(T[i][1,2]*μs), " (", ngaps(T[i]), " gaps)")
end
print(io, str_trunc(s, w))
end
println(io, showtail(N<length(T)))
return
end
function mkxstr(N::Int64, X::Union{ Array{AbstractArray{Float64,1},1},
Array{AbstractArray{Float32,1},1},
Array{Union{AbstractArray{Float64,1},
AbstractArray{Float32,1}},1} })
# Fill matrix of X values
vx = 5
X_str = Array{String,2}(undef, vx, N)
fill!(X_str, "")
for j = 1:N
x = getindex(X, j)
nx = length(x)
if nx == 0
X_str[1,j] = "(empty)"
continue
else
if nx < vx
for i = 1:nx
X_str[i,j] = @sprintf("%+10.3e", x[i])
end
else
for i = 1:vx-3
X_str[i,j] = @sprintf("%+10.3e", x[i])
end
X_str[vx-2,j] = " ..."
X_str[vx-1,j] = @sprintf("%+10.3e", x[nx])
end
X_str[vx,j] = string("(nx = ", nx, ")")
end
end
return X_str
end
# Fill matrix of X value strings
function mkxstr(X::FloatArray)
vx = 5
X_str = Array{String,2}(undef, vx, 1)
fill!(X_str, "")
nx = lastindex(X)
if nx == 0
X_str[1,1] = "(empty)"
elseif nx < vx
for i = 1:nx
X_str[i,1] = @sprintf("%+10.3e", X[i])
end
else
nx_str = string(nx)
for i = 1:vx-3
X_str[i,1] = @sprintf("%+10.3e", X[i])
end
X_str[vx-2,1] = " ..."
X_str[vx-1,1] = @sprintf("%+10.3e", X[nx])
X_str[vx,1] = string("(nx = ", nx_str, ")")
end
return X_str
end
function show_x(io::IO, X_str::Array{String,2}, w::Int64, b::Bool)
(vx, N) = size(X_str)
# Display
for i = 1:vx
if i > 1
print(io, " "^show_os)
end
for j = 1:N
x_str = X_str[i,j]
L = length(x_str)
print(io, x_str)
if (x_str == "(empty)" || x_str == "") && N == 1
return nothing
end
print(io, " "^(w-L))
end
print(io, showtail(b))
if i < vx
print(io, "\n")
end
end
return nothing
end
function show_conn(io::IO, C::Array{TCPSocket,1})
print(io, string(sum([isopen(i) for i in C]), " open, ", length(C), " total"))
if !isempty(C)
for (m,c) in enumerate(C)
print(io, "\n")
if isopen(c)
(url,port) = getpeername(c)
print(io, " "^show_os, "(", m, ") ", url, ":", Int(port))
else
print(io, " "^show_os, "(", m, ") (closed)")
end
end
end
return nothing
end
summary(S::GphysData) = string(typeof(S), " with ", S.n, " channel", S.n == 1 ? "" : "s")
summary(S::GphysChannel) = string(typeof(S), " with ",
length(S.x), " sample", (length(S.x) == 1 ? "" : "s"), ", gaps: ", ngaps(S.t))
# GphysData
function show(io::IO, S::T) where {T<:GphysData}
W = max(80, displaysize(io)[2]) - show_os
w = min(W, 35)
nc = getfield(S, :n)
N = min(nc, div(W-1, w))
M = min(N+1, nc)
println(io, T, " with ", nc, " channels (", N, " shown)")
F = fieldnames(T)
for f in F
if (f in unindexed_fields) == false
targ = getfield(S, f)
t = typeof(targ)
fstr = uppercase(String(f))
print(io, lpad(fstr, show_os-2), ": ")
if t == Array{String,1}
show_str(io, targ, w, N)
elseif f == :notes || f == :misc
show_str(io, String[string(length(getindex(targ, i)), " entries") for i = 1:M], w, N)
elseif f == :pha
show_str(io, String[string(length(getindex(targ, i)), " phases") for i = 1:M], w, N)
elseif f == :t
show_t(io, targ, w, N, S.fs)
elseif f == :x
x_str = mkxstr(N, getfield(S, :x))
show_x(io, x_str, w, N<nc)
else
show_str(io, String[repr("text/plain", targ[i], context=:compact=>true) for i = 1:M], w, N)
end
elseif f == :c
print(io, "\n", lpad("C", show_os-2), ": ")
show_conn(io, S.c)
end
end
return nothing
end
show(S::SeisData) = show(stdout, S)
# GphysChannel
function show(io::IO, C::T) where T<:GphysChannel
W = max(80,displaysize(io)[2]-2)-show_os
w = min(W, 36)
nx = length(C.x)
F = fieldnames(T)
println(io, T, " with ", nx, " samples")
for f in F
targ = getfield(C, f)
t = typeof(targ)
fstr = uppercase(String(f))
print(io, lpad(fstr, show_os-2), ": ")
if t == String
println(io, targ)
elseif (t <: AbstractFloat || t <: InstrumentPosition || t<: InstrumentResponse)
println(io, repr("text/plain", targ, context=:compact=>true))
elseif f == :notes
println(io, string(length(targ), " entries"))
elseif f == :misc
println(io, string(length(targ), " entries"))
elseif f == :pha
println(io, string(length(targ), " phases"))
elseif f == :t
show_t(io, [targ], w, 1, [C.fs])
else
x_str = mkxstr(getfield(C, :x))
show_x(io, x_str, w, false)
end
end
return nothing
end
show(C::GphysChannel) = show(stdout, C)
function add_chan!(S::GphysData, C::GphysChannel, strict::Bool)
if isempty(S)
push!(S, C)
return 1
end
j = strict ? channel_match(S, C) : findid(S, C.id)
if j > 0
T1 = t_extend(S.t[j], C.t[1,2], length(C.x), S.fs[j])
if T1 != nothing
S.t[j] = T1
end
append!(S.x[j], C.x)
else
push!(S, C)
j = S.n
end
return j
end
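#= Usage sketch (hedged): append one parsed channel to an existing structure.
   `S` and `C` below are assumed to exist already (e.g. from a file reader);
   with strict=false, matching is by ID only.
    j = add_chan!(S, C, false)   # index of the channel that received C's data
=#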
export getbandcode
"""
getbandcode(fs, fc=FC)
Get SEED-compliant one-character band code corresponding to instrument sample
rate `fs` and corner frequency `FC`. If unset, `FC` is assumed to be 1 Hz.
"""
function getbandcode(fs::Real; fc::Real = 1.0)
fs ≥ 1000.0 && return fc ≥ 0.1 ? 'G' : 'F'
fs ≥ 250.0 && return fc ≥ 0.1 ? 'C' : 'D'
fs ≥ 80.0 && return fc ≥ 0.1 ? 'E' : 'H'
fs ≥ 10.0 && return fc ≥ 0.1 ? 'S' : 'B'
fs > 1.0 && return 'M'
fs > 0.1 && return 'L'
fs > 1.0e-2 && return 'V'
fs > 1.0e-3 && return 'U'
fs > 1.0e-4 && return 'R'
fs > 1.0e-5 && return 'P'
fs > 1.0e-6 && return 'T'
return 'Q'
end
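#= Usage sketch (hedged; return values follow the mapping above):
    getbandcode(100.0)            # 'E' (fc defaults to 1.0 Hz)
    getbandcode(100.0, fc=0.01)   # 'H' (corner frequency below 0.1 Hz)
    getbandcode(50.0)             # 'S'
    getbandcode(1.0)              # 'L'
=#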
export channel_match
channel_match( S::GphysData, j::Integer, fs::Float64) = (j == 0 ? 0 : S.fs[j] == fs ? j : 0)
function channel_match( S::GphysData,
j::Integer,
fs::Float64,
gain::AbstractFloat,
loc::InstrumentPosition,
resp::InstrumentResponse,
units::String
)
(j == 0) && (return 0)
return (
try
@assert(S.fs[j] == fs)
@assert(S.gain[j] == gain)
@assert(S.units[j] == units)
@assert(loc == S.loc[j])
@assert(resp == S.resp[j])
j
catch err
0
end
)
end
function channel_match( S::GphysData, C::GphysChannel )
j = findid(S, C.id)
j = channel_match( S, j, C.fs, C.gain, C.loc, C.resp, C.units)
return j
end
function channel_match( C::GphysChannel, D::GphysChannel ; use_gain::Bool=true )
ff = use_gain ? (:id, :fs, :gain, :loc, :resp, :units) : (:id, :fs, :loc, :resp, :units)
return all([isequal(getfield(C, f), getfield(D, f)) for f in ff ])
end
# This will seek out a match and correct unset values if a partial match is found
function cmatch_p!( C::GphysChannel, D::GphysChannel )
fs_match = max( C.fs == D.fs , C.fs == default_fs , D.fs == default_fs )
gain_match = max( C.gain == D.gain , C.gain == default_gain , D.gain == default_gain )
loc_match = max( C.loc == D.loc , C.loc == default_loc , D.loc == default_loc )
resp_match = max( C.resp == D.resp , C.resp == default_resp , D.resp == default_resp )
units_match = max( C.units == D.units, C.units == "" , D.units == "" )
m = min(C.id == D.id, fs_match, gain_match, loc_match, resp_match, units_match)
if m
# Fill any "unset" values in D from C
( D.fs == default_fs ) && ( D.fs = C.fs )
( D.gain == default_gain ) && ( D.gain = C.gain )
( D.loc == default_loc ) && ( D.loc = deepcopy(C.loc) )
( D.resp == default_resp ) && ( D.resp = deepcopy(C.resp) )
( D.units == "" ) && ( D.units = identity(C.units) )
# Fill any "unset" values in C from D
( C.fs == default_fs ) && ( C.fs = D.fs )
( C.gain == default_gain ) && ( C.gain = D.gain )
( C.loc == default_loc ) && ( C.loc = deepcopy(D.loc) )
( C.resp == default_resp ) && ( C.resp = deepcopy(D.resp) )
( C.units == "" ) && ( C.units = identity(D.units) )
end
return m
end
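#= Usage sketch (hedged): IDs are hypothetical, and this assumes the keyword
   constructor for SeisChannel; default_fs is the "unset" sample rate.
    C = SeisChannel(id = "UW.SEP..EHZ", fs = 100.0)
    D = SeisChannel(id = "UW.SEP..EHZ")     # fs left unset
    channel_match(C, D)                     # false: fs differs
    cmatch_p!(C, D)                         # true: partial match; sets D.fs = 100.0
=#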
@doc """
findid(id::String, S::GphysData)
findid(S::GphysData, id::String)
Get the index of the first channel in S whose ID matches `id`. Returns 0 if no
match is found.
findid(S::GphysData, T::GphysData)
Get index corresponding to the first channel in T that matches each ID in S;
equivalent to [findid(id,T) for id in S.id].
findid(C::SeisChannel, S::SeisData)
findid(S::SeisData, C::SeisChannel)
Get the index of the first channel `c` in S where `S.id[c] == C.id`.
""" findid
function findid(id::String, ID::Array{String,1})
c = 0
for i = 1:length(ID)
if ID[i] == id
c = i
break
end
end
return c
end
function findid(id::DenseArray{UInt8,1}, ID::Array{String,1})
c = 0
for i = 1:length(ID)
if codeunits(ID[i]) == id
c = i
break
end
end
return c
end
function findid(id::Union{Regex,String}, S::T) where {T<:GphysData}
j=0
for i=1:length(S.id)
if S.id[i] == id
j=i
break
end
end
return j
end
findid(S::T, id::Union{String,Regex}) where {T<:GphysData} = findid(id, S)
# DND: manual loop is intentional; findfirst is dramatically slower for this pattern
findid(C::TC, S::TS) where {TC<:GphysChannel, TS<:GphysData} = findid(getfield(C, :id), S)
findid(S::TS, C::TC) where {TC<:GphysChannel, TS<:GphysData} = findid(getfield(C, :id), S)
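#= Usage sketch (hedged; IDs are hypothetical):
    S = SeisData(2)
    S.id[1] = "UW.SEP..EHZ"; S.id[2] = "CC.VALT..BHZ"
    findid("CC.VALT..BHZ", S)    # 2
    findid(S, "XX.FAKE..HHZ")    # 0 (no match)
=#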
export filt_seis_chans!, get_seis_channels
"""
get_seis_channels(S::GphysData)
Get an array with the channel numbers of all seismic data channels in `S`.
Assumes each ID in S ends with an alphanumeric three-character channel code and that
ID fields are separated by periods: for example, two channels with IDs
"XX.YYY.00.EHZ" and "_.YHY" both have an instrument code of 'H'.
Channel codes less than two characters long (e.g. "Z" in "AA.BBB.CC.Z") are ignored.
SEED channel codes of seismic and seismoacoustic data (for which operations
like detrend! and taper! are sane) include D, G, H, J, L, M, N, P, Z.
See also: `inst_codes`
"""
function get_seis_channels(S::GphysData;
chans::ChanSpec=Int64[])
if chans == Int64[]
chans = Int64.(collect(1:S.n))
elseif typeof(chans) == UnitRange
chans = Int64.(collect(chans))
elseif typeof(chans) <: Integer
chans = [Int64(chans)]
end
keep = falses(length(chans))
@inbounds for (n,i) in enumerate(chans)
id = S.id[i]
L = length(id)
for j = L:-1:1
if id[j] == '.'
j > L-2 && break
if (id[j+2] in seis_inst_codes) && (length(S.x[i]) > 0)
keep[n] = true
break
end
end
end
end
return chans[keep]
end
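#= Usage sketch (hedged; IDs are hypothetical, and channels need non-empty :x):
    S = SeisData(2)
    S.id[1] = "UW.SEP..EHZ"; S.x[1] = randn(Float32, 10)   # seismic ('H')
    S.id[2] = "UW.XYZ..LKO"; S.x[2] = randn(Float32, 10)   # temperature ('K')
    get_seis_channels(S)    # [1]: only the seismic instrument code passes
=#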
@doc """
filt_seis_chans!(chans::ChanSpec, S::GphysData)
filt_seis_chans(chans::ChanSpec, S::GphysData)
Filter a channel list `chans` to channels in `S` that contain seismic data.
Assumes each ID in S ends with an alphanumeric three-character channel code and that
ID fields are separated by periods: for example, if two channels have IDs
"XX.YYY.00.EHZ" and "_.YHY", each has an instrument code of 'H'.
Channel codes less than two characters long (e.g. "Z" in "AA.BBB.CC.Z") are ignored.
SEED channel codes of seismic and seismoacoustic data (for which operations
like detrend! and taper! are sane) include D, G, H, J, L, M, N, P, Z.
See also: `inst_codes`
""" filt_seis_chans!
function filt_seis_chans!(chans::Array{Int64,1}, S::GphysData)
@inbounds for n = length(chans):-1:1
id = S.id[chans[n]]
L = length(id)
for j = L:-1:1
if id[j] == '.'
if (j > L-2)
deleteat!(chans, n)
break
end
if (id[j+2] in seis_inst_codes) == false
deleteat!(chans, n)
end
break
elseif j == 1
deleteat!(chans, n)
end
end
end
return nothing
end
# Given a SeisData structure S, create:
# H, a set of hashes where each unique hash matches a unique set of input
# parameters (e.g. :fs, eltype(:x))
# X, a set of views corresponding to each H
# Get sets of channels where all channels in inds[i] have matching properties
# as specified in A
function get_unique(S::T, A::Array{String,1}, chans::ChanSpec) where {T<:GphysData}
J = lastindex(A)
N = getfield(S, :n)
H = Array{UInt64,1}(undef, length(chans))
fields = fieldnames(T)
h = Array{UInt64,1}(undef, J)
@inbounds for (c,i) in enumerate(chans)
for (j, str) in enumerate(A)
sym = Symbol(str)
if sym in fields
h[j] = hash(getfield(S, sym)[i])
else
h[j] = hash(getfield(Main, sym)(getfield(S, :x)[i]))
end
end
H[c] = hash(h)
end
# Get unique hashes
Uh = unique(H)
Nh = length(Uh)
inds = Array{Array{Int64,1},1}(undef, Nh)
@inbounds for n = 1:Nh
inds[n] = Array{Int64,1}(undef,0)
for (c,i) in enumerate(chans)
# Uses an order of magnitude less memory than findall
if H[c] == Uh[n]
push!(inds[n], i)
continue
end
end
end
# One last pass...sort by size of eltype(S.x[i[1]]) in descending order
el_size = [sizeof(eltype(S.x[i[1]])) for i in inds]
ii = sortperm(el_size, rev=true)
inds = inds[ii]
return inds
end
# low-memory way of accessing data by segment
function get_views(S::GphysData, inds::Array{Int64,1})
L = Array{Int64,1}(undef,0)
X = Array{SubArray,1}(undef,0)
for i in inds
n_seg = size(S.t[i],1)-1
for k = 1:n_seg
si = S.t[i][k,1]
ei = S.t[i][k+1,1] - (k == n_seg ? 0 : 1)
push!(X, view(S.x[i], si:ei))
lx = ei-si+1
push!(L, lx)
end
end
ii = sortperm(L, rev=true)
X = X[ii]
L = L[ii]
return L,X
end
function get_views(C::GphysChannel)
L = Array{Int64,1}(undef,0)
X = Array{SubArray,1}(undef,0)
n_seg = size(C.t,1)-1
for k = 1:n_seg
si = C.t[k,1]
ei = C.t[k+1,1] - (k == n_seg ? 0 : 1)
push!(X, view(C.x, si:ei))
lx = ei-si+1
push!(L, lx)
end
ii = sortperm(L, rev=true)
X = X[ii]
L = L[ii]
return L,X
end
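#= Usage sketch (hedged): `C` is assumed to be a GphysChannel with gapped data.
    L, X = get_views(C)   # segment lengths and matching views into C.x,
                          # both sorted by segment length in descending order
=#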
export guess
function guess_ftype(io::IO, swap::Bool, sz::Int64, v::Integer)
str = String[]
# =========================================================================
# Robust file tests: exact "magic number", start sequence, etc....
# SeisIO Native ------------------------------------------------------------
seekstart(io)
try
@assert fastread(io, 6) == UInt8[0x53, 0x45, 0x49, 0x53, 0x49, 0x4f]
return ["seisio"]
catch err
(v > 2) && @warn(string("Test for SeisIO native format threw error:", err))
end
# AH-2 --------------------------------------------------------------------
seekstart(io)
try
mn = (swap ? bswap : identity)(fastread(io, Int32))
@assert mn == 1100
push!(str, "ah2")
catch err
(v > 2) && @warn(string("Test for AH-2 threw error:", err))
end
# Bottle ------------------------------------------------------------------
seekstart(io)
try
ms = (swap ? bswap : identity)(fastread(io, Int16))
@assert ms == 349
fastskip(io, 2)
@assert (swap ? bswap : identity)(fastread(io, Int32)) == 40
@assert (swap ? bswap : identity)(fastread(io, Float64)) > 0.0
@assert (swap ? bswap : identity)(fastread(io, Float32)) > 0.0f0
@assert (swap ? bswap : identity)(fastread(io, Int32)) > 0
@assert (swap ? bswap : identity)(fastread(io, Int32)) in (0, 1, 2)
push!(str, "bottle")
catch err
(v > 2) && @warn(string("Test for Bottle threw error:", err))
end
# GeoCSVm Lennartz ASCII --------------------------------------------------
seekstart(io)
try
line = readline(io)
@assert eof(io) == false
if occursin("GeoCSV", line)
while startswith(line, "#")
line = readline(io)
end
if length(split(line)) == 2
push!(str, "geocsv")
else
push!(str, "geocsv.slist")
end
else hdr = split(line)
if hdr[2] == "station"
push!(str, "lennartz")
end
end
catch err
(v > 2) && @warn(string("Test for GeoCSV threw error:", err))
end
# SUDS --------------------------------------------------------------------
seekstart(io)
try
@assert fastread(io) == 0x53
@assert iscntrl(Char(fastread(io))) == false
sid = (swap ? bswap : identity)(fastread(io, Int16))
nbs = (swap ? bswap : identity)(fastread(io, Int32))
nbx = (swap ? bswap : identity)(fastread(io, Int32))
@assert zero(Int16) ≤ sid < Int16(34) # should be in range 1:33
@assert nbs > zero(Int32)
@assert nbx ≥ zero(Int32)
push!(str, "suds")
catch err
(v > 2) && @warn(string("test for SUDS threw error:", err))
end
# mSEED -------------------------------------------------------------------
seekstart(io)
try
# Sequence number is numeric
seq = Char.(fastread(io, 6))
@assert(all(isnumeric.(seq)))
# Next character is one of 'D', 'R', 'M', 'Q'
@assert fastread(io) in (0x44, 0x52, 0x4d, 0x51)
push!(str, "mseed")
catch err
(v > 2) && @warn(string("test for mini-SEED threw error:", err))
end
# # FDSN station XML ---------------------------------------------------------
# seekstart(io)
# try
# @assert occursin("FDSNStationXML", String(fastread(io, 255)))
# push!(str, "sxml")
# catch err
# (v > 2) && @warn(string("test for station XML threw error:", err))
# end
#
# # SEED RESP ---------------------------------------------------------------
# seekstart(io)
# try
# line = readline(io)
# @assert eof(io) == false
# for i = 1:6
# while startswith(line, "#")
# line = readline(io)
# end
# @assert startswith(line, "B0")
# end
# push!(str, "resp")
# catch err
# (v > 2) && @warn(string("test for SEED RESP threw error:", err))
# end
# =========================================================================
# Non-robust file tests: heuristic checks on header values and plausible ranges
((v > 0) && isempty(str)) && @info("file has no unique identifier; checking content.")
# AH-1 --------------------------------------------------------------------
seekstart(io)
try
fastskip(io, 12)
mn = (swap ? bswap : identity)(fastread(io, Int32))
@assert mn == 6
fastskip(io, 8)
mn = (swap ? bswap : identity)(fastread(io, Int32))
@assert mn == 8
fastseek(io, 700)
mn = (swap ? bswap : identity)(fastread(io, Int32))
@assert mn == 80
fastseek(io, 784)
mn = (swap ? bswap : identity)(fastread(io, Int32))
@assert mn == 202
push!(str, "ah1")
catch err
(v > 2) && @warn(string("test for AH-1 threw error:", err))
end
# SAC ---------------------------------------------------------------------
seekstart(io)
try
autoswap = should_bswap(io)
seekstart(io)
delta = (autoswap ? bswap : identity)(fastread(io, Float32))
@assert delta ≥ 0.0f0
fastseek(io, 280)
tt = (autoswap ? bswap : identity).(read!(io, zeros(Int32, 5)))
@assert tt[1] > 1900
@assert 0 < tt[2] < 367
@assert -1 < tt[3] < 24
@assert -1 < tt[4] < 60
@assert -1 < tt[5] < 60
push!(str, "sac")
catch err
(v > 0) && @warn(string("test for SAC threw error:", err))
end
# SEGY --------------------------------------------------------------------
seekstart(io)
try
fastseek(io, 3212)
# First few Int16s; 3, 5, 7 are mandatory
shorts = read!(io, zeros(Int16, 7))
swap && (shorts .= bswap.(shorts))
if v > 2
println("δ = ", shorts[3], " μs, nx = ", shorts[5], ", fmt = ", shorts[7])
end
@assert shorts[3] > zero(Int16) # sample interval in μs
@assert shorts[5] ≥ zero(Int16) # number of samples per trace
@assert shorts[7] in one(Int16):Int16(8) # data format code
# Three UInt16s/Int16s, all mandatory
fastseek(io, 3501)
u16 = read!(io, zeros(UInt16, 3))
swap && (u16 .= bswap.(u16))
try
@assert u16[1] < 0x0400
@assert u16[2] in (0x0000, 0x0001)
@assert u16[3] < 0x8000 # equivalently, a positive Int16 value
catch
u16 .= bswap.(u16)
swap && @warn("Inconsistent file header endianness!")
end
@assert u16[1] < 0x0400
@assert u16[2] in (0x0000, 0x0001)
@assert u16[3] < 0x8000
push!(str, "segy")
catch err
(v > 0) && @warn(string("test for SEGY threw error:", err))
end
# UW ----------------------------------------------------------------------
seekstart(io)
try
N = (swap ? bswap : identity)(fastread(io, Int16))
@assert N ≥ zero(Int16)
[@assert (swap ? bswap : identity)(fastread(io, Int32)) ≥ zero(Int32) for i = 1:4]
fastskip(io, 26)
@assert fastread(io) in (0x20, 0x31, 0x32)
fastseekend(io)
fastskip(io, -4)
nstructs = (swap ? bswap : identity)(fastread(io, Int32))
(v > 2) && println("nstructs = ", nstructs)
@assert nstructs ≥ zero(Int32)
@assert (12*nstructs)+4 < fastpos(io)
push!(str, "uw")
catch err
(v > 2) && @warn(string("test for UW threw error:", err))
end
# Win32 -------------------------------------------------------------------
seekstart(io)
try
date_arr = zeros(Int64, 6)
fastskip(io, 4)
date_hex = fastread(io, 8)
t_new = datehex2μs!(date_arr, date_hex)
(v > 2) && println(u2d(t_new*μs))
@assert t_new > 0
fastskip(io, 4)
nb = (swap ? bswap : identity)(fastread(io, UInt32))
@assert (nb + fastpos(io)) ≤ sz
fastskip(io, 4)
V = (swap ? bswap : identity)(fastread(io, UInt16))
C = UInt8(V >> 12)
N = V & 0x0fff
@assert C in 0x00:0x04
@assert 0x0000 < N < 0x0066 # No station in Japan samples above 100 Hz
push!(str, "win32")
catch err
(v > 2) && @warn(string("test for win32 threw error:", err))
end
# PASSCAL -----------------------------------------------------------------
seekstart(io)
try
fastseek(io, 114)
nx = (swap ? bswap : identity)(fastread(io, Int16))
dt = (swap ? bswap : identity)(fastread(io, Int16))
fastseek(io, 156)
yy = (swap ? bswap : identity)(fastread(io, Int16))
jj = (swap ? bswap : identity)(fastread(io, Int16))
@assert yy in Int16(1950):Int16(3000)
@assert jj in one(Int16):Int16(366)
if dt == typemax(Int16)
fastseek(io, 200)
dt = (swap ? bswap : identity)(fastread(io, Int32))
end
if nx == typemax(Int16)
fastseek(io, 228)
nx = (swap ? bswap : identity)(fastread(io, Int32))
end
@assert dt > zero(Int32)
@assert nx > zero(Int32)
push!(str, "passcal")
catch err
(v > 2) && @warn(string("test for PASSCAL threw error:", err))
end
return str
end
@doc """
function guess(fname[, v=V])
Try to guess the file type of file `fname`. Keyword `v` controls verbosity.
Only recognizes file formats supported by SeisIO.read_data.
Returns a tuple: (ftype::String, swap::Bool)
* `ftype` is the file type string to pass to `read_data`, except in these cases:
+ if ftype == "unknown", guess couldn't identify the file type.
+ if ftype contains a comma-separated list, the file type couldn't be
uniquely determined.
* `swap` determines whether or not file should be byte-swapped by `read_data`.
Generally `swap=true` for big-Endian files, with two exceptions:
+ in SAC and mini-SEED, tests for endianness are built into the file format,
so the value of `swap` is irrelevant.
+ if ftype = "unknown", swap=nothing is possible.
### Warnings
1. false positives are possible for file formats outside the scope of SeisIO.
2. SEGY endianness isn't reliable. In theory, SEGY headers are big-endian; in
practice, SEGY headers are whatever the manufacturer imagines them to be, and
endianness can be little, or mixed (e.g., a common situation is little-endian
file header and big-endian trace header).
""" guess
function guess(file::String; v::Integer=KW.v)
safe_isfile(file) || error("File not found!")
sz = stat(file).size
io = open(file, "r")
str_le = guess_ftype(io, false, sz, v)
str_be = guess_ftype(io, true, sz, v)
close(io)
swap = false
if length(str_le) == 0 && length(str_be) == 0
ftype = ["unknown"]
elseif length(str_be) == 0
ftype = str_le
elseif length(str_le) == 0
swap = true
ftype = str_be
else
if str_le == str_be
swap = false
ftype = str_le
else
swap = nothing
ftype = unique(vcat(str_le, str_be))
end
end
fstr = length(ftype) > 1 ? join(ftype, ",") : ftype[1]
return (fstr, swap)
end
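#= Usage sketch (hedged): the file name is hypothetical, and this assumes
   read_data accepts a `swap` keyword for the identified format.
    (fmt, swap) = guess("2019.001.00.00.00.000.UW.SEP..EHZ.R.SAC")
    if fmt != "unknown" && !occursin(",", fmt)
      S = read_data(fmt, "2019.001.00.00.00.000.UW.SEP..EHZ.R.SAC", swap = (swap == true))
    end
=#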
export inst_codes, inst_code
@doc """
inst_codes(S::GphysData)
Get the instrument code of each channel in `S`.
inst_code(S::GphysData, i::Integer)
Get the instrument code of channel `i`.
inst_code(C::GphysChannel)
Get the instrument code from `C.id`.
Assumes each ID ends with an alphanumeric three-character channel code and that
ID fields are separated by periods: for example, two channels with IDs
"XX.YYY.00.EHZ" and "_.YHY" each have an instrument code of 'H'.
Channel codes less than two characters long (e.g. "Z" in "AA.BBB.CC.Z") are ignored.
SEED channel codes of seismic and seismoacoustic data (for which operations
like `detrend!` and `taper!` are sane) include D, G, H, J, L, M, N, P, Z.
SEED channel codes of seismometers (for which `translate_resp!` and
`remove_resp!` are sane) are H, J, L, M, N, P, Z.
""" inst_codes
function inst_codes(S::GphysData)
N = S.n
codes = Array{Char, 1}(undef, N)
fill!(codes, '\0')
@inbounds for i = 1:N
id = S.id[i]
L = length(id)
for j = L:-1:1
if id[j] == '.'
j > L-2 && break
setindex!(codes, id[j+2], i)
break
end
end
end
return codes
end
@doc (@doc inst_codes)
function inst_code(S::GphysData, i::Integer)
N = S.n
id = S.id[i]
L = length(id)
c = '\0'
for j = L:-1:1
if id[j] == '.'
j > L-2 && break
c = id[j+2]
break
end
end
return c
end
@doc (@doc inst_codes)
function inst_code(C::GphysChannel)
L = length(C.id)
c = '\0'
for j = L:-1:1
if C.id[j] == '.'
j > L-2 && break
c = C.id[j+2]
break
end
end
return c
end
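#= Usage sketch (hedged; IDs are hypothetical):
    S = SeisData(2)
    S.id[1] = "UW.SEP..EHZ"; S.id[2] = "UW.XYZ..BDF"
    inst_codes(S)     # ['H', 'D']
    inst_code(S, 1)   # 'H'
=#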
export mkchans
function mkchans(chans::ChanSpec, S::GphysData; f::Symbol=:x, keepempty::Bool=false, keepirr::Bool=true)
chan_list = (if chans == Int64[]
Int64.(collect(1:S.n))
elseif typeof(chans) <: UnitRange
Int64.(collect(chans))
elseif typeof(chans) <: Integer
[Int64(chans)]
else
chans
end)
# added 2020-02-12; prevents processing an empty channel
k = trues(length(chan_list))
if keepempty == false
F = getfield(S, f)
for (j,i) in enumerate(chan_list)
if isempty(F[i])
k[j] = false
end
end
end
# added 2020-03-08: option to auto-delete irregular channels
if keepirr == false
for (j,i) in enumerate(chan_list)
if S.fs[i] == 0.0
k[j] = false
end
end
end
return chan_list[k]
end
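#= Usage sketch (hedged): assumes a three-channel SeisData `S` in which only
   channel 2 has empty :x.
    mkchans(Int64[], S)               # [1, 3]: empty channels dropped by default
    mkchans(1:3, S, keepempty=true)   # [1, 2, 3]
=#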
function nx_max(S::GphysData)
N = 0
window_lengths = Array{Int64,1}(undef,0)
t = Array{Int64,2}(undef,0,0)
for i = 1:S.n
if S.fs[i] > 0.0
t = getfield(S, :t)[i]
window_lengths = diff(t[:,1])
window_lengths[end] += 1
N = max(N, maximum(window_lengths))
end
end
return N
end
function nx_max(S::GphysData, C::ChanSpec)
N = 0
window_lengths = Array{Int64,1}(undef,0)
t = Array{Int64,2}(undef,0,0)
for i = 1:S.n
if i in C
if S.fs[i] > 0.0
t = getfield(S, :t)[i]
window_lengths = diff(t[:,1])
window_lengths[end] += 1
N = max(N, maximum(window_lengths))
end
end
end
return N
end
function nx_max(C::GphysChannel)
@assert C.fs > 0.0
window_lengths = diff(C.t[:,1])
window_lengths[end] += 1
return maximum(window_lengths)
end
# TO DO: Comment parse_chstr.jl, even I can't figure out what I've done here anymore
# function parse_charr(chan_in::Array{String,1}; d='.'::Char, fdsn=false::Bool)
function parse_charr(chan_in::Array{String, 1}, d::Char, fdsn::Bool)
N = length(chan_in)
chan_data = Array{String,2}(undef, 0, 5)
# Initial pass to parse to string array
for i = 1:N
chan_line = [strip(String(j)) for j in split(chan_in[i], d, keepempty=true, limit=5)]
L = length(chan_line)
if L < 5
resize!(chan_line, 5)
chan_line[L+1:5] .= ""
end
chan_data = vcat(chan_data, reshape(chan_line, 1, 5))
end
return fdsn == true ? minreq(String.(chan_data)) : map(String, chan_data)
end
function parse_chstr(chan_in::String, d::Char, fdsn::Bool, SL::Bool)
# function parse_chstr(chan_in::String; d=','::Char, fdsn=false::Bool, SL=false::Bool)
chan_out = Array{String, 2}(undef, 0, 5)
if safe_isfile(chan_in)
return parse_chstr(join([strip(j, ['\r','\n']) for j in
filter(i -> !startswith(i, ['#','*']), open(readlines, chan_in))],','), ',', false, false)
else
chan_data = [strip(String(j)) for j in split(chan_in, d)]
for j = 1:length(chan_data)
# Build array
tmp_data = map(String, split(chan_data[j], '.'))
L = length(tmp_data)
if L < 5
append!(tmp_data, Array{String,1}(undef, 5-L))
tmp_data[L+1:5] .= ""
end
chan_out = cat(chan_out, reshape(tmp_data, 1, 5), dims=1)
end
end
if fdsn == true
minreq!(chan_out)
end
N = SL ? 4 : 5
return chan_out[:,1:N]
end
function parse_sl(CC::Array{String,2})
L = size(CC,1)
S = CC[:,2].*(" "^L).*CC[:,1]
P = [(i = isempty(i) ? "??" : i) for i in CC[:,3]] .*
[(i = isempty(i) ? "???" : i) for i in CC[:,4]] .* fill(".", L) .*
[(i = isempty(i) ? "D" : i) for i in CC[:,5]]
return S,P
end
# FDSNWS
# http://service.iris.edu/fdsnws/dataselect/1/query?net=IU&sta=ANMO&loc=00&cha=BHZ&start=2010-02-27T06:30:00.000&end=2010-02-27T10:30:00.000
# IRISWS
# http://service.iris.edu/irisws/timeseries/1/query?net=IU&sta=ANMO&loc=00&cha=BHZ&starttime=2005-01-01T00:00:00&endtime=2005-01-02T00:00:00
# IRISWS
function build_stream_query(C::Array{String,1}, d0::String, d1::String; estr=""::String)
net_str = isempty(C[1]) ? estr : "net="*C[1]
sta_str = isempty(C[2]) ? estr : "&sta="*C[2]
loc_str = isempty(C[3]) ? estr : "&loc="*C[3]
cha_str = isempty(C[4]) ? estr : "&cha="*C[4]
return net_str * sta_str * loc_str * cha_str * string("&start=", d0, "&end=", d1)
end
# ============================================================================
# Purpose: Create the most compact set of requests, one per row
# """
# minreq!(S::Array{String,2})
#
# Reduce `S` to the most compact possible set of SeedLink request query strings
# that completely cover its string requests.
# """
function minreq!(S::Array{String,2})
d = ','
(M,N) = size(S)
K = Array{Int64, 1}(undef, N)
T = Array{String, 2}(undef, M, N)
for n = 1:N
for m = 1:M
T[m,n] = join(S[m,1:N.!=n],d)
end
K[n] = length(unique(T[:,n]))
end
(L,J) = findmin(K)
if L != M
V = T[:,J]
U = unique(V)
Q = Array{String,2}(undef, L, N)
for i = 1:L
j = findall(V.==U[i])
Q[i,1:N.!=J] = split(V[j[1]],d)
Q[i,J] = join(S[j,J],d)
end
S = Q
end
S = map(String, S)
return nothing
end
minreq(S::Array{String,2}) = (T = deepcopy(S); minreq!(T); return T)
function split_id(sid::AbstractString; c::String=".")
id = String.(split(sid, c, keepempty=true))
L = length(id)
if L < 4
id2 = Array{String, 1}(undef, 4-L)
fill!(id2, "")
append!(id, id2)
elseif L > 4
deleteat!(id, 5:L)
end
return id
end
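#= Usage sketch:
    split_id("UW.SEP..EHZ")             # ["UW", "SEP", "", "EHZ"]
    split_id("UW.SEP")                  # ["UW", "SEP", "", ""]
    split_id("IU_ANMO_00_BHZ", c="_")   # ["IU", "ANMO", "00", "BHZ"]
=#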
#=
Purpose: time utilities that depend on custom Types go here
Difference from CoreUtils/time.jl functions here require SeisIO Types to work
=#
function mk_t!(C::GphysChannel, nx::Integer, ts_new::Int64)
T = Array{Int64, 2}(undef, 2, 2)
setindex!(T, one(Int64), 1)
setindex!(T, nx, 2)
setindex!(T, ts_new, 3)
setindex!(T, zero(Int64), 4)
setfield!(C, :t, T)
return nothing
end
function check_for_gap!(S::GphysData, i::Integer, ts_new::Int64, nx::Integer, v::Integer)
Δ = round(Int64, sμ / getindex(getfield(S, :fs), i))
T = getindex(getfield(S, :t), i)
T1 = t_extend(T, ts_new, nx, Δ)
if T1 != nothing
if v > 1
te_old = endtime(T, Δ)
δt = ts_new - te_old
(v > 1) && println(stdout, lpad(S.id[i], 15), ": time difference = ", lpad(δt, 16), " μs (old end = ", lpad(te_old, 16), ", new start = ", lpad(ts_new, 16), ", gap = ", lpad(δt-Δ, 16), " μs)")
end
S.t[i] = T1
end
return nothing
end
export units2ucum, validate_units, vucum
# Units issues
# (1) UCUM has not standardized the abbreviations of units used in information technology. For now, use "By" for Bytes and "bit" for bits, cf. https://hl7.org/fhir/2017Jan/valueset-ucum-units.html ; but neither appears in the most recent HL7 UCUM value set.
# Fixing a few common units mistakes that I keep seeing
const units_table = Dict{String,String}(
"bits/sec" => "bit/s",
"pascals" => "Pa",
"degrees" => "deg",
"reboots" => "{reboots}",
"percent" => "%",
"gaps" => "{gaps}",
"bytes" => "By",
"cycles" => "{cycles}",
"counts" => "{counts}",
"COUNTS" => "{counts}",
"M**3/M**3" => "m3/m3",
"M/S" => "m/s",
"PA" => "Pa",
"M/M" => "m/m",
"M/S**2" => "m/s2",
"M/S/S" => "m/s2",
"M" => "m"
)
fix_units(s::AbstractString) = get(units_table, s, s)
function strip_exp!(k::Array{Int64,1}, u::Array{UInt32,1}, j::Int64)
(j == length(u)) && return j
i = j
# ^+, ^-, **+, **-
if u[i] == 0x0000002b || u[i] == 0x0000002d
push!(k, i)
i = i+1
end
if i ≤ length(u)
while i < length(u)
if u[i] in 0x00000030:0x00000039 #'0':'9'
i = i+1
else # end of digits reached; break
break
end
end
# end
end
return i
end
# Home of a string function to convert units to SI
"""
units2ucum(str)
Convert unit string `str` to UCUM syntax.
Rules applied to strings:
* Deletes "power" notation for integer powers: (^, ^+, ^-, **, **+, **-)
* Deletes "power" notation for special cases: (^+1, ^-1, **+1, **-1)
* Replaces multiplication symbols (*, ⋅, ×) with .
"""
function units2ucum(str::String)
u = [UInt32(str[i]) for i in eachindex(str)]
# if the string contains '-' but no '/', insert a denominator break before
# the last alphabetical sequence preceding the '-'
m = 0
d = false
for i = 1:length(u)
if u[i] == 0x0000002f
d = false
break
elseif u[i] == 0x0000002d
if m == 0
m = i
end
d = true
end
end
if d
j = m
f = false
while j > 1
j -= 1
if (u[j] in 0x00000061:0x0000007a) || (u[j] in 0x00000041:0x0000005a)
# start of alphabetical unit sequence
if f == false
f = true
end
elseif f == true
insert!(u, j, 0x0000002f)
break
end
end
end
i = 1
k = Int64[]
while i < length(u)
# expressions that start with ^
if u[i] == 0x0000005e
#= Delete:
^
^+
^-
^+1
^-1
=#
push!(k, i)
i = strip_exp!(k, u, i+1)
# expressions that start with *
elseif u[i] == 0x0000002a
if i+1 ≤ length(u)
#= Delete:
**
**+
**-
**+1
**-1
=#
if u[i+1] == 0x0000002a
push!(k, i)
push!(k, i+1)
i = strip_exp!(k, u, i+2)
else
# Change to "."
u[i] = 0x0000002e
if i > 1
if u[i-1] == 0x0000002f
push!(k, i)
end
end
i = i+1
end
end
# expressions that start with /, (space)
elseif u[i] in (0x0000002f, 0x00000020)
# delete whitespace after a / or space
j = i
while j < length(u)
j = j+1
if u[j] in (0x000000d7, 0x00000020, 0x0000002e, 0x000022c5)
push!(k, j)
else
i = j-1
break
end
end
i = i+1
else
i = i+1
end
end
deleteat!(u, k)
# Replace multiplication operators with periods
for i = 1:length(u)
if u[i] in (0x000000d7, 0x00000020, 0x000022c5)
u[i] = 0x0000002e
end
end
# Get the position of the "/", if one exists
m = 0
for i = 1:length(u)
if u[i] == 0x0000002f
m = i
break
end
end
if m > 0
k = Int64[]
# delete "." in "./"
if m > 1
if u[m-1] == 0x0000002e
push!(k, m-1)
end
end
# delete all powers of "1" after the /
for i = m+1:length(u)
if u[i] == 0x00000031
if i == length(u)
push!(k, i)
break
elseif (u[i+1] in 0x00000030:0x00000039) == false
push!(k, i)
end
end
end
deleteat!(u, k)
end
return String(Char.(u))
end
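#= Usage sketch (hedged; expected outputs follow the rules above):
    units2ucum("m/s**2")   # "m/s2"
    units2ucum("nm/s^2")   # "nm/s2"
    units2ucum("m*s")      # "m.s"
    fix_units("M/S**2")    # "m/s2" (lookup table, no parsing)
=#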
"""
vucum(str::String)
Test whether `str` is a valid UCUM unit string.
"""
function vucum(str::String; v::Integer=0)
rstr = identity(str)
if any([occursin(c, rstr) for c in ('%', '^', '[', ']', '{', '}')])
rstr = replace(rstr, "%" => "%25")
rstr = replace(rstr, "^" => "%5E")
rstr = replace(rstr, "[" => "%5B")
rstr = replace(rstr, "]" => "%5D")
rstr = replace(rstr, "{" => "%7B")
rstr = replace(rstr, "}" => "%7D")
end
url = "https://ucum.nlm.nih.gov/ucum-service/v1/isValidUCUM/" * rstr
(v > 1) && println("URL = ", url)
tf = try
req = request("GET", url, timeout=30)
b = String(req.body)
if b == "true"
true
else
false
end
catch err
@warn(string("Error thrown for unit string: ", str))
false
end
return tf
end
"""
validate_units(S::GphysData)
Test whether unit strings in S.units are valid under the UCUM standard.
validate_units(C::GphysChannel)
Test whether C.units is valid under the UCUM standard.
"""
function validate_units(S::GphysData)
u = join(S.units, '.')
if vucum(u) == true
return trues(S.n)
else
isv = falses(S.n)
for i = 1:S.n
isv[i] = vucum(S.units[i])
end
return isv
end
end
validate_units(C::GphysChannel) = vucum(C.units)
ucum_to_seed = Dict{String,String}(
"{counts}" => "counts",
"m/s2" => "M/S**2"
)
# An order of magnitude faster than parse
function buf_to_uint(buf::Array{UInt8,1}, L::T) where T<:Integer
o = one(UInt64)
n = zero(UInt64)
p = UInt64(L-o)
i = zero(T)
t = UInt64(10)
while i < L
i += one(T)
n += UInt64(getindex(buf, i)-0x30)*t^p
p -= o
end
return n
end
function buf_to_int(buf::Array{UInt8,1}, L::Int64)
c = 10^(L-1)
n = zero(Int64)
for i = 1:L
n += c*(buf[i]-0x30)
c = div(c, 10)
end
return n
end
function buf_to_i16(buf::Array{UInt8,1}, i::Int16, j::Int16)
c = Int16(10)^(i-j)
n = zero(Int16)
for p = j:i
n += c * Int16(buf[p]-0x30)
c = div(c, Int16(10))
end
return n
end
# Factor of two faster than parse(Float64, str) for a string
# Order of magnitude faster than parse(Float64, String(uu)) for a UInt8 array
function buf_to_double(buf::Array{UInt8,1}, L::Int64)
read_state = 0x00
mant_sgn = 1.0e0
exp_sgn = 1.0e0
j = one(Int64)
zz = zero(Int64)
z = zero(Int8)
o = one(Int8)
i = z
imax = Int8(16)
p0 = Int64(10)^Int64(imax)
p = p0
t = Int64(10)
d = zz
m = zz
q = zz
v = zz
x = zz
while j ≤ L
c = buf[j]
if read_state == 0x00
if is_u8_digit(c) && (i < imax)
p = div(p,t)
i += o
v += Int64(c-0x30)*p
m = p
elseif c == 0x2d # '-'
mant_sgn = -1.0e0
elseif c == 0x2b # '+'
mant_sgn = 1.0e0
elseif c == 0x2e # '.'
# transition to state 1
read_state = 0x01
i = z
p = p0
elseif c in (0x45, 0x46, 0x65, 0x66) # 'E', 'F', 'e', 'f'
# transition to state 2 (no decimal)
read_state = 0x02
i = z
p = p0
end
elseif read_state == 0x01
if is_u8_digit(c) && i < imax
p = div(p,t)
i += o
d += Int64(c-0x30)*p
elseif c in (0x45, 0x46, 0x65, 0x66)
# transition to state 2
read_state = 0x02
i = z
p = p0
end
else
if is_u8_digit(c) && (i < imax)
p = div(p,t)
i += o
x += Int64(c-0x30)*p
q = p
elseif c == 0x2d # '-'
exp_sgn = -1.0e0
elseif c == 0x2b # '+'
exp_sgn = 1.0e0
else
break
end
end
j += 1
end
v1 = Float64(div(v, m))
v2 = Float64(d) / Float64(p0)
if q > zz
v3 = 10.0e0^(exp_sgn * Float64(div(x,q)))
else
v3 = one(Float64)
end
return mant_sgn * (v1 + v2) * v3
end
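#= Usage sketch (hedged):
    buf = Vector{UInt8}("-1.25e3")
    buf_to_double(buf, length(buf))           # -1250.0
    buf_to_uint(Vector{UInt8}("004217"), 6)   # 4217 (as a UInt64)
=#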
# purpose: read ASCII values from a stream
function parse_digits(io::IO, c::UInt8, N::UInt8)
v = zero(UInt32)
p = 0x00989680
i = 0x00
m = 0x00000001
while is_u8_digit(c)
fasteof(io) && break
if i < N
p = div(p, 0x0a)
i += 0x01
v += (c-0x30)*p
m = p
end
c = fastread(io)
end
fastskip(io, -1)
return v, m
end
function stream_int(io::IO, L::Int64)
p = 10^(L-1)
n = zero(Int64)
@inbounds for i = 1:L
c = fastread(io)
if is_u8_digit(c)
n += p*(c-0x30)
end
p = div(p, 10)
end
return n
end
function stream_float(io::IO, c::UInt8)
read_state = 0x00
zz = 0x00000000
p0 = 0x00989680
v = zz
m = 0x00000001
d = zz
x = zz
q = zz
msgn = true
esgn = true
read_exp = false
while true
c = fastread(io)
if is_u8_digit(c)
if read_exp
x,q = parse_digits(io, c, 0x04)
else
v,m = parse_digits(io, c, 0x08)
end
# '-'
elseif c == 0x2d
if read_exp
esgn = false
else
msgn = false
end
# '+'
elseif c == 0x2b
continue
# '.'
elseif c == 0x2e
c = fastread(io)
d, u = parse_digits(io, c, 0x08)
# 'E', 'F', 'e', 'f'
elseif c in (0x45, 0x46, 0x65, 0x66)
read_exp = true
sgn = false
else
break
end
end
v1 = Float32(div(v, m)) + Float32(d) / Float32(p0)
if q > zz
if esgn
v3 = 10.0f0^Float32(div(x,q))
else
v3 = 10.0f0^(-1.0f0*Float32(div(x,q)))
end
else
v3 = one(Float32)
end
if msgn
return v1 * v3
else
return -v1 * v3
end
end
function stream_time(io::IO, T::Array{Y,1}) where Y<:Integer
fill!(T, zero(Y))
i = mark(io)
k = 1
while true
c = fastread(io)
if c in (0x2d, 0x2e, 0x3a, 0x54)
L = fastpos(io)-i-1
reset(io)
T[k] = stream_int(io, L)
fastskip(io, 1)
i = mark(io)
k += 1
# exit on any non-digit character except ('-', '.', ':', 'T')
elseif c < 0x30 || c > 0x39
L = fastpos(io)-i-1
if L > 0
reset(io)
T[k] = stream_int(io, L)
fastskip(io, 1)
end
break
end
end
y = T[1]
if T[2] == zero(Y)
return y2μs(y[1])
else
j = md2j(y, T[2], T[3])
return mktime(y, j, T[4], T[5], T[6], T[7])
end
end
function string_time(str::String, T::Array{Y,1}) where Y<:Integer
if isdigit(str[end])
s = str*"~"
io = IOBuffer(s)
else
io = IOBuffer(str)
end
seekstart(io)
t = stream_time(io, T)
close(io)
return t
end
string_time(str::String) = string_time(str, BUF.date_buf)
export web_chanspec, seis_www, track_on!, track_off!
# Generic handler for getting data by HTTP
# Returns:
# R::Array{UInt8,1}, either request body or error data
# parsable::Bool, whether or not R is parsable
function get_http_req(url::String, req_info_str::String, to::Int; status_exception::Bool=false)
(R::Array{UInt8,1}, parsable::Bool) = try
req = request( "GET", url, webhdr,
readtimeout = to,
status_exception = status_exception )
if req.status == 200
(req.body, true)
else
@warn(string("Request failed", req_info_str,
"\nRESPONSE = ", req.status, " (", statustext(req.status), ")",
"\n\nHTTP response is in misc[\"data\"]"))
(Array{UInt8,1}(string(req)), false)
end
catch err
T = typeof(err)
@warn( string( "Error thrown", req_info_str,
"\n\nERROR = ", T,
"\n\nTrying to store error message in misc[\"data\"]"
)
)
msg_data::Array{UInt8,1} = Array{UInt8,1}( try; string(getfield(err, :response)); catch; try; string(getfield(err, :msg)); catch; ""; end; end )
(msg_data, false)
end
return R, parsable
end
function get_http_post(url::String, body::String, to::Int; status_exception::Bool=false)
try
req = request( "POST", url, webhdr, body,
readtimeout = to,
status_exception = status_exception)
if req.status == 200
return (req.body, true)
else
@warn(string( "Request failed!\nURL: ", url, "\nPOST BODY: \n", body, "\n",
"RESPONSE: ", req.status, " (", statustext(req.status), ")\n" ) )
return (Array{UInt8,1}(string(req)), false)
end
catch err
@warn(string( "Error thrown:\nURL: ", url, "\nPOST BODY: \n", body, "\n",
"ERROR TYPE: ", typeof(err), "\n" ) )
msg_data::Array{UInt8,1} = Array{UInt8,1}( try; string(getfield(err, :response)); catch; try; string(getfield(err, :msg)); catch; ""; end; end )
return (msg_data, false)
end
end
datareq_summ(src::String, ID::String, d0::String, d1::String) = ("\n" * src *
" query:\nID = " * ID * "\nSTART = " * d0 * "\nEND = " * d1)
# ============================================================================
# Utility functions not for export
hashfname(str::Array{String,1}, ext::String) = string(hash(str), ".", ext)
# Start tracking channel IDs and data lengths
"""
track_on!(S::SeisData)
Track changes to S.id, changes to channel structure of S, and the sizes of data
vectors in S.x. Does not track data processing operations on channel i
unless length(S.x[i]) changes.
**Warning**: If you have or suspect gapped data in any channel, do not use
ungap! while tracking is active.
"""
function track_on!(S::SeisData)
if S.n > 0
ids = unique(getfield(S, :id))
nxs = zeros(Int64, S.n)
for i = 1:S.n
nxs[i] = length(S.x[i])
end
S.misc[1]["track"] = (ids, nxs)
end
return nothing
end
# Stop tracking channel IDs and data lengths; report which have changed
"""
u = track_off!(S::SeisData)
Turn off tracking in S and return a boolean vector of which channels have
been added or altered significantly.
"""
function track_off!(S::SeisData)
k = findfirst([haskey(S.misc[i],"track") for i = 1:S.n])
if S.n == 0
return nothing
elseif k == nothing
return trues(S.n)
end
u = falses(S.n)
(ids, nxs) = S.misc[k]["track"]
for (n, id) in enumerate(S.id)
j = findfirst(x -> x == id, ids)
if j == nothing
u[n] = true
else
if nxs[j] != length(S.x[n])
u[n] = true
end
end
end
delete!(S.misc[k], "track")
return u
end
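#= Usage sketch (hedged): `S` is assumed to hold data already; tracking flags
   channels that are added, or whose data length changes, between the calls.
    track_on!(S)
    # ... acquire or append more data to S ...
    u = track_off!(S)   # BitArray: true for channels added or changed
=#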
function savereq(D::Array{UInt8,1}, ext::String, id::String, s::String)
if ext == "miniseed"
ext = "mseed"
elseif occursin("sac", ext)
ext = "SAC"
end
s_str = int2tstr(tstr2int(s))
yy = s_str[1:4]
mm = s_str[6:7]
dd = s_str[9:10]
HH = s_str[12:13]
MM = s_str[15:16]
SS = s_str[18:19]
nn = lpad(div(parse(Int64, s_str[21:26]), 1000), 3, '0')
jj = lpad(md2j(yy, mm, dd), 3, '0')
fname = join([yy, jj, HH, MM, SS, nn, id, "R", ext], '.')
safe_isfile(fname) && @warn(string("File ", fname, " contains an identical request; overwriting."))
f = open(fname, "w")
write(f, D)
close(f)
return fname
end
"""
| String | Source |
|:------:|:-------------------|
|BGR | http://eida.bgr.de |
|EMSC | http://www.seismicportal.eu |
|ETH | http://eida.ethz.ch |
|GEONET | http://service.geonet.org.nz |
|GFZ | http://geofon.gfz-potsdam.de |
|ICGC | http://ws.icgc.cat |
|INGV | http://webservices.ingv.it |
|IPGP | http://eida.ipgp.fr |
|IRIS | http://service.iris.edu |
|IRISPH5| http://service.iris.edu/ph5ws/ |
|ISC | http://isc-mirror.iris.washington.edu |
|KOERI | http://eida.koeri.boun.edu.tr |
|LMU | http://erde.geophysik.uni-muenchen.de |
|NCEDC | http://service.ncedc.org |
|NIEP | http://eida-sc3.infp.ro |
|NOA | http://eida.gein.noa.gr |
|ORFEUS | http://www.orfeus-eu.org |
|RESIF | http://ws.resif.fr |
|SCEDC | http://service.scedc.caltech.edu |
|TEXNET | http://rtserve.beg.utexas.edu |
|USGS | http://earthquake.usgs.gov |
|USP | http://sismo.iag.usp.br |
"""
seis_www = Dict("BGR" => "http://eida.bgr.de",
"EMSC" => "http://www.seismicportal.eu",
"ETH" => "http://eida.ethz.ch",
"GEONET" => "http://service.geonet.org.nz",
"GFZ" => "http://geofon.gfz-potsdam.de",
"ICGC" => "http://ws.icgc.cat",
"INGV" => "http://webservices.ingv.it",
"IPGP" => "http://eida.ipgp.fr",
"IRIS" => "http://service.iris.edu",
"ISC" => "http://isc-mirror.iris.washington.edu",
"KOERI" => "http://eida.koeri.boun.edu.tr",
"LMU" => "http://erde.geophysik.uni-muenchen.de",
"NCEDC" => "http://service.ncedc.org",
"NIEP" => "http://eida-sc3.infp.ro",
"NOA" => "http://eida.gein.noa.gr",
"ODC" => "http://www.orfeus-eu.org",
"ORFEUS" => "http://www.orfeus-eu.org",
"RESIF" => "http://ws.resif.fr",
"SCEDC" => "http://service.scedc.caltech.edu",
"TEXNET" => "http://rtserve.beg.utexas.edu",
"USGS" => "http://earthquake.usgs.gov",
"USP" => "http://sismo.iag.usp.br")
ph5_www = Dict("IRISPH5" => "http://service.iris.edu")
function fdsn_uhead(src::String)
if !occursin("PH5",src)
return haskey(seis_www, src) ? seis_www[src] * "/fdsnws/" : src
else
return haskey(ph5_www, src) ? ph5_www[src] * "/ph5ws/" : src
end
end
@doc """
web_chanspec
## Specifying Channel IDs in Web Requests
| Str | L | Meaning | Example |
|:--- |:---|:----- |:----- |
| NET | 2 | Network code | "IU" |
| STA | 5 | Station code | "ANMO" |
| LOC | 2 | Location identifier | "00" |
| CHA | 3 | Channel code | "BHZ" |
A channel is uniquely specified by four substrings (NET, STA, LOC, CHA), which
can be formatted as a String or a String array. Each substring has a maximum
safe length of `L` characters (column 2 in the table).
### Acceptable Channel ID Formats
| Type | Example |
|:----- |:----- |
| String | "PB.B004.01.BS1, PB.B004.01.BS2" |
| Array{String, 1} | ["PB.B004.01.BS1","PB.B004.01.BS2"] |
| Array{String, 2} | ["PB" "B004" "01" "BS?"; "PB" "B001" "01" "BS?"] |
The `LOC` field can be blank in FDSN requests with get_data; for example,
`chans="UW.ELK..EHZ"; get_data("FDSN", chans)`.
#### SeedLink only
For SeedLink functions (`seedlink!`, `has_stream`, etc.), channel IDs can
include a fifth field (i.e. NET.STA.LOC.CHA.T) to set the "type" flag (one of
DECOTL, for Data, Event, Calibration, blOckette, Timing, or Logs). Note that
SeedLink calibration, timing, and logs are not in the scope of SeisIO.
See also: `get_data`, `seedlink`
"""
function web_chanspec()
return nothing
end
# export FDSNevq, FDSNevt, FDSNsta
export FDSNsta
# =============================================================================
# No export
function fdsn_chp(chans::ChanOpts, v::Integer)
# Parse channel config
if isa(chans, String)
C = parse_chstr(chans, ',', true, false)
elseif isa(chans, Array{String, 1})
C = parse_charr(chans, '.', true)
else
C = copy(chans)
end
minreq!(C)
v > 1 && println(stdout, "Most compact request form = ", C)
return C
end
"""
S = FDSNsta(chans, KW)
Retrieve station/channel info for the channel specification `chans` (a string,
string array, or formatted parameter file) into a new SeisData structure.
Standard keywords: rad, reg, src, to, v
Other keywords:
* msr: get MultiStage (full) responses?
* s: Start time
* t: Termination (end) time
* xf: Name of XML file to save station metadata
See also: `web_chanspec`, `parsetimewin`, `get_data!`, `SeisIO.KW`
"""
function FDSNsta( chans::ChanOpts="*";
msr::Bool = false, # MultiStageResp
rad::Array{Float64,1} = KW.rad, # Search radius
reg::Array{Float64,1} = KW.reg, # Search region
s::TimeSpec = 0, # Start
src::String = KW.src, # Source server
t::TimeSpec = (-600), # End or Length (s)
to::Int64 = KW.to, # Read timeout (s)
v::Integer = KW.v, # Verbosity
xf::String = "FDSNsta.xml" # XML filename
)
d0, d1 = parsetimewin(s, t)
v > 0 && @info(tnote("Querying FDSN stations"))
URL = string(fdsn_uhead(src), "station/1/query")
BODY = "level=response\nformat=xml\n"
wc = "*"
# Add geographic search to BODY
if !isempty(rad)
BODY *= string( "latitude=", rad[1], "\n",
"longitude=", rad[2], "\n",
"minradius=", rad[3], "\n",
"maxradius=", rad[4], "\n")
end
if !isempty(reg)
BODY *= string( "minlatitude=", reg[1], "\n",
"maxlatitude=", reg[2], "\n",
"minlongitude=", reg[3], "\n",
"maxlongitude=", reg[4], "\n" )
end
# Add channel search to BODY
if chans == wc
(isempty(reg) && isempty(rad)) && error("No query! Please specify a search radius, a rectangular search region, or some channels.")
BODY *= string("* * * * ", d0, " ", d1, "\n")
else
C = fdsn_chp(chans, v)
Nc = size(C,1)
for i = 1:Nc
str = ""
for j = 1:4
str *= (" " * (isempty(C[i,j]) ? wc : C[i,j]))
end
BODY *= string(str, " ", d0, " ", d1, "\n")
end
end
if v > 1
printstyled("request url:", color=:light_green)
println(URL)
printstyled("request body: \n", color=:light_green)
println(BODY)
end
open(xf, "w") do io
request("POST", URL, webhdr, BODY, response_stream=io)
end
# Build channel list
v > 0 && @info(tnote("Building list of channels"))
io = open(xf, "r")
xsta = read(io, String)
close(io)
S = FDSN_sta_xml(xsta, msr, d0, d1, v)
# ===================================================================
# Logging
note!(S, string( "+meta ¦ ", URL ))
for i in 1:S.n
id = split_id(S.id[i])
if isempty(id[3])
id[3] = "--"
end
note!(S, i, string("POST ¦ ", join(id, " "), " ", d0, " ", d1))
end
# ===================================================================
return S
end
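# Illustrative usage sketch (comments only, not executed; IDs, times, and
# coordinates are examples):
#   S = FDSNsta("CC.VALT..BHZ", s="2017-01-01T00:00:00", t="2017-01-02T00:00:00")
#   S = FDSNsta(rad=[46.8, -121.75, 0.0, 0.5])   # radius search with default time window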
function FDSNget!(U::SeisData,
chans::ChanOpts,
d0::String,
d1::String,
autoname::Bool,
fmt::String,
msr::Bool,
nd::Real,
opts::String,
rad::Array{Float64,1},
reg::Array{Float64,1},
si::Bool,
src::String,
to::Int64,
v::Integer,
w::Bool,
xf::String,
y::Bool)
parse_err = false
n_badreq = 0
wc = "*"
fname = ""
# (1) Time-space query for station info
S = (if si
FDSNsta(chans,
msr = msr,
rad = rad,
reg = reg,
s = d0,
src = src,
t = d1,
to = to,
v = v,
xf = xf
)
else
SeisData()
end)
# (1a) Can we autoname the file? True iff S.n == 1
(S.n == 1) || (autoname = false)
# (2) Build ID strings for data query
ID_str = Array{String,1}(undef, S.n)
if S.n > 0
N_ch = S.n
for i in 1:N_ch
ID_mat = split(S.id[i], ".")
ID_mat[isempty.(ID_mat)] .= wc
ID_str[i] = join(ID_mat, " ")
end
else
C = fdsn_chp(chans, v)[:,1:4]
C[isempty.(C)] .= "*"
N_ch = size(C,1)
ID_str = [join(C[i, :], " ") for i in 1:N_ch]
end
if v > 1
printstyled("data query strings:\n", color=:light_green)
for i = 1:length(ID_str)
println(stdout, ID_str[i])
end
end
# (3) Data query
v > 0 && @info(tnote("Data query begins"))
URL = string(fdsn_uhead(src), "dataselect/1/query")
if occursin("ncedc", URL) || occursin("scedc", URL)
BODY = ""
if fmt != "miniseed"
@warn(string("format ", fmt, " ignored; server only allows miniseed."))
end
elseif occursin("ph5ws",URL)
BODY = "reqtype=FDSN\n"
if fmt ∉ ["mseed","miniseed","sac","segy1","segy2","geocsv","geocsv.tspair","geocsv.slist"]
@warn(string("format ", fmt, " ignored; server only allows:\n" *
"mseed\n" *
"sac\n" *
"segy1\n" *
"geocsv\n" *
"geocsv.tspair\n" *
"geocsv.slist")
)
end
fmt = fmt == "miniseed" ? "mseed" : fmt
BODY *= "format=" * fmt * "\n"
if !isempty(opts)
OPTS = split(opts, "&")
for opt in OPTS
BODY *= string(opt, "\n")
end
end
else
BODY = "format=" * fmt * "\n"
if !isempty(opts)
OPTS = split(opts, "&")
for opt in OPTS
BODY *= string(opt, "\n")
end
end
end
# Set the data source
fill!(S.src, URL)
# Create variables for query
ts = tstr2int(d0)
ti = round(Int64, nd*86400000000)
te = tstr2int(d1)
t1 = deepcopy(ts)
rn = 0
while t1 < te
rn += 1
os = rn > 1 ? 1 : 0
t1 = min(ts + ti, te)
s_str = int2tstr(ts + os)
t_str = int2tstr(t1)
qtail = string(" ", s_str, " ", t_str, "\n")
QUERY = identity(BODY)
for i = 1:N_ch
QUERY *= ID_str[i]*qtail
end
if v > 1
printstyled(string("request url: ", URL, "\n"), color=:light_green)
printstyled(string("request body: \n", QUERY), color=:light_green)
end
# Request via "POST"
if w
if fmt == "miniseed"
ext = "mseed"
else
ext = fmt
end
# Generate filename
yy = s_str[1:4]
mm = s_str[6:7]
dd = s_str[9:10]
HH = s_str[12:13]
MM = s_str[15:16]
SS = s_str[18:19]
nn = lpad(div(parse(Int64, s_str[21:26]), 1000), 3, '0')
jj = lpad(md2j(yy, mm, dd), 3, '0')
if autoname
fname = join([yy, jj, HH, MM, SS, nn, S.id[1], "R", ext], '.')
else
fname = join([yy, jj, HH, MM, SS, nn, "FDSNWS", src, ext], '.')
end
safe_isfile(fname) && @warn(string("File ", fname, " contains an identical request; overwriting."))
open(fname, "w") do io
request("POST", URL, webhdr, QUERY, readtimeout=to, response_stream=io)
end
io = open(fname, "r")
parsable = true
else
(R, parsable) = get_http_post(URL, QUERY, to)
io = IOBuffer(R)
end
(v > 1) && println("parsable = ", parsable)
# Parse data (if we can)
if parsable
if fmt == "mseed" || fmt == "miniseed"
parsemseed!(S, io, KW.nx_add, KW.nx_add, true, v)
elseif fmt == "geocsv" || fmt == "geocsv.tspair"
read_geocsv_tspair!(S, io)
elseif fmt == "geocsv.slist"
read_geocsv_slist!(S, io)
else
parse_err = true
n_badreq += 1
push!(S, SeisChannel(id = string("XX.FMT..", lpad(n_badreq, 3, "0")),
misc = Dict{String,Any}( "url" => URL,
"body" => QUERY,
"raw" => read(io))))
note!(S, S.n, "unparseable format; raw bytes in :misc[\"raw\"]")
end
else
# Should only happen with an error message (parsable to String) in io
parse_err = true
n_badreq += 1
push!(S, SeisChannel(id = string("XX.FAIL..", lpad(n_badreq, 3, "0")),
misc = Dict{String,Any}( "url" => URL,
"body" => QUERY,
"msg" => String(read(io)))))
note!(S, S.n, "request failed; response in :misc[\"msg\"]")
end
close(io)
ts += ti
end
# ===================================================================
# Logging
note!(S, string( "+source ¦ ", URL ))
for i in 1:S.n
id = split_id(S.id[i])
if isempty(id[3])
id[3] = "--"
end
note!(S, i, string("POST ¦ ", join(id, " "), " ", d0, " ", d1))
if w
wstr = string(timestamp(), " ¦ write ¦ get_data(\"FDSN\" ... w=true) ¦ wrote raw download to file ", fname)
for i in 1:S.n
push!(S.notes[i], wstr)
end
end
end
# ===================================================================
append!(U,S)
# Done!
v > 0 && @info(string(timestamp(), ": done FDSNget query."))
v > 1 && @info(string("n_badreq = ", n_badreq))
return parse_err
end
function irisws(cha::String,
d0::String,
d1::String,
fmt::String,
opts::String,
to::Int64,
v::Integer,
w::Bool)
# init
parse_err = false
parsable = false
fname = ""
# parse channel string cha
c = (parse_chstr(cha, ',', false, false)[1,:])[1:min(end,4)]
if isempty(c[3])
c[3] = "--"
end
ID = join([c[1], c[2], strip(c[3],'-'), c[4]], '.')
# Build query url
url = "http://service.iris.edu/irisws/timeseries/1/query?" *
build_stream_query(c, d0, d1) * "&scale=" * (fmt == "miniseed" ? "AUTO" : "1.0") * "&format=" * fmt
v > 0 && println(url)
req_info_str = datareq_summ("IRISWS data", ID, d0, d1)
# see CHANGELOG, 2020-05-28
# Do request
(R, parsable) = get_http_req(url, req_info_str, to)
if parsable
if w
fname = savereq(R, fmt, ID, d0)
end
if fmt == "sacbl"
Ch = read_sac_stream(IOBuffer(R), false, false)
elseif fmt == "miniseed"
S = SeisData()
parsemseed!(S, IOBuffer(R), KW.nx_add, KW.nx_add, true, v)
Ch = S[1]
elseif fmt == "geocsv"
S = SeisData()
read_geocsv_tspair!(S, IOBuffer(R))
Ch = S[1]
else
# other parsers not supported
parse_err = true
Ch = SeisChannel(id = string("XX.FMT..001"),
misc = Dict{String,Any}(
"url" => url,
"raw" => read(IOBuffer(R))
)
)
note!(Ch, "unparseable format; raw bytes in :misc[\"raw\"]")
end
else
parse_err = true
Ch = SeisChannel(id = string("XX.FAIL..001"),
misc = Dict{String,Any}(
"url" => url,
"msg" => String(read(IOBuffer(R)))
)
)
note!(Ch, "request failed; response in :misc[\"msg\"]")
end
setfield!(Ch, :src, url)
note!(Ch, "+source ¦ " * url)
# fill :id and empty fields if no parse_err
if parse_err == false
setfield!(Ch, :id, ID)
if isempty(Ch.name)
Ch.name = deepcopy(ID)
end
unscale!(Ch) # see CHANGELOG, 2020-05-28
Ch.loc = GeoLoc() # see CHANGELOG, 2020-05-28
end
if parsable && w
push!(Ch.notes, string(timestamp(), " ¦ write ¦ get_data(\"IRIS\", ... w=true) ¦ wrote raw download to file ", fname))
end
return parse_err, Ch
end
# Programming note: if this method is the default, and S is only modified
# within the for loop, then S is copied to a local scope and the newly-added
# data are deleted upon return
function IRISget!(S::GphysData,
C::Array{String, 1},
d0::String,
d1::String,
fmt::String,
opts::String,
to::Int64,
v::Integer,
w::Bool)
parse_err = false
if fmt == "mseed"
fmt = "miniseed"
elseif fmt == "sac"
fmt = "sacbl"
end
U = SeisData()
K = size(C, 1)
v > 0 && println("IRISWS data request begins...")
for k = 1:K
(p, Ch) = irisws(C[k], d0, d1, fmt, opts, to, v, w)
push!(U, Ch)
parse_err = max(parse_err, p)
end
append!(S, U)
return parse_err
end
export seedlink, seedlink!, sl_info, has_sta, has_stream
# ========================================================================
# Utility functions not for export
function timed_wait!(conn::TCPSocket, to::Real, b::Bool)
n = bytesavailable(conn)
m = copy(n)
ts = time()
te = copy(ts)
while (te-ts) < to && m == n
sleep(1.0)
b = eof(conn)
te = time()
n = bytesavailable(conn)
end
return nothing
end
function sl_cparse(C::ChanOpts)
if isa(C, String)
sta,pat = parse_sl(parse_chstr(C, ',', false, false))
elseif ndims(C) == 1
sta, pat = parse_sl(parse_charr(C, '.', false))
else
sta, pat = parse_sl(C)
end
return sta, pat
end
# This was deprecated in Julia 0.6; hard-copied here, still works
function sync_add(r::Task)
spawns = get(task_local_storage(), :SPAWNS, ())
if spawns != ()
push!(spawns[1], r)
tls_r = Base.get_task_tls(r)
tls_r[:SUPPRESS_EXCEPTION_PRINTING] = true
end
r
end
function get_sl_ver(vline::String)
# Versioning will break if SeedLink switches to VV.PPP.NNN format
ver = 0.0
vinfo = split(vline)
for i in vinfo
if startswith(i, 'v')
try
ver = Meta.parse(i[2:end])
catch
continue
end
end
end
return ver
end
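# e.g. for a hypothetical HELLO line: get_sl_ver("SeedLink v3.1 (2014.071)") == 3.1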
function check_sta_exists(sta::Array{String,1}, xstr::String)
xstreams = get_elements_by_tagname(root(parse_string(xstr)), "station")
xid = join([join([attribute(xstreams[i], "network"),
attribute(xstreams[i], "name")],'.') for i=1:length(xstreams)], ' ')
N = length(sta)
x = falses(N)
for i = 1:N
id = split(sta[i], '.', keepempty=true)
sid = join(id[1:2],'.')
if occursin(sid, xid)
x[i] = true
end
end
return x
end
function check_stream_exists(S::Array{String,1}, xstr::String;
gap::Real = KW.SL.gap,
to::Int64 = KW.to
)
a = ["seedname","location","type"]
N = length(S)
x = falses(N)
xstreams = get_elements_by_tagname(root(parse_string(xstr)), "station")
xid = String[join([attribute(xstreams[i], "network"),
attribute(xstreams[i], "name")],'.') for i=1:length(xstreams)]
for i = 1:N
# Assumes the combination of network name and station name is unique
id = split(S[i], '.', keepempty=true)
sid = join(id[1:2], '.')
K = findid(sid, xid)
if K > 0
t = Inf
      # occursin("", str) returns true for any str, so empty patterns match everything
p = ["","",""]
for j = 3:length(id)
p[j-2] = replace(id[j], "?" => "")
end
R = get_elements_by_tagname(xstreams[K], "stream")
if !isempty(R)
for j = 1:length(R)
if prod([occursin(p[i], attribute(R[j], a[i])) for i=1:length(p)]) == true
te = replace(attribute(R[j], "end_time"), " " => "T")
t = min(t, time()-d2u(Dates.DateTime(te)))
end
end
end
# Treat station as "present" if there's a match
if minimum(t) < gap
x[i] = true
end
end
end
return x
end
# ========================================================================
"""
info_xml = sl_info(level=LEVEL::String; u=URL::String, port=PORT::Integer)
Retrieve XML output of SeedLink command "INFO `LEVEL`" from server `URL:PORT`.
Returns formatted XML. `LEVEL` must be one of "ID", "CAPABILITIES",
"STATIONS", "STREAMS", "GAPS", "CONNECTIONS", "ALL".
"""
function sl_info(level::String; # verbosity
to::Int64 = KW.to, # timeout [s]
u::String = KW.SL.u, # url
port::Int64 = KW.SL.port # port #
)
conn = connect(TCPSocket(), u, port)
write(conn, string("INFO ", level, "\r"))
b = false
timed_wait!(conn, to, b)
buf = BUF.buf
if (bytesavailable(conn) == 0) || (isopen(conn) == false)
    @warn(string("Connection sent no data after ", to, " seconds; closing."))
(isopen(conn)) && close(conn)
x = Array{UInt8, 1}(undef, 0)
else
eflg = false
x = Array{UInt8, 1}(undef, 1048576)
i = 0x0000000000000001
c = 0x01
k = 0x0000
while true
N = conn.buffer.size
checkbuf!(buf, N)
# buffer to :buf from conn
unsafe_read(conn.buffer, pointer(buf), N)
conn.buffer.ptr = 1
conn.buffer.size = 0
# copy to x after filtering out 0x00 and first 64 vals of each 520-byte packet
j = 0x0000000000000000
while j < N
j += 0x0000000000000001
k += 0x0001
if i + 0x00000000000001c8 > length(x)
resize!(x, length(x) + 524288)
end
if k >= 0x0040
c = getindex(buf, j)
if c != 0x00
setindex!(x, c, i)
i += 0x0000000000000001
end
end
if k == 0x0208
if buf[j] == 0x00
eflg = true
break
end
k = 0x0000
end
end
eflg && break
timed_wait!(conn, to, b)
end
close(conn)
deleteat!(x, i:length(x))
end
resize!(buf, 65535)
return String(x)
end
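# Illustrative sketch (comments only, not executed; requires a reachable SeedLink server):
#   xml = sl_info("STREAMS")                              # default server from KW.SL.u
#   xml = sl_info("ID", u="rtserve.iris.washington.edu")  # hostname given as an example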
"""
has_sta(sta[, u=url, port=N])
Check that streams exist at `url` for stations `sta`, formatted
NET.STA. Use "?" to match any single character. Returns `true` for
stations that exist. `sta` can also be the name of a valid config
file or a 1d string array.
Returns a BitArray with one value per entry in `sta`.
SeedLink keywords: gap, port
"""
function has_sta(C::String;
u::String = KW.SL.u, # url base, no "http://"
port::Int64 = KW.SL.port # port number
)
sta, pat = parse_sl(parse_chstr(C, ',', false, false))
# This exists to convert SeedLink syntax (SSS NN) to FDSN (NN.SSS)
for i = 1:length(sta)
s = split(sta[i], ' ')
sta[i] = join([s[2], s[1]], '.')
end
# This exists to convert SeedLink syntax (SSS NN) to FDSN (NN.SSS)
return check_sta_exists(sta, sl_info("STATIONS", u=u, port=port))
end
has_sta( sta::Array{String,1};
u::String = KW.SL.u, # url base, no "http://"
port::Int64 = KW.SL.port # port number
) = check_sta_exists(sta, sl_info("STATIONS", u=u, port=port))
has_sta( sta::Array{String,2};
u::String = KW.SL.u, # url base, no "http://"
port::Int64 = KW.SL.port # port number
) = check_sta_exists([join(sta[i,:], '.') for i=1:size(sta,1)],
sl_info("STATIONS", u=u, port=port))
"""
has_stream(cha[, u=url, port=N, gap=G)
Check that streams with recent data exist at url `u` for channel spec
`cha`, formatted NET.STA.LOC.CHA.DFLAG, e.g. "UW.TDH..EHZ.D,
CC.HOOD..BH?.E". Use "?" to match any single character. Returns `true`
for streams with recent data.
`cha` can also be the name of a valid config file.
has_stream(sta::Array{String,1}, sel::Array{String,1}, u::String, port=N::Int, gap=G::Real)
If two arrays are passed to has_stream, the first should be
formatted as SeedLink STATION patterns (formatted "SSSSS NN", e.g.
["TDH UW", "VALT CC"]); the second should be an array of SeedLink selector
patterns (formatted LLCCC.D, e.g. ["??EHZ.D", "??BH?.?"]).
SeedLink keywords: gap, port
"""
function has_stream(sta::Array{String,1}, pat::Array{String,1};
u::String = KW.SL.u, # url base, no "http://"
to::Int64 = KW.to, # timeout[s]
port::Int64 = KW.SL.port, # port #
gap::Real = KW.SL.gap, # max gap [s]
d::Char = ' ' # delimiter
)
L = length(sta)
cha = Array{String,1}(undef, L)
for i = 1:L
s = split(sta[i], d)
c = split(pat[i], '.')
cha[i] = join([s[1], s[2], c[1][1:2], c[1][3:5], c[2]], '.')
end
return check_stream_exists(cha, sl_info("STREAMS", u=u, port=port), gap=gap, to=to)
end
has_stream( sta::String;
u::String = KW.SL.u, # url base, no "http://"
to::Int64 = KW.to, # timeout [s]
port::Int64 = KW.SL.port, # port number
gap::Real = KW.SL.gap, # max. gap [s]
d::Char = ','
) = check_stream_exists(String.(split(sta, d)), sl_info("STREAMS", u=u, port=port), gap=gap, to=to)
has_stream( sta::Array{String,1};
u::String = KW.SL.u, # url base, no "http://"
to::Int64 = KW.to, # timeout [s]
port::Int64 = KW.SL.port, # port number
gap::Real = KW.SL.gap # max. gap [s]
) = check_stream_exists(sta, sl_info("STREAMS", u=u, port=port), gap=gap, to=to)
has_stream( sta::Array{String,2};
u::String = KW.SL.u, # url base, no "http://"
to::Int64 = KW.to, # timeout [s]
port::Int64 = KW.SL.port, # port number
gap::Real = KW.SL.gap # max. gap [s]
) = check_stream_exists([join(sta[i,:], '.') for i=1:size(sta,1)],
sl_info("STREAMS", u=u, port=port), gap=gap, to=to)
@doc """
seedlink!(S, mode, chans)
S = seedlink(mode, chans)
Begin acquiring SeedLink data to SeisData structure `S` using mode `mode`. New channels are added to `S` automatically based on `chans`. Connections are added to `S.c`. When finished, close connection manually with `close(S.c[n])` where n is connection #.
This function is fully described in the official documentation at https://seisio.readthedocs.io/ in subsection **SeedLink**.
Keywords: gap, kai, port, refresh, s, t, u, v, w, x_on_err
See also: `get_data!`
""" seedlink!
function seedlink!(S::SeisData, mode::String, sta::Array{String,1}, patts::Array{String,1};
gap::Real = KW.SL.gap, # max gap of live channels
kai::Real = KW.SL.kai, # keepalive interval [s]
seq::String = "", # starting sequence number
port::Int64 = KW.SL.port, # port number
refresh::Real = KW.SL.refresh, # s between read attempts
u::String = KW.SL.u, # URL base, no "http://"
s::TimeSpec = 0, # start time
t::TimeSpec = 300, # end (termination) time
v::Integer = KW.v, # verbosity
w::Bool = KW.w, # write directly to disk?
x_on_err::Bool = KW.SL.x_on_err # exit on error?
)
# ==========================================================================
# init, warnings, sanity checks
Ns = size(sta,1)
setfield!(BUF, :swap, false)
# Refresh interval
refresh = maximum([refresh, eps()])
refresh < 10 && @warn(string("refresh = ", refresh, " < 10 s; Julia may freeze if no packets arrive between consecutive read attempts."))
# keepalive interval
if kai < 240
@warn("KeepAlive interval < 240s violates IRIS netiquette guidelines. YOU are responsible if you get banned for hammering.")
end
# Source for logging
src = join([u,port],':')
# ==========================================================================
# connection and server info retrieval
push!(S.c, connect(TCPSocket(),u,port))
q = length(S.c)
# version, server info
write(S.c[q],"HELLO\r")
vline = readline(S.c[q])
sline = readline(S.c[q])
ver = get_sl_ver(vline)
(v > 1) && println("Version = ", ver)
(v > 1) && println("Server = ", strip(sline,['\r','\n']))
# ==========================================================================
# ==========================================================================
# handshaking
# create mode string and filename for -w
(d0,d1) = parsetimewin(s,t)
s = join(split(d0,r"[\-T\:\.]")[1:6],',')
t = join(split(d1,r"[\-T\:\.]")[1:6],',')
if mode in ["TIME", "FETCH"]
if mode == "TIME"
m_str = string("TIME ", s, " ", t, "\r")
else
m_str = string("FETCH", isempty(seq) ? "" : string(" SL", seq, " ", s), "\r")
end
else
# m_str = "DATA\r"
m_str = string("DATA", isempty(seq) ? "" : string(" SL", seq, " ", s), "\r")
end
if w
fname = hashfname([join(sta,','), join(patts,','), s, t, m_str], "mseed")
(v > 0) && println(string("Raw packets will be written to file ", fname, " in dir ", realpath(pwd())))
fid = open(fname, "w")
end
# pass strings to server; check responses carefully
for i = 1:Ns
# pattern selector
sel_str = string("SELECT ", patts[i], "\r")
(v > 1) && println("Sending: ", sel_str)
write(S.c[q], sel_str)
sel_resp = readline(S.c[q])
(v > 1) && println("Response: ", sel_resp)
if occursin("ERROR", sel_resp)
if x_on_err
write(S.c[q], "BYE\r")
close(S.c[q])
deleteat!(S.c, q)
error(string("Error in select string ", patts[i], "; connection closed and deleted, exit with error."))
else
@warn(string("Error in select string ", patts[i], "; previous selector, ", i==1 ? "*" : patts[i-1], " used)."))
end
end
# station selector
sta_str = string("STATION ", sta[i], "\r")
(v > 1) && println("Sending: ", sta_str)
write(S.c[q], sta_str)
sta_resp = readline(S.c[q])
(v > 1) && println("Response: ", sta_resp)
if occursin("ERROR", sta_resp)
if x_on_err
write(S.c[q], "BYE\r")
close(S.c[q])
deleteat!(S.c, q)
error(string("Error in station string ", sta[i], "; connection closed and deleted, exit with error."))
else
@warn(string("Error in station string ", sta[i], " (station excluded)."))
end
end
# mode
(v > 1) && println("Sending: ", m_str)
write(S.c[q], m_str)
m_resp = readline(S.c[q])
(v > 1) && println("Response: ", m_resp)
end
write(S.c[q],"END\r")
# ==========================================================================
# ==========================================================================
# data transfer
k = @task begin
j = 0
while true
if !isopen(S.c[q])
(v > 0) && @info(string(timestamp(), ": SeedLink connection closed."))
w && close(fid)
break
else
#= use of rand() makes it almost impossible for multiple SeedLink
connections to result in one sleeping indefinitely. =#
τ = ceil(Int, refresh*(0.8 + 0.2*rand()))
sleep(τ)
eof(S.c[q])
N = div(bytesavailable(S.c[q]), 520)
if N > 0
#=
This syntax consumes 2x memory but is "safe" in that a partially
downloaded packet will be left in S.c[q], rather than causing the
SEED parser to throw errors and/or hang the REPL. (From experience,
it is NOT guaranteed that full packets are transmitted)
=#
buf = read(S.c[q], 520*N)
io = IOBuffer(buf)
if w
write(fid, copy(io))
end
track_on!(S)
(v > 1) && printstyled(stdout, now(), ": Processing packets ", color=:green)
while !eof(io)
pkt_id = String(read(io, 8))
(v > 1) && @printf(stdout, "%s, ", pkt_id)
parserec!(S, BUF, io, 65535, 65535, true, v)
end
(v > 1) && @printf(stdout, "\b\b...done current packet dump.\n")
seed_cleanup!(S, BUF)
k = track_off!(S)
# ===========================================================
# Source logging
for i in 1:length(k)
if k[i]
if haskey(S.misc[i], "SL_logged")
# Check that :src is still SeedLink
(S.src[i] == u) || (S.src[i] = u)
else
id = split_id(S.id[i])
if isempty(id[3])
id[3] = "??"
end
# this can be sent directly to a SeedLink server later
sl_str = string("commands ¦ SELECT ", id[3], id[4], ".D\r\n",
"STATION ", id[2], " ", id[1], "\r\n",
m_str, "\n")
S.src[i] = string(u) # Set :src to SeedLink
note!(S, i, string( "+source ¦ ", u )) # Log source to :notes
note!(S, i, sl_str) # Log commands to :notes
S.misc[i]["SL_logged"] = true # Flag i as SL_logged
end
end
# ===========================================================
end
end
# SeedLink (non-standard) keep-alive gets sent every kai seconds
j += τ
if j ≥ kai
# Secondary "isopen" loop avoids possible error from race condition
# maybe a Julia bug? First encountered 2017-01-03
if isopen(S.c[q])
j -= kai
write(S.c[q],"INFO ID\r")
end
end
end
end
end
sync_add(k)
Base.enq_work(k)
# ========================================================================
return S
end
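# Illustrative sketch of the low-level method above (comments only, not executed;
# station/selector patterns are examples and a live SeedLink server is required):
#   S = SeisData()
#   seedlink!(S, "TIME", ["TDH UW", "VALT CC"], ["??EHZ.D", "??BH?.D"], s=-120, t=120)
#   # ... when finished: for c in S.c; close(c); end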
function seedlink!(S::SeisData, mode::String, C::ChanOpts;
gap::Real = KW.SL.gap, # max gap of live channels
kai::Real = KW.SL.kai, # keepalive interval [s]
seq::String = "", # starting sequence number
port::Int64 = KW.SL.port, # port number
refresh::Real = KW.SL.refresh, # s between read attempts
u::String = KW.SL.u, # URL base, no "http://"
s::TimeSpec = 0, # start time
t::TimeSpec = 300, # end (termination) time
v::Integer = KW.v, # verbosity
w::Bool = KW.w, # write directly to disk?
x_on_err::Bool = KW.SL.x_on_err # exit on error?
)
sta, pat = sl_cparse(C)
seedlink!(S, mode, sta, pat, u=u, port=port, refresh=refresh, kai=kai, seq=seq, s=s, t=t, x_on_err=x_on_err, v=v, w=w)
return S
end
@doc (@doc seedlink!)
function seedlink(mode::String, sta::Array{String,1}, pat::Array{String,1};
gap::Real = KW.SL.gap, # max gap of live channels
kai::Real = KW.SL.kai, # keepalive interval [s]
seq::String = "", # starting sequence number
port::Int64 = KW.SL.port, # port number
refresh::Real = KW.SL.refresh, # s between read attempts
u::String = KW.SL.u, # URL base, no "http://"
s::TimeSpec = 0, # start time
t::TimeSpec = 300, # end (termination) time
v::Integer = KW.v, # verbosity
w::Bool = KW.w, # write directly to disk?
x_on_err::Bool = KW.SL.x_on_err # exit on error?
)
S = SeisData()
seedlink!(S, mode, sta, pat, u=u, port=port, refresh=refresh, kai=kai, seq=seq, s=s, t=t, x_on_err=x_on_err, v=v, w=w)
return S
end
function seedlink(mode::String, C::ChanOpts;
gap::Real = KW.SL.gap, # max gap of live channels
kai::Real = KW.SL.kai, # keepalive interval [s]
seq::String = "", # starting sequence number
port::Int64 = KW.SL.port, # port number
refresh::Real = KW.SL.refresh, # s between read attempts
u::String = KW.SL.u, # URL base, no "http://"
s::TimeSpec = 0, # start time
t::TimeSpec = 300, # end (termination) time
v::Integer = KW.v, # verbosity
w::Bool = KW.w, # write directly to disk?
x_on_err::Bool = KW.SL.x_on_err # exit on error?
)
S = SeisData()
sta, pat = sl_cparse(C)
seedlink!(S, mode, sta, pat, u=u, port=port, refresh=refresh, kai=kai, seq=seq, s=s, t=t, x_on_err=x_on_err, v=v, w=w)
return S
end
export get_data!, get_data
@doc """
S = get_data(method, chans [, keywords])
get_data!(S, method, chans [, keywords])
Wrapper to web requests for time-series data. Request data using `method` from channels `chans` using keywords `KWs`, storing the output in `S`.
* Methods: FDSN, IRIS, PH5
* Channels: See `?web_chanspec`
* Keywords: autoname, demean, detrend, fmt, msr, nd, opts, rad, reg, rr, s, si, src, t, taper, to, ungap, unscale, v, w, xf, y
This function is fully described in the official documentation at https://seisio.readthedocs.io/ under subheading **Web Requests**.
See also: `web_chanspec`, `parsetimewin`, `seis_www`, `SeisIO.KW`
""" get_data
function get_data(method_in::String, C::ChanOpts="*";
autoname::Bool = false , # Auto-generate file names?
demean::Bool = false , # Demean after download?
detrend::Bool = false , # Detrend after download?
fmt::String = KW.fmt , # File format
msr::Bool = false , # Get multi-stage response?
nd::Real = KW.nd , # Number of days per request
opts::String = KW.opts , # Options string
rad::Array{Float64, 1} = KW.rad , # Query radius
reg::Array{Float64, 1} = KW.reg , # Query region
prune::Bool = KW.prune , # Prune empty channels?
rr::Bool = false , # Remove instrument response?
s::TimeSpec = 0 , # Start
si::Bool = KW.si , # Fill station info?
src::String = KW.src , # Data source
taper::Bool = false , # Taper after download?
t::TimeSpec = (-600) , # End or Length (s)
to::Int64 = KW.to , # Timeout (s)
ungap::Bool = false , # Remove time gaps?
unscale::Bool = false , # Unscale (divide by gain)?
v::Integer = KW.v , # Verbosity
w::Bool = KW.w , # Write to disc?
xf::String = "FDSNsta.xml" , # XML save file
y::Bool = KW.y , # Sync
)
# Parse time window
α, ω = parsetimewin(s, t)
# KWs that overwrite other KWs (rare; keep this behavior to an absolute minimum!)
(autoname == true) && (w = true)
# Generate SeisData
S = SeisData()
# Condense requests as much as possible
if method_in == "FDSN"
if isa(C, String)
C = parse_chstr(C, ',', true, false)
elseif isa(C, Array{String,1})
C = parse_charr(C, '.', true)
end
R = minreq(C)
parse_err = FDSNget!(S, R, α, ω,
autoname, fmt, msr, nd, opts, rad, reg, si, src, to, v, w, xf, y)
elseif method_in == "PH5"
if isa(C, String)
C = parse_chstr(C, ',', true, false)
elseif isa(C, Array{String,1})
C = parse_charr(C, '.', true)
end
R = minreq(C)
parse_err = FDSNget!(S, R, α, ω,
autoname, fmt, msr, nd, opts, rad, reg, si, src*"PH5", to, v, w, xf, y)
elseif method_in == "IRIS"
if isa(C, String)
R = String[strip(String(j)) for j in split(C, ',')]
elseif isa(C, Array{String,2})
NC = size(C,1)
R = Array{String,1}(undef, NC)
for j = 1:NC
R[j] = join(C[j,:],'.')
end
else
R = deepcopy(C)
end
parse_err = IRISget!(S, R, α, ω, fmt, opts, to, v, w)
end
# DND DND DND
# Wrapper to a generic "get" function -- leave as example code
# getfield(SeisIO, Symbol(string(method_in, "get!")))(S, C, α, ω,
# f = f, opts = opts, si = si, src = src, to = to, v = v, w = w)
# DND DND DND
# ========================================================================
# Viable operations for any data type
if prune == true
if parse_err == false
v > 0 && @info(tnote("Removing empty channels."))
prune!(S)
else
v > 0 && @info(tnote("Can't prune empty channels; web request wasn't fully parsed."))
end
end
# Sync
if y == true
v > 0 && @info(tnote("Synchronizing data."))
sync!(S)
end
# ========================================================================
# Operations that only make sense for seismic or seismoacoustic data
if any([demean, detrend, rr, taper, unscale])
# Get list of channels with sane instrument codes
CC = get_seis_channels(S)
# Unscale
if unscale == true
v > 0 && @info(tnote("dividing out gain of seismic channels"))
unscale!(S, chans=CC)
end
# Demean
if demean == true
v > 0 && @info(tnote("removing mean of seismic channels"))
demean!(S, chans=CC)
end
# Detrend
if detrend == true
v > 0 && @info(tnote("detrending seismic channels"))
detrend!(S, chans=CC)
end
# Taper
if taper == true
v > 0 && @info(tnote("tapering seismic channel data"))
taper!(S, chans=CC)
end
# Ungap
if ungap == true
v > 0 && @info(tnote("ungapping seismic channel data"))
ungap!(S, chans=CC)
end
# Remove response
if rr == true
v > 0 && @info(tnote("removing instrument response from seismic channels"))
remove_resp!(S, chans=CC)
end
end
return S
end
@doc (@doc get_data)
function get_data!(S::SeisData, method_in::String, C::ChanOpts="*";
autoname::Bool = false , # Auto-generate file names?
demean::Bool = false , # Demean after download?
detrend::Bool = false , # Detrend after download?
fmt::String = KW.fmt , # File format
msr::Bool = false , # Get multi-stage response?
nd::Real = KW.nd , # Number of days per request
opts::String = KW.opts , # Options string
rad::Array{Float64, 1} = KW.rad , # Query radius
reg::Array{Float64, 1} = KW.reg , # Query region
prune::Bool = KW.prune , # Prune empty channels?
rr::Bool = false , # Remove instrument response?
s::TimeSpec = 0 , # Start
si::Bool = KW.si , # Fill station info?
src::String = KW.src , # Data source
taper::Bool = false , # Taper after download?
t::TimeSpec = (-600) , # End or Length (s)
to::Int64 = KW.to , # Timeout (s)
ungap::Bool = false , # Remove time gaps?
unscale::Bool = false , # Unscale (divide by gain)?
v::Integer = KW.v , # Verbosity
w::Bool = KW.w , # Write to disc?
xf::String = "FDSNsta.xml" , # XML save file
y::Bool = KW.y , # Sync
)
U = get_data(method_in, C,
autoname=autoname,
demean=demean,
detrend=detrend,
fmt=fmt,
msr=msr,
nd=nd,
opts=opts,
prune=prune,
rad=rad,
reg=reg,
rr=rr,
s=s,
si=si,
src=src,
t=t,
taper=taper,
to=to,
ungap=ungap,
unscale=unscale,
v=v,
w=w,
xf=xf,
y=y)
v > 2 && println(stdout, "S = \n", U)
append!(S, U)
return nothing
end
export read_data, read_data!
function read_data_seisio!(S::SeisData, filestr::String, memmap::Bool, v::Integer)
S_in = rseis(filestr, memmap=memmap)
L = length(S_in)
for i in 1:L
if typeof(S_in[i]) == SeisChannel
push!(S, S_in[i])
break
elseif typeof(S_in[i]) <: GphysChannel
C = convert(SeisChannel, S_in[i])
push!(S, C)
break
elseif typeof(S_in[i]) == SeisData
append!(S, S_in[i])
break
elseif typeof(S_in[i]) <: GphysData
S1 = convert(SeisData, S_in[i])
append!(S, S1)
break
elseif typeof(S_in[i]) == SeisEvent
(v > 0) && @warn(string("Obj ", i, " is type SeisEvent: only reading :data field"))
S1 = convert(SeisData, getfield(S_in[i], :data))
append!(S, S1)
break
else
(v > 0) && @warn(string("Obj ", i, " skipped (Type incompatible with read_data)"))
end
end
return nothing
end
@doc """
S = read_data(fmt, filestr [, keywords])
read_data!(S, fmt, filestr [, keywords])
Read data in file format `fmt` matching file pattern `filestr` into SeisData object `S`.
S = read_data(filestr [, keywords])
read_data!(S, filestr [, keywords])
Read from files matching file pattern `filestr` into SeisData object `S`.
Calls `guess(filestr)` to identify the file type based on the first file
matching pattern `filestr`. Much slower than manually specifying file type.
* Formats: ah1, ah2, bottle, geocsv, geocsv.slist, lennartz, mseed, passcal, suds, sac, segy, seisio, slist, uw, win32
* Keywords: cf, full, jst, memmap, nx_add, nx_new, strict, swap, v, vl
This function is fully described in the official documentation at https://seisio.readthedocs.io/ in section **Time-Series Files**.
See also: `SeisIO.KW`, `get_data`, `guess`, `rseis`
""" read_data!
function read_data!(S::GphysData, fmt::String, fpat::Union{String, Array{String,1}};
cf ::String = "", # win32 channel info file
full ::Bool = false, # full SAC/SEGY hdr
jst ::Bool = true, # are sample times JST (UTC+9)?
ll ::UInt8 = 0x00, # auto-set location field in :id?
memmap ::Bool = false, # use mmap? (DANGEROUS)
nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
nx_new ::Int64 = KW.nx_new, # new channel samples
strict ::Bool = false, # strict channel matching
swap ::Bool = false, # do byte swap?
v ::Integer = KW.v, # verbosity level
vl ::Bool = false # verbose logging
)
# Variables for tracking changes
N = S.n
fpat_is_array = isa(fpat, Array{String, 1})
fmt_is_seisio = (fmt == "seisio")
opt_strings = ""
last_src = zeros(Int64, S.n)
nx = Array{Int64, 1}(undef, S.n)
for i in 1:S.n
nx[i] = length(S.x[i])
end
if fpat_is_array
one_file = false
files = String[]
for f in fpat
ff = abspath(f)
if safe_isfile(ff)
push!(files, ff)
else
append!(files, ls(ff))
end
end
else
filestr = abspath(fpat)
one_file = safe_isfile(filestr)
if one_file == false
files = ls(filestr)
end
end
if fmt == "sac"
if one_file
read_sac_file!(S, filestr, full, memmap, strict)
else
for (j, fname) in enumerate(files)
read_sac_file!(S, fname, full, memmap, strict)
track_src!(S, j, nx, last_src)
end
end
elseif fmt_is_seisio
if one_file
read_data_seisio!(S, filestr, memmap, v)
else
for (j, fname) in enumerate(files)
read_data_seisio!(S, fname, memmap, v)
track_src!(S, j, nx, last_src)
end
end
elseif ((fmt == "miniseed") || (fmt == "mseed"))
setfield!(BUF, :swap, swap)
if one_file
read_mseed_file!(S, filestr, nx_new, nx_add, memmap, strict, v)
else
for (j, fname) in enumerate(files)
read_mseed_file!(S, fname, nx_new, nx_add, memmap, strict, v)
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("swap = ", swap,
", nx_new = ", nx_new,
", nx_add = ", nx_add)
# ============================================================================
# Data formats that aren't SAC, SEISIO, or SEED begin here and are alphabetical
# by first KW
elseif fmt == "ah1"
if one_file
read_ah1!(S, filestr, full, memmap, strict, v)
else
for (j, fname) in enumerate(files)
read_ah1!(S, fname, full, memmap, strict, v)
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("full = ", full)
elseif fmt == "ah2"
if one_file
read_ah2!(S, filestr, full, memmap, strict, v)
else
for (j, fname) in enumerate(files)
read_ah2!(S, fname, full, memmap, strict, v)
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("full = ", full)
elseif fmt == "bottle"
read_bottle!(S, filestr, nx_new, nx_add, memmap, strict, v)
opt_strings = string("nx_new = ", nx_new,
", nx_add = ", nx_add)
elseif fmt in ("geocsv", "geocsv.tspair", "geocsv", "geocsv.slist")
tspair = (fmt == "geocsv" || fmt == "geocsv.tspair")
if one_file
read_geocsv_file!(S, filestr, tspair, memmap)
else
for (j, fname) in enumerate(files)
read_geocsv_file!(S, fname, tspair, memmap)
track_src!(S, j, nx, last_src)
end
end
elseif fmt == "segy" || fmt == "passcal"
passcal = fmt == "passcal"
if one_file
read_segy_file!(S, filestr, ll, passcal, memmap, full, swap, strict)
else
for (j, fname) in enumerate(files)
read_segy_file!(S, fname, ll, passcal, memmap, full, swap, strict)
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("full = ", full,
", ll = ", ll,
", swap = ", swap)
elseif fmt == "slist" || fmt == "lennartz"
lennartz = fmt == "lennartz"
if one_file
read_slist!(S, filestr, lennartz, memmap, strict, v)
else
for (j, fname) in enumerate(files)
read_slist!(S, fname, lennartz, memmap, strict, v)
track_src!(S, j, nx, last_src)
end
end
elseif fmt == "suds"
if one_file
append!(S, SUDS.read_suds(filestr, memmap=memmap, full=full, v=v))
else
for (j, fname) in enumerate(files)
append!(S, SUDS.read_suds(fname, memmap=memmap, full=full, v=v))
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("full = ", full)
elseif fmt == "uw"
if one_file
UW.uwdf!(S, filestr, full, memmap, strict, v)
else
for (j, fname) in enumerate(files)
UW.uwdf!(S, fname, full, memmap, strict, v)
track_src!(S, j, nx, last_src)
end
end
opt_strings = string("full = ", full)
elseif fmt == "win32" || fmt =="win"
if isa(fpat, Array{String, 1})
for (j, f) in enumerate(fpat)
readwin32!(S, f, cf, jst, nx_new, nx_add, memmap, strict, v)
# here the list of files is already expanded, so this is accurate
track_src!(S, j, nx, last_src)
end
else
readwin32!(S, filestr, cf, jst, nx_new, nx_add, memmap, strict, v)
end
opt_strings = string("cf = \"", abspath(cf), "\"",
", jst = ", jst,
", nx_new = ", nx_new,
", nx_add = ", nx_add)
else
error("Unknown file format!")
end
# ===================================================================
# logging
if !fmt_is_seisio
if isempty(opt_strings)
opt_strings = string("v = ", v, ", vl = ", vl)
else
opt_strings *= string(", v = ", v, ", vl = ", vl)
end
# Update all channels
to_note = Int64[]
if fpat_is_array
for i in 1:S.n
if last_src[i] > 0
S.src[i] = files[last_src[i]]
push!(to_note, i)
end
end
else
# Update existing channels first
for i in 1:N
if length(S.x[i]) > nx[i]
S.src[i] = filestr
push!(to_note, i)
end
end
# Do new channels
chan_view = view(S.src, N+1:S.n)
fill!(chan_view , filestr)
# Note new source
append!(to_note, collect(N+1:S.n))
# note filestr used in read
if vl == false
fread_note!(S, to_note, "read_data!", fmt, filestr, opt_strings)
# For verbose logging, note all files used in the read
elseif one_file == false
files = ls(filestr)
end
end
# For verbose logging, any changed channel logs all files to :notes
if vl && (one_file == false)
for f in files
fread_note!(S, to_note, "read_data!", fmt, f, opt_strings)
end
end
end
# ===================================================================
return nothing
end
@doc (@doc read_data!)
function read_data(fmt::String, filestr::Union{String, Array{String, 1}};
full ::Bool = false, # full SAC/SEGY hdr
cf ::String = "", # win32 channel info file
jst ::Bool = true, # are sample times JST (UTC+9)?
ll ::UInt8 = 0x00, # auto-set location field in :id?
memmap ::Bool = false, # use mmap? (DANGEROUS)
nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
nx_new ::Int64 = KW.nx_new, # new channel samples
strict ::Bool = false, # strict channel matching
swap ::Bool = false, # do byte swap?
v ::Integer = KW.v, # verbosity level
vl ::Bool = false # verbose logging
)
S = SeisData()
read_data!(S, fmt, filestr,
full = full,
cf = cf,
jst = jst,
ll = ll,
memmap = memmap,
nx_add = nx_add,
nx_new = nx_new,
strict = strict,
swap = swap,
v = v,
vl = vl
)
return S
end
function read_data(filestr::Union{String, Array{String, 1}};
full ::Bool = false, # full SAC/SEGY hdr
cf ::String = "", # win32 channel info file
jst ::Bool = true, # are sample times JST (UTC+9)?
ll ::UInt8 = 0x00, # auto-set location field in :id?
memmap ::Bool = false, # use mmap? (DANGEROUS)
nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
nx_new ::Int64 = KW.nx_new, # new channel samples
strict ::Bool = false, # strict channel matching
v ::Integer = KW.v, # verbosity level
vl ::Bool = false # verbose logging
)
if isa(filestr, String)
if safe_isfile(filestr)
g = guess(filestr)
else
files = ls(filestr)
g = guess(files[1])
end
else
g = guess(filestr[1])
end
S = SeisData()
read_data!(S, g[1], filestr,
full = full,
cf = cf,
jst = jst,
ll = ll,
memmap = memmap,
nx_add = nx_add,
nx_new = nx_new,
strict = strict,
swap = g[2],
v = v,
vl = vl
)
return S
end
function read_data!(S::GphysData, filestr::Union{String, Array{String, 1}};
full ::Bool = false, # full SAC/SEGY hdr
cf ::String = "", # win32 channel info file
jst ::Bool = true, # are sample times JST (UTC+9)?
ll ::UInt8 = 0x00, # auto-set location field in :id?
memmap ::Bool = false, # use mmap? (DANGEROUS)
nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
nx_new ::Int64 = KW.nx_new, # new channel samples
strict ::Bool = false, # strict channel matching
v ::Integer = KW.v, # verbosity level
vl ::Bool = false # verbose logging
)
if isa(filestr, String)
if safe_isfile(filestr)
g = guess(filestr)
else
files = ls(filestr)
g = guess(files[1])
end
else
g = guess(filestr[1])
end
read_data!(S, g[1], filestr,
full = full,
cf = cf,
jst = jst,
ll = ll,
memmap = memmap,
nx_add = nx_add,
nx_new = nx_new,
strict = strict,
swap = g[2],
v = v,
vl = vl
)
return S
end
export read_meta, read_meta!
@doc """
S = read_meta(fmt, filestr [, keywords])
read_meta!(S, fmt, filestr [, keywords])
Generic wrapper for reading channel metadata (i.e., instrument parameters, responses). Reads metadata in file format `fmt` matching file pattern `filestr` into `S`.
This function is fully described in the official documentation at https://seisio.readthedocs.io/ under subheading **Metadata Files**.
See also: `SeisIO.KW`, `get_data`, `read_data`
""" read_meta!
function read_meta!(S::GphysData, fmt::String, fpat::Union{String, Array{String,1}};
memmap ::Bool = false , # use Mmap.mmap? (unsafe)
msr ::Bool = false , # read as MultiStageResp?
s ::TimeSpec = "0001-01-01T00:00:00" , # Start
t ::TimeSpec = "9999-12-31T12:59:59" , # End or Length (s)
units ::Bool = false , # fill in units of CoeffResp stages?
v ::Integer = KW.v , # verbosity level
)
N = S.n
files = Array{String, 1}(undef, 0)
hashes = zeros(UInt64, S.n)
fpat_is_array = isa(fpat, Array{String, 1})
opts = string("msr=", msr, ", ",
"s=\"", s, "\", ",
"t=\"", t, "\", ",
"units=", units, ", ",
"v=", KW.v, ")" )
if fpat_is_array
one_file = false
for f in fpat
ff = abspath(f)
if safe_isfile(ff)
push!(files, ff)
else
append!(files, ls(ff))
end
end
else
filestr = abspath(fpat)
one_file = safe_isfile(filestr)
if one_file == false
append!(files, ls(filestr))
else
push!(files, filestr)
end
end
isempty(files) && error("No valid files to read!")
if fmt == "resp"
read_seed_resp!(S, files, memmap, units)
else
for fname in files
if fmt == "dataless"
append!(S, read_dataless(fname, memmap=memmap, s=s, t=t, v=v, units=units))
elseif fmt == "sacpz"
read_sacpz!(S, fname, memmap=memmap)
elseif fmt == "sxml"
append!(S, read_sxml(fname, s, t, memmap, msr, v))
else
error("Unknown file format!")
end
track_hdr!(S, hashes, fmt, fname, opts)
end
end
return nothing
end
@doc (@doc read_meta!)
function read_meta(fmt::String, fpat::Union{String, Array{String,1}};
memmap ::Bool = true , # use Mmap.mmap? (unsafe)
msr ::Bool = false , # read as MultiStageResp?
s ::TimeSpec = "0001-01-01T00:00:00" , # Start
t ::TimeSpec = "9999-12-31T12:59:59" , # End or Length (s)
units ::Bool = false , # fill in units of CoeffResp stages?
v ::Integer = KW.v , # verbosity level
)
S = SeisData()
read_meta!(S, fmt, fpat, memmap=memmap, msr=msr, s=s, t=t, units=units, v=v)
return S
end
export read_quake
@doc """
Ev = read_quake(fmt, file [, keywords])
Read data in file format `fmt` from `file` into SeisEvent object `Ev`.
* Formats: suds, qml, uw
* Keywords: full, v
Note: because earthquake data are usually discrete, self-contained files, no "in-place" version of `read_quake` exists, and `read_quake` doesn't support wildcards in the file string.
See also: `read_data`, `get_data`, `read_meta`, `UW.readuwevt`
""" read_quake
function read_quake(fmt::String, fname::String;
full ::Bool = false, # full header
v ::Integer = KW.v # verbosity level
)
if fmt == "suds"
Ev = SUDS.readsudsevt(fname, full=full, v=v)
elseif fmt == "uw"
Ev = UW.readuwevt(fname, full=full, v=v)
elseif fmt in ("qml", "quakeml")
hdr, source = read_qml(fname)
Ev = SeisEvent(hdr = hdr[1], source = source[1])
end
return Ev
end
printstyled("Tests complete. Cleaning up...\n", color=:light_green)
flush(out)
close(out)
for fpat in ("*.mseed", "*.SAC", "*.sac", "*.geocsv", "*.xml", "*.h5", "FDSNevq.log")
try
files = ls(fpat)
for f in files
rm(f)
end
catch err
println("Attempting to delete ", fpat, " threw error: ", err)
end
end
using SeisIO, SeisIO.Quake, SeisIO.SeisHDF
import Printf
# US FDSN example: 5 stations, 2 networks, all channels, last 600 seconds
println(stdout, "Some real data acquisition examples...\n\n")
println(stdout, "preliminary: change directory")
path = Base.source_dir()
println(stdout, "cd(", path, ")")
cd(path)
printstyled(stdout, "FDSN get_data\n", color=:green, bold=true)
CHA = "CC.PALM, UW.HOOD, CC.TIMB, CC.HIYU, UW.TDH"
s = -600
t = u2d(time())
printstyled(stdout, "Command: ", color=:green)
println(stdout, "S_fdsn = get_data(\"FDSN\", \"", CHA, "\", src=\"IRIS\", s=", s, ", t=", t, ")")
S_fdsn = get_data("FDSN", CHA, src="IRIS", s=s, t=t)
printstyled(stdout, "Results: ", color=:green)
println(stdout, "S_fdsn")
show(S_fdsn)
# IRIS example: 6 channels, 30 minutes
printstyled(stdout, "\n\nIRIS get_data\n", color=:green, bold=true)
STA = ["CC.TIMB..BHE", "CC.TIMB..BHN", "CC.TIMB..BHZ", "UW.HOOD..HHE", "UW.HOOD..HHN", "UW.HOOD..HHZ"]
st = -3600
en = -1800
printstyled(stdout, "Command: ", color=:green)
println(stdout, "S_iris = get_data(\"IRIS\", ", STA, ", s=", st, ", t=", en, ")")
S_iris = get_data("IRIS", STA, s=st, t=en)
printstyled(stdout, "Results: ", color=:green)
println(stdout, "S_iris")
show(S_iris)
# The Tohoku-Oki great earthquake, from IRIS FDSN, recorded by boreholes in WA (USA)
printstyled(stdout, "\n\nFDSNevt\n", color=:green, bold=true)
printstyled(stdout, "Command: ", color=:green)
println(stdout, "S_evt = FDSNevt(\"201103110547\", \"PB.B004..EH?,PB.B004..BS?,PB.B001..BS?,PB.B001..EH?\")")
S_evt = FDSNevt("201103110547", "PB.B004..EH?,PB.B004..BS?,PB.B001..BS?,PB.B001..EH?")
printstyled(stdout, "Results: ", color=:green)
println(stdout, "S_evt")
show(S_evt)
# SeisComp3 SeedLink session, IRIS server, TIME mode
printstyled(stdout, "\n\nSeedLink\n", color=:green, bold=true)
sta = "UW.GRUT,UW.H1K,UW.MDW"
s1 = -120
t1 = 120
printstyled(stdout, "Commands: ", color=:green)
println(stdout, "S_sl = seedlink(\"TIME\", \"", sta, "\", s=", s1, ", t=", t1, ")")
S_sl = seedlink("TIME", sta, s=s1, t=t1)
println(stdout, " seedlink!(S_sl, \"DATA\", \"SampleFiles/SL_long_test.conf\")")
seedlink!(S_sl, "DATA", "SampleFiles/SL_long_test.conf")
println(stdout, " sleep(30)")
sleep(30)
println(stdout, " for conn in S_sl.c; close(conn); end")
for conn in S_sl.c; close(conn); end
printstyled(stdout, "Results: ", color=:green)
println(stdout, "S_sl")
show(S_sl)
printstyled(stdout, "\n\n\nNote: ", color=:white, bold=true)
println(stdout, "ALL data from these examples can be written to file.")
printstyled(stdout, "wseis(\"fname.seis\", S)", color=7)
println(stdout, " write S to low-level SeisIO native format in file fname.seis")
printstyled(stdout, "writesac(S)", color=7)
println(stdout, " write S to SAC files with auto-generated names")
printstyled(stdout, "write_hdf5(\"fname.h5\", S)", color=7)
println(stdout, " write S to ASDF (HDF5) file \"fname.h5\"")
restr_path = Base.source_dir() * "/SampleFiles/Restricted/"
if isdir(restr_path) == false
for i in ["/data/", "/data2"]
restr_dir = i * "SeisIO-TestFiles/SampleFiles/Restricted"
if isdir(restr_dir)
try
run(`cp -r $restr_dir SampleFiles/`)
println("copied SampleFiles/Restricted/ from ", i)
catch err
@warn(err)
end
end
end
else
println("nothing to copy, SampleFiles/Restricted/ exists")
end
try
if Sys.iswindows()
println("cmd /c deltree SampleFiles/")
run(`cmd /c deltree SampleFiles/`)
else
println("rm -rf SampleFiles/")
run(`rm -rf SampleFiles/`)
end
catch err
println("Attempting to delete files failed! Error thrown: ", err)
end
import SeisIO
import SeisIO: get_svn
# =====================================================================
# Setup
@info("Please allow 20 minutes for all tests to execute.")
cd(dirname(pathof(SeisIO))*"/../test")
if isdir("SampleFiles") == false
get_svn("https://github.com/jpjones76/SeisIO-TestData/trunk/SampleFiles", "SampleFiles")
end
include("local_restricted.jl")
include("test_helpers.jl")
# Announce test begin
test_start = Dates.now()
ltn = 48
printstyled(stdout,
string(test_start, ": tests begin, path = ", path, ", has_restricted = ",
has_restricted, ", keep_log = ", keep_log, ", keep_samples = ",
keep_samples, "\n"),
color=:light_green,
bold=true)
# =====================================================================
# Run all tests
# grep "include(joinpath" runtests.jl | awk -F "(" '{print $3}' | awk -F "," {'print $1'}
for d in ["CoreUtils", "Types", "RandSeis", "Utils", "NativeIO", "DataFormats", "SEED", "Processing", "Nodal", "Quake", "Web"]
ld = length(d)
ll = div(ltn - ld - 2, 2)
lr = ll + (isodd(ld) ? 1 : 0)
printstyled(string("="^ll, " ", d, " ", "="^lr, "\n"), color=:cyan, bold=true)
for i in readdir(joinpath(path, d))
f = joinpath(d,i)
if endswith(i, ".jl")
printstyled(lpad(" "*f, ltn)*"\n", color=:cyan)
write(out, string("\n\ntest ", f, "\n\n"))
flush(out)
include(f)
end
end
end
# =====================================================================
# Cleanup
include("cleanup.jl")
(keep_samples == true) || include("rm_samples.jl")
keep_log || safe_rm("runtests.log")
# Announce tests end
test_end = Dates.now()
δt = 0.001*(test_end-test_start).value
printstyled(string(test_end, ": tests end, elapsed time (mm:ss.μμμ) = ",
@sprintf("%02i", round(Int, div(δt, 60))), ":",
@sprintf("%06.3f", rem(δt, 60)), "\n"),
color=:light_green,
bold=true)
tut_file = realpath(path * "/../tutorial/install.jl")
ex_file = realpath(path * "/examples.jl")
printstyled("To run the interactive tutorial in a browser, execute: include(\"",
tut_file, "\")\n", color=:cyan, bold=true)
printstyled("To run some data acquisition examples from the Julia prompt, ",
"execute: include(\"", ex_file, "\")\n", color=:cyan)
path = Base.source_dir()
for i in readdir(path * "/TestHelpers")
if endswith(i, ".jl")
include(joinpath(path, "TestHelpers", i))
end
end
printstyled(" FastIO\n", color=:light_green)
# create testfile
testfile = "test.dat"
io = open(testfile, "w")
x = rand(UInt8, 1000)
x[255] = 0x0a
write(io, x)
close(io)
function fastio_test!(io::IO)
T = [Bool, Int8, UInt8, Int16, UInt16, Float16, Int32, UInt32, Float32, Int64, UInt64, Float64]
X = Array{Any, 1}(undef, length(T))
Y = similar(X)
r = rand(1:100)
# =====================================================================
# fastpos, fasteof
@test fastpos(io) == position(io) == 0
@test fasteof(io) == eof(io) == false
seekend(io); p1=position(io); seekstart(io)
fastseekend(io); p2=fastpos(io); seekstart(io)
@test p1==p2
# =====================================================================
# test that a sequence of read, skip, and seek operations is identical
skip(io, r)
for (i,t) in enumerate(T)
X[i] = read(io, t)
end
p1 = position(io)
seek(io, r)
p2 = position(io)
seekstart(io)
fastskip(io, r)
for (i,t) in enumerate(T)
Y[i] = fastread(io, t)
end
p3 = fastpos(io)
fastseek(io, r)
p4 = fastpos(io)
@test p1==p3
@test p2==p4
for i=1:length(X)
if isnan(X[i])
@test isnan(Y[i])
else
@test X[i]==Y[i]
end
end
seekstart(io)
s1 = readline(io)
seekstart(io)
s2 = readline(io)
@test s1 == s2
return nothing
end
# =====================================================================
# done with file; read into buffer and repeat tests
printstyled(" IOStream\n", color=:light_green)
io = open(testfile, "r")
fastio_test!(io)
close(io)
printstyled(" generic IO\n", color=:light_green)
readbuf = read(testfile)
io = IOBuffer(readbuf)
fastio_test!(io)
close(io)
safe_rm(testfile)
printstyled(" diff_x!, int_x!\n", color=:light_green)
# Check that double-integration and double-differentiation are reversible
T = Float64
for i = 1:100
fs = rand(T[10, 20, 40, 50, 100])
δ = one(T)/fs
N = 10^rand(1:5)
N2 = div(N,2)
x = randn(T, N)
y = deepcopy(x)
gaps = [1, rand(2:N2), rand(N2+1:N-1), N]
diff_x!(x, gaps, fs)
int_x!(x, gaps, δ)
diff_x!(x, gaps, fs)
int_x!(x, gaps, δ)
if isapprox(x,y) == false
println("test failed (part 1) after i = ", i, " trials (length(x) = ", length(x), ")")
end
@test isapprox(x,y)
diff_x!(x, gaps, fs)
diff_x!(x, gaps, fs)
int_x!(x, gaps, δ)
int_x!(x, gaps, δ)
if isapprox(x,y) == false
println("test failed (part 2) after i = ", i, " trials (length(x) = ", length(x), ")")
end
@test isapprox(x,y)
int_x!(x, gaps, δ)
int_x!(x, gaps, δ)
diff_x!(x, gaps, fs)
diff_x!(x, gaps, fs)
if isapprox(x,y) == false
println("test failed (part 3) after i = ", i, " trials (length(x) = ", length(x), ")")
end
@test isapprox(x,y)
end
right_url = "https://github.com/jpjones76/SeisIO-TestData/trunk/SVN_test"
wrong_url = "https://github.com/jpjones76/SeisIO-TestData/trunk/DOESNT_EXIST"
svn_dir = "SVN_test"
svn_file = joinpath(svn_dir, "test")
try
get_svn(right_url, svn_dir)
@test isfile(svn_file)
str = readline(svn_file)
@test str == "."
rm(svn_file)
rm(svn_dir)
@test_throws ErrorException get_svn(wrong_url, "SVN_test")
catch err
@warn(err)
end
printstyled(" safe_isfile\n", color=:light_green)
@test safe_isfile("runtests.jl") == true
@test safe_isfile("foo.jl") == false
printstyled(" safe_isdir\n", color=:light_green)
@test safe_isdir("SampleFiles") == true
@test safe_isdir("Roms") == false
printstyled(" ls\n", color=:light_green)
@test any([occursin("test", i) for i in ls()])
coreutils_flist = [ "test_calculus.jl",
"test_get_svn.jl",
"test_ls.jl",
"test_FastIO.jl",
"test_poly.jl",
"test_read_utils.jl",
"test_time.jl",
"test_ts_timekeeping.jl",
"test_typ2code.jl" ]
S = [
"CoreUtils/",
"CoreUtils/test_ls.jl",
"CoreUtils/poo",
"SampleFiles/UW/*W",
"SampleFiles/UW/02*o",
"CoreUtils/test_*"
]
S_expect = [
coreutils_flist,
["test_ls.jl"],
String[],
["00012502123W", "99011116541W"],
["02062915175o", "02062915205o"],
coreutils_flist,
]
# Test that ls returns the same files as `ls -1`
for (n,v) in enumerate(S)
files = String[splitdir(i)[2] for i in ls(v)]
# if Sys.iswindows() == false
expected = S_expect[n]
@test sort(files) == sort(expected)
# end
[@test isfile(f) for f in ls(v)]
end
println("arch = ", Sys.ARCH)
arch_allowed = Sys.ARCH == :x86_64
# Arm64 = :aarch64
# Test that ls invokes find_regex under the right circumstances
if arch_allowed
@test change_sep(ls(S[5])) == change_sep(regex_find("SampleFiles/", r"02.*o$"))
end
if has_restricted && arch_allowed
println("extended ls tests")
T = path .* [
"/SampleFiles/*",
"/SampleFiles/Restricted/*.cnt",
"/SampleFiles/Restricted/2014092709*cnt"
]
T_expect = [540, 63, 60]
# Test that ls finds the same number of files as bash `ls -1`
for (n,v) in enumerate(T)
files = ls(v)
@test (isempty(files) == false)
if n == 1
@test length(files) ≥ T_expect[n]
else
@test length(files) == T_expect[n]
end
[@test isfile(f) for f in files]
end
# Test that ls invokes find_regex under the right circumstances
@test change_sep(ls(T[1])) == change_sep(regex_find("SampleFiles", r".*$"))
@test change_sep(ls(T[3])) == change_sep(regex_find("SampleFiles", r"Restricted/2014092709.*cnt$"))
else
printstyled(" extended ls tests skipped. (files not found; is this Appveyor?)\n", color=:green)
end
if Sys.iswindows()
@test safe_isfile("http://google.com") == false
@test safe_isdir("http://google.com") == false
end
printstyled(" poly\n", color=:light_green)
import SeisIO:polyfit, polyval
N = 1000
C = 2.0
P = 3
at = 1.0e-5
t = collect(1.0:1.0:Float64(N))
x = C.*t
pf = polyfit(t, x, 1)
@test isapprox(pf[1], C, atol=at)
@test isapprox(pf[2], 0.0, atol=at)
pf = polyfit(t, x, 3)
@test isapprox(pf[1], 0.0, atol=at)
@test isapprox(pf[2], 0.0, atol=at)
@test isapprox(pf[3], C, atol=at)
@test isapprox(pf[4], 0.0, atol=at)
pv = polyval(pf, t)
@test isapprox(pv, x)
y = t.^P
pf = polyfit(t, y, P)
@test isapprox(pf[1], 1.0, atol=at)
for i = 2:P+1
@test isapprox(pf[i], 0.0, atol=at)
end
# this is the current response of CC.VALT..BHE as of 2014,079,00:00:00
resp = PZResp(a0 = 9.092142f11, f0 = 1.0f0,
p = Complex{Float32}[-0.1486+0.1486im, -0.1486-0.1486im, -391.96+850.69im, -391.96-850.69im, -471.24+0.0im, -2199.1+0.0im],
z = Complex{Float32}[0.0+0.0im, 0.0+0.0im])
# test: recover a0 from poles and zeroes in response file using above poly, polyval, polyfit functions
T = typeof(resp.a0)
Z = poly(resp.z)
P = poly(resp.p)
s = Complex{T}(2*pi*im*resp.f0)
a0 = one(T)/T(abs(polyval(Z, s)/polyval(P, s)))
@test isapprox(a0, resp.a0)
# test: adapted from Octave
r = Float64.(collect(0:10:50))
p = poly(r)
p ./= maximum(abs.(p))
x = Float64.(collect(0:5:50))
y = polyval(p,x) + 0.25*sin.(100.0*x)
y2 = similar(y)
for i = 1:length(y)
y2[i] = polyval(p, x[i]) + 0.25*sin(100.0*x[i])
end
pf = polyfit(x, y, length(r))
y3 = polyval(pf, x)
y_expect = [0.00000, -1.34741, 0.20672, 0.16168, 0.23251, -0.45550, 0.05480, 0.47582, -0.17088, -0.99408, -0.24699]
y3_expect = [-0.021348, -1.234641, -0.027165, 0.412554, 0.035480, -0.273558, -0.034148, 0.358771, 0.031322, -1.105561, -0.225033]
@test isapprox(y, y_expect, atol=1.0e-5)
@test isapprox(y2, y_expect, atol=1.0e-5)
@test isapprox(y3, y3_expect, atol=1.0e-5)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1271 | import SeisIO: fill_id!, checkbuf!, checkbuf_strict!, checkbuf_8!, fillx_i4!, fillx_i8!, fillx_i16_le!, fillx_i16_be!, fillx_i24_be!, fillx_i32_le!, fillx_i32_be!, fillx_u32_be!, fillx_u32_le!
# fill_id!(id::Array{UInt8,1},
# checkbuf!(buf::Array{UInt8,1},
# checkbuf!(buf::AbstractArray,
# checkbuf_strict!(buf::AbstractArray,
# checkbuf_8!(buf::Array{UInt8,1},
printstyled(" low-level read utils\n", color=:light_green)
nx = 4
buf = Array{UInt8, 1}(undef, 4nx)
xl = Array{Float32, 1}(undef, nx)
xb = similar(xl)
yl = rand(UInt32, 4)
yb = bswap.(yl)
buf .= reinterpret(UInt8, yl)
fillx_u32_le!(xl, buf, nx, 0)
fillx_u32_be!(xb, buf, nx, 0)
@test Float32.(yl) == xl
@test Float32.(yb) == xb
# Everything else is tested in readers
buf = getfield(BUF, :buf)
x = getfield(BUF, :int32_buf)
nx = 256
os = 3
checkbuf_8!(buf, 65536)
checkbuf_8!(buf, 4*(os + nx))
checkbuf!(x, os + nx)
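# Added note: in the fillx_* calls below, `os` offsets the destination array;
# the decoded samples land in x[os+1 : os+nx].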
# test fillx_i16_le!
y = rand(Int16, nx)
copyto!(buf, 1, reinterpret(UInt8, y), 1, 2*nx)
fillx_i16_le!(x, buf, nx, os)
@test x[1+os:nx+os] == y
# test fillx_i32_le!
y = rand(Int32, nx)
copyto!(buf, 1, reinterpret(UInt8, y), 1, 4*nx)
fillx_i32_le!(x, buf, nx, os)
@test x[1+os:nx+os] == y
fillx_i32_be!(x, buf, nx, os)
@test x[1+os:nx+os] == bswap.(y)
resize!(x, 100)
nothing
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 13057 | printstyled(" time\n", color=:light_green)
# Timestamp
t0 = time()
ts0 = String(split(string(u2d(t0)), '.')[1])
ts1 = String(split(timestamp(t0), '.')[1])
ts2 = String(split(timestamp(u2d(t0)), '.')[1])
ts3 = String(split(timestamp(string(u2d(t0))), '.')[1])
ts4 = String(split(timestamp(DateTime(string(u2d(t0)))), '.')[1])
@test ts1 == ts2 == ts3 == ts4 == ts0
# j2md
@test j2md(1, 1) == (1,1)
@test j2md(2000, 1) == (1,1)
@test j2md(2000, 60) == (2,29)
@test j2md(2000, 366) == (12,31)
@test_throws BoundsError j2md(2000, 367)
@test j2md(2012, 366) == (12,31)
@test j2md(2013, 60) == (3,1)
@test j2md(2015, 60) == (3,1)
@test j2md(2015, 365) == (12, 31)
@test j2md(2016, 61) == (3,1)
@test_throws BoundsError j2md(2013, 366)
@test_throws BoundsError j2md(2015, 366)
@test j2md(2100, 60) == (3,1)
# md2j
@test md2j(2000,1,1) == 1
@test md2j(2000,2,29) == 60
@test md2j(2013,3,1) == 60
@test md2j(2100,3,1) == 60
@test md2j(2001,1,1) == 1
@test 60 == md2j(2015,3,1)
@test 61 == md2j(2016,3,1)
@test 365 == md2j(2015,12,31)
@test 365 == md2j(1900,12,31)
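# Added round-trip example: day 152 of 2019 is June 1 (the same date/day-of-year
# pair used by the t_arr! tests below).
@test j2md(2019, 152) == (6, 1)
@test md2j(2019, 6, 1) == 152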
# parsetimewin
t1 = t0 + 86400.0
dt0 = u2d(t0)
dt1 = u2d(t1)
st0 = string(dt0)
st1 = string(dt1)
d0, d1 = parsetimewin(dt0, dt1); @test d1 > d0 # dt, dt
d0, d1 = parsetimewin(dt0, t1); @test d1 > d0 # dt, r
d0, d1 = parsetimewin(dt0, st1); @test d1 > d0 # dt, s
d0, d1 = parsetimewin(t0, dt1); @test d1 > d0 # r, dt
d0, d1 = parsetimewin(t0, t1); @test d1 > d0 # r, r
d0, d1 = parsetimewin(t0, st1); @test d1 > d0 # r, s
d0, d1 = parsetimewin(st0, dt1); @test d1 > d0 # s, dt
d0, d1 = parsetimewin(st0, t1); @test d1 > d0 # s, r
d0, d1 = parsetimewin(st0, st1); @test d1 > d0 # s, s
# Checking that parsetimewin sorts correctly; type mismatch intentional
d0, d1 = parsetimewin(0, -600); @test d1 > d0
d0, d1 = parsetimewin(-600.0, 0); @test d1 > d0
d0, d1 = parsetimewin(600, 0.0); @test d1 > d0
d0, d1 = parsetimewin(0.0, 600.0); @test d1 > d0
@test ≈(600000, (DateTime(d1)-DateTime(d0)).value)
d0, d1 = parsetimewin("2016-02-29T23:30:00", "2016-03-01T00:30:00")
@test ≈(3600000, (DateTime(d1)-DateTime(d0)).value)
t = DateTime(now())
s = t-Hour(2)
d0, d1 = parsetimewin(s, t)
@test ≈(7200000, (DateTime(d1)-DateTime(d0)).value)
# t_collapse, t_expand
ts = 1451606400000000
T = Int64[1 ts; 100001 30000000; 250001 12330000; 352303 99000000; 360001 0]
fs = 100.0
Δ = round(Int64, sμ/fs)
t_long = t_expand(T, fs)
@test ≈(T, t_collapse(t_long, fs))
T1 = hcat(cumsum(ones(Int64,size(T,1))), cumsum(T[:,2]))
fs1 = 0.0
@test ≈(T1, t_collapse(t_expand(T1, fs1), fs1))
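# Added worked example of the two-column time matrix: the first row holds the
# start time in μs, and each later row [i g] marks a gap of g μs at sample i
# (the final row closes the segment; its gap is 0 here). With fs = 100.0
# (Δ = 10000 μs) this expands to the per-sample times shown in the t_win
# comment block further down.
t_demo = [1 0; 6 980000; 8 100000; 10 0]
@test t_expand(t_demo, 100.0) == [0, 10000, 20000, 30000, 40000, 1030000, 1040000, 1150000, 1160000, 1170000]
@test t_collapse(t_expand(t_demo, 100.0), 100.0) == t_demo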
# starttime
printstyled(stdout, " starttime\n", color=:light_green)
@test starttime(T, fs) == first(t_long)
@test starttime(T, Δ) == starttime(T, fs)
@test starttime(Array{Int64,2}(undef, 0, 0), Δ) == 0
@test starttime(Array{Int64,2}(undef, 0, 0), 100Δ) == 0
@test starttime(Array{Int64,2}(undef, 0, 0), fs) == 0
# why starttime exists: it handles segments that aren't in chronological order
printstyled(stdout, " non-chronological\n", color=:light_green)
Tnco = Int64[1 ts; 1000 -2000Δ; 120000 0]
t_long = t_expand(Tnco, fs);
@test minimum(t_long) != first(t_long)
@test starttime(Tnco, fs) == starttime(Tnco, Δ) == ts - 1001Δ == minimum(t_expand(Tnco, fs))
# endtime
printstyled(stdout, " endtime\n", color=:light_green)
T = Int64[1 ts; 100001 30000000; 250001 12330000; 352303 99000000; 360001 0]
t_long = t_expand(T, fs)
@test endtime(T, fs) == last(t_long)
@test endtime(T, Δ) == endtime(T, fs)
@test endtime(Array{Int64,2}(undef, 0, 0), Δ) == 0
@test endtime(Array{Int64,2}(undef, 0, 0), 100Δ) == 0
@test endtime(Array{Int64,2}(undef, 0, 0), fs) == 0
# endtime must also handle segments that aren't in chronological order
printstyled(stdout, " non-chronological\n", color=:light_green)
nx = 120000
igap = 1000
tgap = 10000
Tnco = Int64[1 ts; nx-2000 tgap*Δ; nx-(igap-1) -8000Δ; nx 0]
t_long = t_expand(Tnco, fs);
@test maximum(t_long) != last(t_long)
@test endtime(Tnco, fs) == endtime(Tnco, Δ) == ts + (nx-igap)*Δ + (tgap-1)*Δ == maximum(t_long)
printstyled(stdout, " t_win, w_time\n", color=:light_green)
printstyled(stdout, " faithful representation of gaps\n", color=:light_green)
fs = 100.0
Δ = round(Int64, sμ/fs)
t = [1 0; 6 980000; 8 100000; 10 0]
# t1 = t_expand(t, fs)
# 0
# 10000
# 20000
# 30000
# 40000
# 1030000
# 1040000
# 1150000
# 1160000
# 1170000
t2 = t_win(t, Δ)
# first last
# sample sample
# time time
# 0 40000 ===> so that collect(w[i,1]:Δ:w[i,2]) for each window == t_expand
# 1030000 1040000
# 1150000 1170000
#
@test t2[1,2] == 40000
@test t2[2,:] == [1030000, 1040000]
@test t2[3,:] == [1150000, 1170000]
printstyled(stdout, " arbitrary windows containing gaps\n", color=:light_green)
t = [1 999999998990000; 101 10000; 297 0]
@test w_time(t_win(t, Δ), Δ) == t
t = [1 999999998990000; 101 10000; 297 1000000; 303 40000; 500 1000000; 10000 0]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " length-0 gap\n", color=:light_green)
t = [1 0; 6 0; 8 0; 10 0]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " negative gap\n", color=:light_green)
t = [1 0; 6 -2Δ; 8 -10Δ; 10 0]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " single-point gap\n", color=:light_green)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 0]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " non-null gap at end\n", color=:light_green)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 Δ]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " negative gap at end\n", color=:light_green)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 -5Δ]
@test w_time(t_win(t, Δ), Δ) == t
printstyled(stdout, " t_bounds\n", color=:light_green)
function test_t_bounds(t::Array{Int64, 2}, Δ::Int64)
(t0, t1) = t_bounds(t, Δ)
W = t_win(t, Δ)
@assert t0 == minimum(W)
@assert t1 == maximum(W)
return nothing
end
fs = 100.0
Δ = round(Int64, sμ/fs)
t = [1 0; 6 980000; 8 100000; 10 0]
test_t_bounds(t, Δ)
t = [1 999999998990000; 101 10000; 297 0]
test_t_bounds(t, Δ)
t = [1 999999998990000; 101 10000; 297 1000000; 303 40000; 500 1000000; 10000 0]
test_t_bounds(t, Δ)
t = [1 0; 6 0; 8 0; 10 0]
test_t_bounds(t, Δ)
t = [1 0; 6 -2Δ; 8 -10Δ; 10 0]
test_t_bounds(t, Δ)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 0]
test_t_bounds(t, Δ)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 Δ]
test_t_bounds(t, Δ)
t = [1 0; 6 2Δ; 7 4Δ; 8 Δ; 10 -5Δ]
test_t_bounds(t, Δ)
printstyled(stdout, " mktime\n", color=:light_green)
fv = 0.0005
iv = Array{Int32,1}([1980, 082, 10, 35, 39, 890])
(m,d) = j2md(iv[1], iv[2])
ts_0 = round(Int64, d2u(DateTime(iv[1], m, d, iv[3], iv[4], iv[5]))*sμ) +
iv[6]*1000 +
round(Int64,fv*1000.0)
ts_1 = Date(iv[1], m, d).instant.periods.value * 86400000000 +
div(Time(iv[3], iv[4], iv[5]).instant.value, 1000) +
iv[6]*1000 +
round(Int64,fv*1000.0) -
SeisIO.dtconst
ts_2 = mktime(iv[1], iv[2], iv[3], iv[4], iv[5], iv[6]*Int32(1000)) +
round(Int64, fv*1000.0)
iv[6] *= Int32(1000)
ts_3 = mktime(iv) + round(Int64, fv*1000.0)
@test ts_0 == ts_1 == ts_2 == ts_3
# timespec()
printstyled(stdout, " int2tstr, tstr2int\n", color=:light_green)
s = "2018-01-01T00:00:00.000001"
t = "2018-01-04T00:00:00.003900"
si = tstr2int(s)
ti = tstr2int(t)
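# Added note: judging from the expected counts below, loop_time counts the number
# of windows of length `ti` μs (apparently one day by default) needed to span the
# interval between its two integer times: here ~3.0 days => 4 daily, 7 half-day,
# or 73 hourly windows.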
j = loop_time(si, ti)
t = "2018-01-04T00:00:00.39"
for (n,s) in enumerate(["2018-01-01T00:00:00.000001",
"2018-01-01T00:00:00",
"2018-01-01T00:00:00.035",
"2016-02-29T00:00:00.02",
"2018-02-28T00:00:00.33"])
s_str = identity(s)
if length(s) == 19
s_str *= "."
end
s_str = rpad(s_str, 26, '0')
@test (int2tstr(tstr2int(s))) == s_str
if n < 4
@test loop_time(tstr2int(s), tstr2int(t)) == 4
@test loop_time(tstr2int(s), tstr2int(t), ti=43200000000) == 7
@test loop_time(tstr2int(s), tstr2int(t), ti=3600000000) == 73
elseif n == 4
@test loop_time(tstr2int(s), tstr2int("2016-03-02T00:00:02")) == 3
elseif n == 5
@test loop_time(tstr2int(s), tstr2int("2018-03-02T00:00:02")) == 3
end
end
printstyled(stdout, " sort_segs!\n", color=:light_green)
Δ = 20000
ts = 1583455810004000
nx = 40000
gi = 10
gl = 6
W = ts .+ Δ.*[ 0 gi-1
-gl -1
gi nx-gl-1]
W0 = deepcopy(W)
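# Added note: row 2 of W starts gl samples before ts, so sorting windows by start
# time should reorder the rows to [2, 1, 3].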
sort_segs!(W)
@test W == W0[[2,1,3], :]
t = sort_segs(w_time(W0, Δ), Δ)
@test t == w_time(W, Δ)
printstyled(stdout, " t_extend\n", color=:light_green)
printstyled(stdout, " time-series\n", color=:light_green)
nx = 6000
Δ = 10000
g = 300000
fs = 100.0
t0 = 1411776000000000
t = [1 t0; nx 0]
ts = t[1,2] + nx*Δ
t2 = [1 ts; nx 0]
# should have only 2 rows
t1 = deepcopy(t)
@test t_extend(t1, ts, nx, fs) == nothing
@test t1 == [1 t0; 2nx 0]
# should have a gap of Δ at point nx+1
@test t_extend(t, ts + Δ, nx, fs) == [1 t0; nx+1 Δ; 2nx 0]
# should correctly log the gaps at nx and nx+1
t1 = [1 t0; nx g]
@test t_extend(t1, ts+g+Δ, nx, fs) == [1 t0; nx g; nx+1 Δ; 2nx 0]
# should extend by only the expected number of samples to incorporate the new start time
for ts1 in ts : 100Δ : ts + nx
t1 = deepcopy(t)
# @test t_extend(t, ts1, 0, fs) == [1 t[1,2]; nx + div(ts1-endtime(t, Δ), Δ) 0]
@test t_extend(t1, ts1, 0, fs) == nothing
@test t1 == [1 t[1,2]; nx-1 + div(ts1-endtime(t, Δ), Δ) 0]
end
# should initialize a new time array starting at ts
@test t_extend(Array{Int64,2}(undef,0,2), ts, 0, fs) == [1 ts]
t = [1 12356; 1231 333; 14134 0]
ts_new = 8348134123
nx_new = 65536
dt = 20000
fs = 50.0
t2 = deepcopy(t)
t_extend(t2, ts_new, 0, dt)
t1 = t_extend(t, ts_new, nx_new, dt)
# 1 12356
# 1231 333
# 14135 8065441434
# 79670 0
@test size(t1) == (4,2)
@test t1[end,1] == 79670
@test endtime(t1, dt) == t_expand(t1, fs)[end]
@test t1[1:end-2,:] == t2[1:end-1,:]
t = [1 3301; 505 1200; 1024 3]
ts_new = 1181381433
nx_new = 3000
t2 = t_extend(t, ts_new, 1, dt)
t1 = t_extend(t, ts_new, nx_new, dt)
# 1 3301
# 505 1200
# 1024 3
# 1025 1160896929
# 4024 0
@test endtime(t1, dt) == t_expand(t1, fs)[end]
@test t1[1:end-1,:] == t2
t1 = Array{Int64,2}(undef, 0, 0)
t2 = Array{Int64,2}(undef, 0, 2)
@test t_extend(t1, ts_new, nx_new, dt) == t_extend(t2, ts_new, nx_new, dt) == [1 ts_new; nx_new 0]
@test t_extend(t1, ts_new, 0, dt) == t_extend(t2, ts_new, 0, dt) == [1 ts_new]
printstyled(stdout, " irregular\n", color=:light_green)
nx = 200
nn = 120
t1 = zeros(Int64, nx, 2)
t1[1:nx, 1] .= 1:nx
t1[1:nx, 2] .= sort(abs.(rand(1262304000000000:t0, nx)))
t2 = zeros(Int64, nn, 2)
t2[1:nn, 1] .= 1:nn
t2[1:nn, 2] .= sort(abs.(rand(t0+1:1583020800000000, nn)))
t1o = deepcopy(t1)
t2o = deepcopy(t2)
t3 = t_extend(t1, t2[1,2], nn, 0.0)
@test t1 == t1o
@test t2 == t2o
@test size(t3, 1) == nx+nn
@test t3[:, 1] == collect(1:1:nx+nn)
@test t3[nx+1, 2] == t2[1,2]
# These were in test_time_utils.jl
buf = BUF.date_buf
dstr = "2019-06-01T03:50:04.02"
dt = DateTime(dstr)
t = round(Int64, d2u(dt)*sμ)
nx = 12345
# Tests for mk_t
printstyled(" mk_t\n", color=:light_green)
C = randSeisChannel(s=true)
C.x = randn(nx)
mk_t!(C, nx, t)
@test C.t == [1 t; nx 0]
@test C.t == mk_t(nx, t)
# Tests for t_arr
printstyled(" t_arr!\n", color=:light_green)
t_arr!(buf, t)
@test buf[1:6] == Int32[2019, 152, 3, 50, 4, 20]
t = round(Int64, d2u(DateTime("2020-03-01T13:49:00.3"))*sμ)
t_arr!(buf, t)
@test buf[1:6] == Int32[2020, 61, 13, 49, 0, 300]
t = round(Int64, d2u(DateTime("2020-03-01T13:49:00.030"))*sμ)
t_arr!(buf, t)
@test buf[1:6] == Int32[2020, 61, 13, 49, 0, 30]
t = round(Int64, d2u(DateTime("2020-03-01T13:49:00.003"))*sμ)
t_arr!(buf, t)
@test buf[1:6] == Int32[2020, 61, 13, 49, 0, 3]
# Tests for x_inds
printstyled(" x_inds\n", color=:light_green)
function test_xinds(t::Array{Int64, 2})
# Check that the sets of indices match what's in :t
xi = x_inds(t)
nt = size(t, 1)
@test xi[1:nt-1,1] == t[1:nt-1,1]
# Check that the length is what we expect
@test size(xi, 1) == nt - (t[nt,2] == 0 ? 1 : 0)
# Check xi and t_win(t, dt) have the same content
for i in 1:size(xi,1)-1
@test xi[i,2] == xi[i+1,1]-1
for dt in ([250, 500, 1000, 10000, 20000, 100000])
w = t_win(t, dt)
@test xi[i,2]-xi[i,1] == div(w[i,2]-w[i,1], dt)
end
end
end
ts = 1583455810004000
for t in [[1 ts; 639000 0],
[1 ts; 639000 57425593],
[1 ts; 638999 300; 639000 57425593],
[1 ts; 638998 1234; 638999 300; 639000 57425593],
[1 ts; 638998 -1234525827; 639000 0],
[1 ts; 638998 1234; 639000 0],
[1 ts; 8 1234; 9 100000; 10 134123; 11 12; 2400 -3; 3891 3030303; 3892 -30000000; 3893 1234; 3894 57425593],
[1 ts; 3894 0],
[1 ts; 10 13412300; 11 123123123; 184447 3030303; 184448 -30000000; 184449 12300045; 184450 57425593]]
test_xinds(t)
end
for i in 1:100
t = breaking_tstruct(ts, 39000, 100.0)
test_xinds(t)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2854 | printstyled(" time test with synthetic timeseries data\n", color=:light_green)
ts = round(Int64, sμ*d2u(DateTime("2020-03-01T00:00:00")))
fs = 100.0
gain = 32.0
id = "XX.STA..BHZ"
loc = GeoLoc( lat = 45.560121,
lon = -122.617068,
el = 53.04 )
Δ = 10000
files = ["1.sac", "2.sac", "3.sac", "4.sac", "5.sac", "6.sac", "7.sac"]
tos = [ 0.0, 10.0, 20.01, 20.02, 30.0, 40.0, 60.0]
nx = [ 1000, 999, 1, 100, 1, 1000, 1000]
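# Added note: seven single-segment SAC files with the start offsets (s) and
# lengths (samples) above are written, then read back one at a time so that each
# append exercises a different segment-extension case (see the case numbers in
# the comments below).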
C = SeisChannel(id = id,
gain = gain,
loc = loc,
fs = fs )
S0 = SeisData()
printstyled(" creating files\n", color=:light_green)
for i = 1:length(files)
fname = files[i]
C.x = randn(Float32, nx[i])
C.t = [1 ts+round(Int64, tos[i]*1000000); nx[i] 0]
push!(S0, C)
writesac(C, fname=fname)
end
# Read back in
printstyled(" reading to test all channel-extension cases\n", color=:light_green)
S = SeisData()
read_data!(S, "sac", files[1])
xi = cumsum(nx)
δt = round.(Int64, tos.*sμ) .+ (nx.-1).*Δ
# Expect:
# no gaps, precise read
@test S.t[1] == [1 ts; nx[1] 0]
@test endtime(S.t[1], S.fs[1]) == Δ*(nx[1]-1)+ts
@test S0.x[1] == S.x[1]
@test S0.gain[1] == S.gain[1]
# append file 2 to create case 1
read_data!(S, "sac", files[2])
@test S.t[1] == [1 ts; xi[2] 0]
@test S0.x[2] == S.x[1][nx[1]+1:xi[2]]
# append file 3 to create case 4
read_data!(S, "sac", files[3])
@test S.t[1] == [1 ts; xi[3] 20000]
@test S0.x[3] == S.x[1][xi[2]+1:xi[3]]
# append file 4 to create case 2
read_data!(S, "sac", files[4])
@test S.t[1] == [1 ts; xi[3] 20000; xi[4] 0]
@test S0.x[4] == S.x[1][xi[3]+1:xi[4]]
# append file 5 to create case 3
δt0 = round(Int64, sμ*(tos[5]-tos[4]-nx[4]/fs)) # the +Δ and -Δ terms cancel here
read_data!(S, "sac", files[5])
@test S.t[1] == [1 ts; xi[3] 20000; xi[5] δt0]
@test S0.x[5] == S.x[1][xi[4]+1:xi[5]]
# append file 6 to create case 6
t_old = copy(S.t[1])
δt0 = round(Int64, sμ*(tos[6]-tos[5]-nx[5]/fs)) # the +Δ and -Δ terms cancel here
read_data!(S, "sac", files[6])
@test S.t[1] == vcat(t_old, [xi[5]+1 δt0; xi[6] 0])
@test S0.x[6] == S.x[1][xi[5]+1:xi[6]]
# append file 7 to create case 5
t_old = copy(S.t[1])
read_data!(S, "sac", files[7])
δt0 = round(Int64, sμ*(tos[7]-tos[6]-nx[7]/fs))
@test S.t[1] == vcat(t_old[1:end-1,1:2], [t_old[end,1]+1 δt0], [xi[7] 0])
# finally, did we get it all right?
printstyled(" final checks\n", color=:light_green)
T = t_collapse(vcat([t_expand(i, fs) for i in S0.t]...), fs)
X = vcat([i for i in S0.x]...)
@test T == S.t[1]
@test X == S.x[1]
@test gain == S.gain[1]
@test fs == S.fs[1]
@test id == S.id[1]
@test ≈(loc.lat, S.loc[1].lat, rtol=2eps(Float32))
@test ≈(loc.lon, S.loc[1].lon, rtol=2eps(Float32))
@test ≈(loc.el, S.loc[1].el, rtol=2eps(Float32))
printstyled(" cleanup\n", color=:light_green)
for i in files
safe_rm(i)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 239 | printstyled(" code2typ, typ2code\n", color=:light_green)
for c = 0x00:0xfe
d = (try
typ2code(code2typ(c))
catch
0xff
end)
if d != 0xff
@test c == d
end
end
@test_throws ErrorException code2typ(0x02)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 14854 | printstyled(" ASDF\n", color=:light_green)
printstyled(" read_hdf5\n", color=:light_green)
hdf = path*"/SampleFiles/HDF5/2days-40hz.h5"
hdf_pat = path*"/SampleFiles/HDF5/2days-40hz.h*"
hdf_out1 = "test1.h5"
hdf_out2 = "test2.h5"
hdf_out3 = "test3.h5"
hdf_out4 = "test4.h5"
hdf_evt = path*"/SampleFiles/HDF5/example.h5"
safe_rm(hdf_out1)
safe_rm(hdf_out2)
safe_rm(hdf_out3)
safe_rm(hdf_out4)
id = "CI.SDD..HHZ"
idr = "C*.SDD..HH?"
ts = "2019-07-07T23:00:00"
te = "2019-07-08T02:00:00"
# id_to_regex
@test id_to_regex("*.*.*.*") == r".*\.*\.*\.*"
@test id_to_regex("C*.SDD..") == r"C.*\.SDD\.\."
@test id_to_regex("CI.SDD..HHZ") == r"CI\.SDD\.\.HHZ"
@test id_to_regex("C*.SDD..HH?") == r"C.*\.SDD\.\.HH."
# id_match
S2 = SeisData(SeisChannel(), SeisChannel(id="CI.SDD..HHZ"))
@test id_match(id, S2) == id_match(idr, S2) == [2]
S1 = SeisData()
read_hdf5!(S1, hdf, ts, te, id = id)
S2 = read_hdf5(hdf, ts, te, id = id)
@test S1 == S2
S2 = read_asdf(hdf, id, ts, te, true, 0)
@test S1.src[1] == abspath(hdf)
S1.src = S2.src
@test S1 == S2
# check file wildcards
S2 = read_hdf5(hdf_pat, DateTime(ts), DateTime(te), id = id)
S1.src = S2.src
@test S1 == S2
# check the default id
S2 = read_hdf5(hdf_pat, ts, te)
S1.src = S2.src
@test S1 == S2
# check that FDSN-style wildcards work
S2 = read_asdf(hdf, idr, ts, te, true, 0)
S1.src = S2.src
@test S1 == S2
# Check channel matching
S2 = SeisData(SeisChannel(id="CI.SDD..HHZ"))
read_asdf!(S2, hdf, idr, ts, te, true, 0)
S1.src = S2.src
@test S1 == S2
S2 = SeisData(SeisChannel(id="CI.SDD..HHZ",
fs=40.0,
t=[1 1562543940000000; 40 0],
x=randn(40)))
read_asdf!(S2, hdf, idr, ts, te, true, 0)
@test S1.x[1] == S2.x[1][41:end]
@test string(u2d(S2.t[1][1,2]*μs)) == "2019-07-07T23:59:00"
@test_throws ErrorException read_hdf5(hdf, ts, te, fmt="MatlabLol")
printstyled(" scan_hdf5\n", color=:light_green)
@test scan_hdf5(hdf) == ["CI.SDD"]
@test scan_hdf5(hdf, level="trace") == ["/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_", "/Waveforms/CI.SDD/StationXML"]
@test_throws ErrorException scan_hdf5(hdf, level="response")
@test_throws ErrorException scan_hdf5(hdf, fmt="MatlabLol")
# HDF event read
printstyled(" asdf_rqml\n", color=:light_green)
(H,R) = asdf_rqml(hdf_evt)
@test H[1].id == "20120404_0000041"
@test H[2].id == "20120404_0000038"
@test H[3].id == "20120404_0000039"
@test H[1].loc.lat ≈ 41.818
@test H[1].loc.lon ≈ 79.689
@test H[1].loc.dep ≈ 1.0
@test H[1].mag.val ≈ 4.4
@test H[1].mag.scale == "mb"
@test H[2].loc.lat ≈ 39.342
@test H[2].loc.dep ≈ 14.4
@test H[2].mag.val ≈ 4.3
@test H[2].mag.scale == "ML"
io = h5open(hdf_evt)
(H1, R1) = asdf_rqml(io)
@test H == H1
@test R == R1
# =========================================================
# HDF write
printstyled(" write_hdf5\n", color=:light_green)
@test_throws ErrorException write_hdf5(hdf_out1, S1, fmt="MatlabLol")
S2 = deepcopy(S1) + SeisChannel(id="XX.SDD..YYZ", fs=40.0)
write_hdf5(hdf_out1, S2)
safe_rm(hdf_out1)
# ASDF write test 1: can we write to a new file?
printstyled(" write to new file\n", color=:light_green)
write_hdf5( hdf_out1, S1 )
S2 = read_hdf5(hdf_out1, ts, te, id = id)
for f in datafields
(f in (:src, :notes)) && continue
@test isequal(getfield(S1, f), getfield(S2, f))
end
# ASDF write test 2: can we overwrite parts of an existing file?
printstyled(" add to existing file\n", color=:light_green)
ts = "2019-07-08T00:00:00"
te = "2019-07-08T02:00:00"
S3 = read_hdf5(hdf_out1, ts, te, id = id)
for i in 1:S3.n
S3.x[i] .= (2.0.*S3.x[i])
end
redirect_stdout(out) do
write_hdf5( hdf_out1, S3, ovr=true, v=3 )
push!(S3, SeisChannel(id="YY.ZZTOP.00.LEG", fs=50.0, x=randn(1024)))
# This should now fail since :x[2] has no :t[2]
# @test_throws ArgumentError write_hdf5( hdf_out1, S3, v=3 )
# This should fail
@test_throws ErrorException write_hdf5( hdf_out1, S3, ovr=true, v=3 )
# GphysChannel extension
C = S3[1]
write_hdf5( hdf_out1, C, v=3 )
safe_rm(hdf_out1)
deleteat!(S3, 2)
# Force write to channel with existing net.sta
write_hdf5(hdf_out1, S3)
ts = "2019-07-08T10:00:00"
te = "2019-07-08T12:00:00"
S4 = read_hdf5(hdf, ts, te)
write_hdf5(hdf_out1, S4, ovr=true, add=true)
end
# There should be 4 successful writes at this point
@test sum([occursin("wrote to file " * hdf_out1, S3.notes[1][i]) for i in 1:length(S3.notes[1])]) == 4
@test scan_hdf5(hdf_out1, level="trace") == [
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T00:00:00__2019-07-08T02:00:00__hhz",
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T00:00:00__2019-07-08T23:59:59.975__hhz",
"/Waveforms/CI.SDD/StationXML"
]
safe_rm(hdf_out1)
# ASDF write test 3: write with gaps
printstyled(" write to new file with gaps\n", color=:light_green)
ts = "2019-07-08T10:00:00"
te = "2019-07-08T12:00:00"
read_hdf5!(S3, hdf, ts, te)
merge!(S3)
@test S3.n == 1 # fails if MultiStageResp merge! bug is back
write_hdf5( hdf_out1, S3 )
scan3 = scan_hdf5(hdf_out1, level="trace")
@test scan3 == [
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T00:00:00__2019-07-08T02:00:00__hhz",
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T10:00:00__2019-07-08T12:00:00__hhz",
"/Waveforms/CI.SDD/StationXML"
]
# ASDF write test 4: file with multiple stations and channels
if has_restricted
S = verified_read_data("sac", path*"/SampleFiles/Restricted/20140927000000*SAC")
redirect_stdout(out) do
write_hdf5( hdf_out1, S, v=3 )
end
id = S.id[3]
ts = "2014-09-27T09:00:00"
# Test for intended read behavior
printstyled(" are written traces the right length?\n", color=:light_green)
te = "2014-09-27T10:00:00"
C = read_hdf5(hdf_out1, ts, te, id = id)[1]
@test length(C.x) == 360000 # Stop at last available sample
te = "2014-09-27T09:59:59.99"
C = read_hdf5(hdf_out1, ts, te, id = id)[1]
@test length(C.x) == 360000 # Exact
ts = "2014-09-27T09:00:00.01"
te = "2014-09-27T09:59:59.90"
C2 = read_hdf5(hdf_out1, ts, te, id = id)[1]
@test length(C2.x) == 359990 # Exact, ten samples shorter
@test C.x[2:5] == C2.x[1:4]
# test 1: are the right trace names created?
printstyled(" are the right trace names created?\n", color=:light_green)
S1 = SeisData()
read_hdf5!(S1, hdf, "2019-07-07T23:00:00", "2019-07-08T02:00:00", id = "CI.SDD..HHZ")
append!(S, S1)
safe_rm(hdf_out1)
redirect_stdout(out) do
write_hdf5(hdf_out1, S, add=true, ovr=true, v=3, tag="raw")
end
@test scan_hdf5(hdf_out1, level="trace") == [
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-07T23:59:59.975__raw",
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T00:00:00__2019-07-08T23:59:59.975__raw",
"/Waveforms/CI.SDD/StationXML",
"/Waveforms/JP.VONTA/JP.VONTA..E__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTA/JP.VONTA..H__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTA/JP.VONTA..N__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTA/JP.VONTA..U__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTA/StationXML",
"/Waveforms/JP.VONTN/JP.VONTN..E__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTN/JP.VONTN..H__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTN/JP.VONTN..N__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTN/JP.VONTN..U__2014-09-27T00:00:00__2014-09-27T23:59:59.99__raw",
"/Waveforms/JP.VONTN/StationXML"
]
# test 2: ONLY write CI.SDD..HHZ, JP.VONTA..H, JP.VONTA..N, JP.VONTA..U
printstyled(" write with a channel sublist\n", color=:light_green)
redirect_stdout(out) do
write_hdf5(hdf_out2, S, chans=[2,3,4,9], add=true, ovr=true, v=3)
end
@test scan_hdf5(hdf_out2, level="trace") == [
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-07T23:59:59.975__hhz",
"/Waveforms/CI.SDD/CI.SDD..HHZ__2019-07-08T00:00:00__2019-07-08T23:59:59.975__hhz",
"/Waveforms/CI.SDD/StationXML",
"/Waveforms/JP.VONTA/JP.VONTA..H__2014-09-27T00:00:00__2014-09-27T23:59:59.99__h",
"/Waveforms/JP.VONTA/JP.VONTA..N__2014-09-27T00:00:00__2014-09-27T23:59:59.99__n",
"/Waveforms/JP.VONTA/JP.VONTA..U__2014-09-27T00:00:00__2014-09-27T23:59:59.99__u",
"/Waveforms/JP.VONTA/StationXML"
]
# ensure the right trace data were written to these names
S1 = read_hdf5(hdf_out2, "2014-09-27T09:00:00", "2014-09-27T09:59:59.99", id="*.VONTA..*", msr=false)
S2 = S[[2,3,4]]
for f in (:id, :loc, :fs, :gain, :resp, :t, :x)
@test getfield(S1,f) == getfield(S2,f)
end
# ensure what's read back in are the correct traces with NaNs in the right places
printstyled(" trace indexing with ovr=true\n", color=:light_green)
S1 = read_hdf5(hdf_out2, "2014-09-27T08:00:00.00", "2014-09-27T10:00:00.00", id="*.VONTA..*")
for i in 1:S1.n
x = S1.x[i]
@test length(x) == 720001
@test isnan(last(x))
@test eltype(x) == Float32
for j in 1:360000
@test isnan(x[j])
end
end
# only an hour of non-NaN data; 24 hours of data created
S1 = read_hdf5(hdf_out2, "2014-09-27T00:00:00.00", "2014-09-28T00:00:00.00", id="JP.VONTA..H")
@test length(S1.x[1]) == 8640000
@test length(S1.x[1])-length(findall(isnan.(S1.x[1]))) == 360000
# data from CI.SDD should have no NaNs in this range; it's the exact range of S.x[9]
S1 = read_hdf5(hdf_out2, "2019-07-07T23:00:00", "2019-07-08T02:00:00")
@test S1.x[1] == S.x[9]
end
# HDF write with SeisEvent
Ev = Array{SeisEvent,1}(undef,3)
for i in 1:3
Ev[i] = rse_wb(3)
end
printstyled(" asdf_wqml\n", color=:light_green)
printstyled(" append mode\n", color=:light_green)
SHDR = [Ev[i].hdr for i in 1:3]
SSRC = [Ev[i].source for i in 1:3]
asdf_wqml(hdf_out2, Ev[1].hdr, Ev[1].source)
asdf_wqml(hdf_out2, Ev[2])
asdf_wqml(hdf_out2, Ev)
asdf_wqml(hdf_out2, SHDR, SSRC)
(H,R) = asdf_rqml(hdf_out2)
@test length(H) == length(R) == 8
# Check that data are appended in the right order
inds = [1,2,1,2,3,1,2,3]
for i in 1:length(H)
j = inds[i]
compare_SeisHdr(Ev[j].hdr, H[i])
compare_SeisSrc(Ev[j].source, R[i])
end
printstyled(" overwrite mode\n", color=:light_green)
asdf_wqml(hdf_out2, Ev[2].hdr, Ev[2].source, ovr=true, v=1)
(H,R) = asdf_rqml(hdf_out2)
@test length(H) == length(R) == 1
compare_SeisHdr(Ev[2].hdr, H[1])
compare_SeisSrc(Ev[2].source, R[1])
printstyled(" to new file\n", color=:light_green)
asdf_wqml(hdf_out4, SHDR, SSRC)
(H,R) = asdf_rqml(hdf_out4)
for i in 1:3
compare_SeisHdr(SHDR[i], H[i])
compare_SeisSrc(SSRC[i], R[i])
end
printstyled(" append file with incompatible QML\n", color=:light_green)
# Create incompatible QML in file
xml_evfile1 = path*"/SampleFiles/XML/fdsnws-event_2017-01-12T03-18-55Z.xml"
xml_buf = read(xml_evfile1)
io = h5open(hdf_out4, "r+")
o_delete(io, "QuakeML")
io["QuakeML"] = xml_buf
close(io)
# Now try to write to the file...does it work?
asdf_wqml(hdf_out4, SHDR, SSRC)
(H,R) = asdf_rqml(hdf_out4)
L = length(H)
H = H[L-2:L]
R = R[L-2:L]
for i in 1:3
compare_SeisHdr(SHDR[i], H[i])
compare_SeisSrc(SSRC[i], R[i])
end
printstyled(" write SeisEvent\n", color=:light_green)
printstyled(" to new file\n", color=:light_green)
write_hdf5(hdf_out3, Ev[1])
printstyled(" to existing file\n", color=:light_green)
@test_throws ErrorException write_hdf5(hdf_out3, Ev[2], fmt="GarfieldTheCat")
write_hdf5(hdf_out3, Ev[2], chans=[1,2])
printstyled(" to appended file\n", color=:light_green)
push!(Ev[3].data, EventChannel(id="AA.STA.00.YYY"))
write_hdf5(hdf_out3, Ev[3], chans=[3,4])
printstyled(" read_asdf_evt\n", color=:light_green)
EvCat = read_asdf_evt(hdf_out3, Ev[1].hdr.id, msr=false)
printstyled(" accuracy of SeisEvent i/o\n", color=:light_green)
printstyled(" single-event read\n", color=:light_green)
W = EvCat[1]
Ev1 = deepcopy(Ev[1])
compare_events(Ev1, W)
EvCat = read_asdf_evt(hdf_out3, Ev[2].hdr.id, msr=false)
W = EvCat[1]
Ev2 = deepcopy(Ev[2])
Ev2.data = Ev2.data[1:2]
compare_events(Ev2, W)
EvCat = read_asdf_evt(hdf_out3, Ev[3].hdr.id, msr=false)
W = EvCat[1]
Ev3 = deepcopy(Ev[3])
Ev3.data = Ev3.data[3]
compare_events(Ev3, W)
printstyled(" multi-event read\n", color=:light_green)
EvCat = read_asdf_evt(hdf_out3, msr=false)
compare_events(EvCat[1], Ev1)
compare_events(EvCat[2], Ev2)
compare_events(EvCat[3], Ev3)
if Sys.iswindows() == false
printstyled(" multi-file read\n", color=:light_green)
write_hdf5(hdf_out4, Ev1)
# Check logging
@test sum([occursin("wrote to file ", Ev1.data.notes[1][i]) for i in 1:length(Ev1.data.notes[1])]) == 2
@test any([occursin("wrote to file " * hdf_out3, Ev1.data.notes[1][i]) for i in 1:length(Ev1.data.notes[1])])
@test any([occursin("wrote to file " * hdf_out4, Ev1.data.notes[1][i]) for i in 1:length(Ev1.data.notes[1])])
@test any([occursin("wrote to file " * hdf_out3, Ev1.hdr.notes[i]) for i in 1:length(Ev1.hdr.notes)])
@test any([occursin("wrote to file " * hdf_out4, Ev1.hdr.notes[i]) for i in 1:length(Ev1.hdr.notes)])
@test any([occursin("wrote to file " * hdf_out3, Ev1.source.notes[i]) for i in 1:length(Ev1.source.notes)])
@test any([occursin("wrote to file " * hdf_out4, Ev1.source.notes[i]) for i in 1:length(Ev1.source.notes)])
EC2 = read_asdf_evt("test[3-4].h5", msr=true)
inds = [1, 2, 3, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 1]
for i in 1:length(EC2)
j = inds[i]
if j > 0
compare_SeisHdr(EC2[i].hdr, EvCat[j].hdr)
compare_SeisSrc(EC2[i].source, EvCat[j].source)
end
end
end
# HDF write cleanup
safe_rm(hdf_out1)
safe_rm(hdf_out2)
safe_rm(hdf_out3)
safe_rm(hdf_out4)
# asdf_waux
# write to new file
X = randn(32768, 2)
pstr = "CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_"
asdf_waux(hdf_out1, pstr, X)
# does overwrite work?
asdf_waux(hdf_out1, pstr, X)
f = h5open(hdf_out1, "r")
aux = f["AuxiliaryData"]
@test names(aux) == [ "CI.SDD" ]
Xr = read(aux[pstr])
@test Xr == X
close(f)
# can we correct for a starting / in the string?
pstr = "//" * pstr
X = randn(32768, 2)
asdf_waux(hdf_out1, pstr, X)
f = h5open(hdf_out1, "r")
aux = f["AuxiliaryData"]
@test names(aux) == [ "CI.SDD" ]
pstr = "CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_"
@test names(aux["CI.SDD"]) == [ String(split(pstr,"/")[2]) ]
Yr = read(aux[pstr])
@test Yr == X
@test Xr != Yr
close(f)
# what about path that starts with /AuxiliaryData/?
pstr = "/AuxiliaryData/" * pstr
X = randn(32768, 2)
asdf_waux(hdf_out1, pstr, X)
f = h5open(hdf_out1, "r")
aux = f["AuxiliaryData"]
@test names(aux) == [ "CI.SDD" ]
pstr = "CI.SDD/CI.SDD..HHZ__2019-07-07T00:00:00__2019-07-09T00:00:00__hhz_"
@test names(aux["CI.SDD"]) == [ String(split(pstr,"/")[2]) ]
Yr = read(aux[pstr])
@test Yr == X
@test Xr != Yr
close(f)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4838 | printstyled(" AH (Ad Hoc)\n", color=:light_green)
ah1_file = path*"/SampleFiles/AH/ah1.f"
ah1_fstr = path*"/SampleFiles/AH/ah1.*"
ahc_file = path*"/SampleFiles/AH/lhz.ah"
ah_resp = path*"/SampleFiles/AH/BRV.TSG.DS.lE21.resp"
ah2_file = path*"/SampleFiles/AH/ah2.f"
ah2_fstr = path*"/SampleFiles/AH/ah2.*"
printstyled(" v1\n", color=:light_green)
redirect_stdout(out) do
S = verified_read_data("ah1", ah1_file, v=3)
S = verified_read_data("ah1", ah1_file, full=true)
S = verified_read_data("ah1", ah1_fstr, full=true, v=3, strict=false)
@test S.n == 3
S = read_data("ah1", ah1_fstr, full=true, v=3, strict=true)
@test S.n == 4
@test S.fs[1] == 4.0
@test isapprox(S.gain[1], 64200.121094)
@test isapprox(S.loc[1].lat, 35.599899)
@test isapprox(S.loc[1].lon, -85.568802)
@test isapprox(S.loc[1].el, 481.0)
@test any([occursin("gdsn_tape",s) for s in S.notes[1]])
@test any([occursin("demeaned",s) for s in S.notes[1]])
@test length(S.resp[1].p) == 24
@test length(S.resp[1].z) == 7
@test u2d(S.t[1][1,2]*μs) == DateTime("1984-04-20T06:42:00.12")
@test length(S.x[1]) == 720
@test eltype(S.x[1]) == Float32
@test isapprox(S.x[1][1:4], [-731.41247559, -724.41247559, -622.41247559, -470.4125061])
C = verified_read_data("ah1", ahc_file, v=1, full=true)[1]
# Station
@test isapprox(C.loc.lat, 36.5416984)
@test isapprox(C.loc.lon, 138.2088928)
@test isapprox(C.loc.el, 422.0)
@test isapprox(C.gain, 1.178e8)
@test length(C.resp.p) == 10
@test isapprox(C.resp.p[1], -0.0123f0 + 0.0123f0im)
@test isapprox(C.resp.p[2], -0.0123f0 - 0.0123f0im)
@test length(C.resp.z) == 3
# Data
@test length(C.x) == 1079
@test eltype(C.x) == Float32
@test C.fs == 1.0
@test u2d(C.t[1,2]*μs) == DateTime("1990-05-12T04:49:54.49")
# Event
@test isapprox(C.misc["ev_lat"], 49.037)
@test isapprox(C.misc["ev_lon"], 141.847)
@test isapprox(C.misc["ev_dep"], 606.0)
@test string(u2d(C.misc["ot"]*μs)) == "1990-05-12T04:50:08.7"
@test startswith(C.misc["data_comment"], "Streckeisen STS-1V/VBB Seismometer")
@test startswith(C.misc["event_comment"], "null")
C = verified_read_data("ah1", ah_resp, full=true, vl=true)[1]
@test isapprox(C.loc.lat, 53.058060)
@test isapprox(C.loc.lon, 70.282799)
@test isapprox(C.loc.el, 300.0)
@test isapprox(C.gain, 0.05)
@test isapprox(C.resp.a0, 40.009960)
@test length(C.resp.p) == 7
@test isapprox(C.resp.p[1], -0.1342653f0 + 0.1168836f0im)
@test length(C.resp.z) == 4
@test startswith(C.misc["data_comment"], "DS response in counts/nm")
@test startswith(C.misc["event_comment"], "Calibration_for_hg_TSG")
@test any([occursin("brv2ah: ahtedit",s) for s in C.notes])
@test any([occursin("demeaned",s) for s in C.notes])
@test any([occursin("modhead",s) for s in C.notes])
@test any([occursin("ahtedit",s) for s in C.notes])
end
printstyled(" append existing channel\n", color=:light_green)
test_chan_ext(ah1_file, "ah1", "nu.RSN..IPZ", 4.0, 1, 451291110190001)
test_chan_ext(ah1_file, "ah1", "nu.RSC..IPZ", 4.0, 1, 451291320120000)
printstyled(" v2\n", color=:light_green)
redirect_stdout(out) do
S = verified_read_data("ah2", ah2_file, v=3)
S = verified_read_data("ah2", ah2_file, v=3, full=true)
S = verified_read_data("ah2", ah2_fstr, v=3, full=true, vl=true, strict=false)
@test S.n == 1
S = read_data("ah2", ah2_fstr, v=3, full=true, strict=true, vl=true)
@test S.n == 4
@test S.fs[1] == 4.0
@test isapprox(S.gain[1], 64200.121094)
@test isapprox(S.loc[1].lat, 35.599899)
@test isapprox(S.loc[1].lon, -85.568802)
@test isapprox(S.loc[1].el, 481.0)
@test any([occursin("gdsn_tape",s) for s in S.notes[1]])
@test any([occursin("demeaned",s) for s in S.notes[1]])
@test length(S.resp[1].p) == 24
@test length(S.resp[1].z) == 7
@test u2d(S.t[1][1,2]*μs) == DateTime("1984-04-20T06:42:00.12")
@test length(S.x[1]) == 720
@test eltype(S.x[1]) == Float32
@test isapprox(S.x[1][1:4], [-731.41247559, -724.41247559, -622.41247559, -470.4125061])
end
printstyled(" append existing channel\n", color=:light_green)
test_chan_ext(ah2_file, "ah2", "nu.RS..IP", 4.0, 1, 451291320120000)
printstyled(" custom user attributes\n", color=:light_green)
tmp_ah = "tmp.ah"
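# Added note: this hand-builds an AH-2 record with one extra user attribute. The
# appended bytes appear to follow XDR conventions: a big-endian attribute count
# (1), then a key length (6), the key "PEBKAC" null-padded to a 4-byte boundary,
# a value length (41), and the value null-padded likewise; the 4 skipped bytes
# presumably held the original (zero) attribute count.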
io = open(ah2_file, "r")
buf = read(io, 10636)
skip(io, 4)
append!(buf, reinterpret(UInt8, [bswap(Int32(1))]))
append!(buf, reinterpret(UInt8, [bswap(Int32(6))]))
append!(buf, codeunits("PEBKAC\0\0"))
append!(buf, reinterpret(UInt8, [bswap(Int32(41))]))
append!(buf, codeunits("problem exists between keyboard and chair\0\0\0"))
append!(buf, read(io))
close(io)
ah_out = open(tmp_ah, "w")
write(ah_out, buf)
close(ah_out)
S = read_data("ah2", tmp_ah, strict=true)
@test haskey(S.misc[4], "PEBKAC")
@test S.misc[4]["PEBKAC"] == "problem exists between keyboard and chair"
safe_rm(tmp_ah)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2523 | lenn_file = string(path, "/SampleFiles/ASCII/0215162000.c00")
geocsv_file = string(path, "/SampleFiles/ASCII/geo-tspair.csv")
slist_file = string(path, "/SampleFiles/ASCII/2m-62.5hz.slist")
lenn_pat = string(path, "/SampleFiles/ASCII/021516*c00")
geocsv_pat = string(path, "/SampleFiles/ASCII/geo-tspair.*")
slist_pat = string(path, "/SampleFiles/ASCII/*.slist")
geoslist_f = string(path, "/SampleFiles/ASCII/geo-slist.csv")
printstyled(" Lennartz ASCII\n", color=:light_green)
C = verified_read_data("lennartz", lenn_file, vl=true)[1]
@test ≈(C.fs, 62.5)
printstyled(" wildcard support\n", color=:light_green)
S = verified_read_data("lennartz", lenn_pat)
printstyled(" GeoCSV timeseries\n", color=:light_green)
S = verified_read_data("geocsv", geocsv_file, vl=true)
@test S.n == 8
if Sys.iswindows()
println(" IDs = ", join(S.id, ","))
end
i = findid("CC.JRO..BHZ", S.id)
if i > 0
@test ≈(S.fs[i], 50.0)
@test ==(S.loc[i], GeoLoc(lat=46.275269, lon=-122.218262, el=1219.0, inc=180.0))
end
printstyled(" GeoCSV slist\n", color=:light_green)
S = verified_read_data("geocsv.slist", geoslist_f)
printstyled(" wildcard support\n", color=:light_green)
S = verified_read_data("geocsv", geocsv_pat)
printstyled(" slist\n", color=:light_green)
S = verified_read_data("slist", slist_file, vl=true)
nx = length(S.x[1])
@test S.id[1] == "YY.ERTA..EHZ"
@test ≈(S.fs[1], 62.5)
@test isapprox(C.x[1:nx], S.x[1])
printstyled(" empty channel + check_for_gap!\n", color=:light_green)
S.t[1] = Array{Int64}(undef, 0, 2)
S.x[1] = Float32[]
verified_read_data!(S, "slist", slist_file)
@test S.id[1] == "YY.ERTA..EHZ"
@test ≈(S.fs[1], 62.5)
@test C.t[1,2] == S.t[1][1,2]
@test isapprox(C.x[1:nx], S.x[1])
printstyled(" wildcard support\n", color=:light_green)
S = verified_read_data("slist", slist_pat)
@test S.id[1] == "YY.ERTA..EHZ"
@test ≈(S.fs[1], 62.5)
@test isapprox(C.x[1:nx], S.x[1])
printstyled(" channel continuation (Issue 34)\n", color=:light_green)
printstyled(" GeoCSV.tspair\n", color=:light_green)
test_chan_ext(geocsv_file, "geocsv.tspair", "CC.JRO..BHZ", 50.0, 1, 1554777720010000)
printstyled(" GeoCSV.slist\n", color=:light_green)
test_chan_ext(geoslist_f, "geocsv.slist", "IU.ANMO.00.LHZ", 1.0, 3, 1551249000000000)
printstyled(" slist\n", color=:light_green)
test_chan_ext(slist_file, "slist", "YY.ERTA..EHZ", 62.5, 1, 1013790000000000)
printstyled(" lennartz\n", color=:light_green)
test_chan_ext(lenn_file, "lennartz", ".ERTA..c00", 62.5, 1, 1013790000000000)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |