licenses
sequencelengths 1
3
| version
stringclasses 677
values | tree_hash
stringlengths 40
40
| path
stringclasses 1
value | type
stringclasses 2
values | size
stringlengths 2
8
| text
stringlengths 25
67.1M
| package_name
stringlengths 2
41
| repo
stringlengths 33
86
|
---|---|---|---|---|---|---|---|---|
[
"MIT"
bstr = [path * "/SampleFiles/Bottle/B024*",
        path * "/SampleFiles/Bottle/B2021901700*",
        path * "/SampleFiles/Bottle/B203*",
        path * "/SampleFiles/Bottle/B20319115Rainfallmm"]
printstyled("  Bottle (UNAVCO strain data)\n", color=:light_green)
# Read four bottle-format file patterns; the fourth (a rainfall channel) gets
# explicit header spot-checks after PBO metadata fill-in.
for i = 1:4
  S = verified_read_data("bottle", bstr[i], nx_new=14400, nx_add=14400, vl=true)
  fill_pbo!(S)
  if i == 4
    # 30-minute sampling => fs = 1/(30*60) Hz; el/units from PBO metadata
    @test S.name[1] == "quarry203bwa2007"
    @test isapprox(S.fs[1], 1.0/(30*60))
    @test isapprox(S.loc[1].el, 814.4)
    @test S.units[1] == "mm"
  end
end
printstyled("  source logging\n", color=:light_green)
# Low-level read + show_src variants; output suppressed via redirect_stdout.
# NOTE(review): read_bottle positional args appear to be
# (pattern, nx_new, nx_add, v, full, memmap) — confirm against SeisIO source.
redirect_stdout(out) do
  S = SeisIO.read_bottle(bstr[4], 14400, 14400, true, true, 0)
  show_src(S, 1)
  show_src(S[1])
  show_src(S)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
import SeisIO.Formats: formats, FmtVer, FormatDesc, HistVec
# Exercise the formats() registry: insert a fully-populated and a
# default-constructed fake descriptor, verify field round-trips, then remove
# both so the global registry is left unchanged.
printstyled("  formats()\n", color=:light_green)
Fake_fmt = FormatDesc(
  "Fake data format",
  "\"goat-c\"",
  "Two guys running a startup from a taco truck, Mountain View, California, USA",
  "https://www.youtube.com/watch?v=TMZi25Pq3T8",
  "[email protected]",
  HistVec(),
  [ "much like SEG Y, everything is in XML for no reason",
    "still a better idea than SEED",
    "abandoned 2004-01-14. (RIP)"],
  [ "Mountain View, CA",
    "Christmas Island"],
  ["https://lmgtfy.com/?q=goatse"],
  0xff
  )
Fake_fmt.ver = [ FmtVer("1.0", "1999-01-01", false); FmtVer() ]
formats["Fake"] = Fake_fmt
@test formats["Fake"].docs == ["https://lmgtfy.com/?q=goatse"]
@test formats["Fake"].status == 0xff
delete!(formats, "Fake")
# A default FormatDesc should have empty docs and status 0x00
Fake_fmt_2 = FormatDesc()
formats["Fake2"] = Fake_fmt_2
@test formats["Fake2"].docs == []
@test formats["Fake2"].status == 0x00
delete!(formats, "Fake2")
# show() on a few real registry entries; output suppressed
redirect_stdout(out) do
  show(formats["ah1"])
  show(formats["sac"])
  show(formats["suds"])
  show(formats["uw"])
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
# The file test.mseed comes from an older IRIS libmseed, found by anowacki
# It has a more complicated structure than the test.mseed file in more recent
# versions of libmseed, which reads with no issues
printstyled("  mini-SEED\n", color=:light_green)
printstyled("    sample rate\n", color=:light_green)
# Tests from SEED manual v2.4 page 110: (r1, r2) factor/multiplier pairs must
# produce the listed sampling frequencies via update_dt!
import SeisIO.SEED.update_dt!
r1 = [33, 330, 3306, -60, 1, -10, -1]
r2 = [10, 1, -10, 1, -10, 1, -10]
fs = [330.0, 330.0, 330.6, 1.0/60.0, 0.1, 0.1, 0.1]
for i = 1:length(r1)
  BUF.r1 = r1[i]
  BUF.r2 = r2[i]
  update_dt!(BUF)
  @test isapprox(1.0/BUF.dt, fs[i])
end
test_sac_file = string(path, "/SampleFiles/SAC/test_le.sac")
test_mseed_file = string(path, "/SampleFiles/SEED/test.mseed")
test_mseed_pat = string(path, "/SampleFiles/SEED/t*.mseed")
mseed_vals_file = string(path, "/SampleFiles/SEED/test_mseed_vals.txt")
printstyled("    file read\n", color=:light_green)
# Feeding a SAC file to the mseed reader must error
@test_throws ErrorException verified_read_data("mseed", test_sac_file)
S = verified_read_data("mseed", test_mseed_file, v=0, strict=false)
@test isequal(S.id[1], "NL.HGN.00.BHZ")
@test ≈(S.fs[1], 40.0)
@test ≈(S.gain[1], 1.0)
@test isequal(string(u2d(S.t[1][1,2]*μs)), "2003-05-29T02:13:22.043")
@test ≈(S.x[1][1:5], [ 2787, 2776, 2774, 2780, 2783 ])
# mseed with mmap
printstyled("    file read with mmap\n", color=:light_green)
Sm = read_data("mseed", test_mseed_file, v=0, memmap=true)
@test Sm == S
# Test breaks if memory-resident SeisIOBuf structure SEED is not reset
S1 = verified_read_data("mseed", test_mseed_file, v=0, strict=false)
if Sys.iswindows() == false
  S2 = verified_read_data("mseed", test_mseed_pat, v=0, strict=false)
  @test S2.src[1] == abspath(test_mseed_pat)
  S2.src = S1.src  # :src differs (pattern vs. file); equalize before comparing
  @test S == S1 == S2
end
# Load per-file expected sample values: column 1 = file name, rest = Float32 x
mseed_vals = readdlm(mseed_vals_file, ',', comments=true, comment_char='#')
seedvals = Dict{String,Any}()
ntest = size(mseed_vals,1)
for i = 1:ntest
  seedvals[mseed_vals[i,1]] = Float32.(mseed_vals[i, 2:end])
end
# Extended mini-SEED tests against restricted sample files (time gaps and
# uncommon blockette types). Skipped when the Restricted directory is absent
# (e.g. on CI hosts without access to the restricted data set).
if safe_isdir(path*"/SampleFiles/Restricted")
  printstyled("  mseed test files (time gaps, blockette types)\n", color=:light_green)
  redirect_stdout(out) do
    seed_support()
    files = ls(path*"/SampleFiles/Restricted/*mseed")
    for f in files
      println(stdout, "attempting to read ", f)
      S = SeisData()
      # These four file classes may legitimately yield zero data channels
      ae = any([occursin(i, f) for i in ("blkt2000", "detection.record", "text-encoded", "timing.500s")])
      if ae
        verified_read_data!(S, "mseed", f, v=3, allow_empty=true, strict=false)
      else
        verified_read_data!(S, "mseed", f, v=2, strict=false)
      end
      # Test that our encoders return the expected values
      (tmp, fname) = splitdir(f)
      if haskey(seedvals, fname)
        # BUG FIX: was get(seedvals, :fname, Float32[]) — a Symbol key can
        # never match this Dict's String keys, so x was always the empty
        # default and the isapprox() below passed vacuously. Look up the
        # actual file name instead.
        x = get(seedvals, fname, Float32[])
        nx = lastindex(x)
        y = getindex(getfield(S, :x), 1)[1:nx]
        @test isapprox(x, y)
      end
      if occursin("text-encoded", f)
        # ASCII-encoded records land in :misc["seed_ascii"], not :x
        @test haskey(S.misc[1], "seed_ascii") == true
        str = split(S.misc[1]["seed_ascii"][1], "\n", keepempty=false)
        @test occursin("Quanterra Packet Baler Model 14 Restart.", str[1])
      elseif occursin("detection.record", f)
        # Blockette 200 (event detection) is logged to :misc["seed_event"]
        ev_rec = get(S.misc[1], "seed_event", "no record")[1]
        @test ev_rec == "2004,7,28,20,28,6,185,80.0,0.39999998,18.0,dilatation,1,3,2,1,4,0,2,0,Z_SPWWSS"
      elseif occursin("SHW.UW", f)
        println("testing read accuracy of SHW.UW with ", f)
        @test size(S.t[1],1) >= 158
        @test size(S.t[2],1) >= 8
        @test string(u2d(S.t[1][1,2]*μs)) == "1980-03-22T20:45:18.349"
        @test isequal(S.id, String[ "UW.SHW..EHZ", "UW.SHW..SHZ" ])
        @test ≈(S.fs, Float64[104.085000, 52.038997])
        @test ≈(S.x[1][1:5], Float64[-68.0, -57.0, -71.0, -61.0, -52.0])
        # Cross-check segment start times of channel 2 against per-segment
        # SAC files of the same data; each must agree to within one sample
        fnames = ls(path*"/SampleFiles/Restricted/1980*SHZ.D.SAC")
        C = S[2]
        @test w_time(t_win(C.t, C.fs), C.fs) == C.t  # t <-> window round-trip
        t = t_win(C.t, C.fs)[:,1]
        W = Array{DateTime,1}(undef, 0)
        for i = 1:length(t)
          push!(W, u2d(t[i]*μs))
        end
        Y = Array{DateTime,1}(undef, 0)
        for f in fnames
          seis = verified_read_data("sac", f)[1]
          push!(Y, u2d(seis.t[1,2]*μs))
        end
        # Δ = start-time mismatch in samples; must be under one sample
        Δ = [abs(.001*(W[i]-Y[i]).value)*C.fs for i = 1:length(Y)]
        @test maximum(Δ) < 1.0
      else
        @test isempty(S) == false
      end
    end
  end
else
  printstyled("  extended SEED tests skipped. (files not found; is this Appveyor?)\n", color=:green)
end
# Tests for unparseable blockette and data format
# Strategy: byte-patch a copy of test.mseed so its first record claims an
# unknown encoding (Steim-3) and a bogus blockette chain, then check that the
# reader skips that record cleanly and logs the skipped byte count.
mseed_out = "test2.mseed"
io = open(test_mseed_file, "r")
buf = read(io)
close(io)
buf[40] = 0x03 # 3 blockettes follow
buf[53] = 0x13 # 19 = Steim-3
buf[67:68] .= [0x00, 0x54] # byte 84
buf[85:86] .= [0x01, 0x90] # [400]
buf[87:88] .= [0x00, 0x00] # next blockette at "byte 0" means we're done
write(mseed_out, buf)
S1 = read_data("mseed", test_mseed_file, v=0)[1]
S2 = read_data("mseed", mseed_out, v=0)[1]
# Check that skipping an unparseable data type on a new channel doesn't
# affect channel start time or data read-in
printstyled("    unparseable data\n", color=:light_green)
δx = length(S1.x)-length(S2.x)
# S2's start time is pushed forward by exactly the skipped samples
@test div(S2.t[1,2]-S1.t[1,2], round(Int64, sμ/S1.fs)) == length(S1.x)-length(S2.x)
@test S1.x[δx+1:end] == S2.x
# Check that bytes skipped are accurately logged
printstyled("    unparseable blockettes\n", color=:light_green)
@test any([occursin("3968 bytes skipped", i) for i in S2.notes])
# Done
rm(mseed_out)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
nx_add = 1400000
nx_new = 36000
pref = path * "/SampleFiles/"
cfile = pref * "Restricted/03_02_27_20140927.sjis.ch"
# Dispatch table: (relative file path, format string, option tag). The tag in
# column 3 selects which keyword combination is exercised below.
files = String[ "UW/00012502123W" "uw" "_"
                "SEGY/03.334.12.09.00.0362.1" "passcal" "pa-full"
                "SEGY/03.334.12.09.00.0362.1" "passcal" "passcal"
                "SAC/test_le.sac" "sac" "_"
                "SEED/test.mseed" "mseed" "_"
                "Restricted/2014092709*.cnt" "win32" "win"
                "ASCII/0215162000.c00" "lennartz" "_"
                "ASCII/geo-slist.csv" "geocsv.slist" "_"
                "Restricted/SHW.UW.mseed" "mseed" "lo-mem"
                "Restricted/test_rev_1.segy" "segy" "full"
                "Restricted/test_rev_1.segy" "segy" "_"
                "SAC/test_be.sac" "sac" "full"
                "SAC/test_be.sac" "sac" "_"
                "ASCII/geo-tspair.csv" "geocsv" "_"
                ]
# Do not run on Appveyor; it can't access restricted files, so this breaks
printstyled("  read_data\n", color=:light_green)
nf = size(files,1)
for n = 1:nf
  fname = pref * files[n,1]
  # Restricted files are only attempted when has_restricted is set
  if (occursin("Restricted", fname)==false || (has_restricted==true))
    fwild = fname[1:end-1] * "*"  # wildcard variant of the same file name
    f_call = files[n,2]
    opt = files[n,3]
    printstyled(string("    ", n, "/", nf, " ", f_call, "\n"), color=:light_green)
    if opt == "pa-full"
      # PASSCAL with full headers: explicit format, wildcard, mmap, autodetect
      S = verified_read_data("passcal", fname, full=true)
      S = verified_read_data("passcal", fwild, full=true)
      S = read_data("passcal", fwild, full=true, memmap=true)
      S = read_data(fname)
      S = read_data(fname, full=true)
    elseif opt == "win"
      # win32 requires a channel file (cf); also test String-array input
      S = verified_read_data(f_call, fwild, cf=cfile)
      S = read_data(f_call, fwild, memmap=true, cf=cfile)
      S = read_data(fwild, cf=cfile)
      S = read_data(f_call, [fwild], cf=cfile)
    elseif opt == "slist"
      S = verified_read_data("geocsv.slist", fname)
      S = verified_read_data("geocsv.slist", fwild)
      S = read_data("geocsv.slist", fwild, memmap=true)
      S = read_data(fwild)
    elseif opt == "lo-mem"
      # low-memory read path: preallocate nx_new, grow by nx_add
      S = verified_read_data(f_call, fname, nx_new=nx_new, nx_add=nx_add)
      S = verified_read_data(f_call, fwild, nx_new=nx_new, nx_add=nx_add)
      S = read_data(f_call, fwild, nx_new=nx_new, nx_add=nx_add, memmap=true)
    elseif opt == "full"
      S = verified_read_data(f_call, fname, full=true)
      S = verified_read_data(f_call, fwild, full=true)
      S = read_data(f_call, fwild, full=true, memmap=true)
      S = read_data(fwild, full=true)
    else
      # default path: explicit format, mmap, wildcard, autodetect
      S = verified_read_data(f_call, fname)
      S = read_data(f_call, fname, memmap=true)
      if f_call == "uw"
        # UW wildcard must still end in "W" (pick file, not data+pick pair)
        fwild = fname[1:end-3]*"*"*"W"
      end
      S = read_data(f_call, fwild, memmap=true)
      S = read_data(fwild)
    end
  end
end
# Test for reading a String array of file names
pref = path * "/SampleFiles/"
files = pref .* ["SAC/test_be.sac", "SAC/test_le.sac"]
S = verified_read_data("sac", files, vl=true)
# Every channel's :notes must log every input file's absolute path
for f in files
  for i in 1:S.n
    @test any([occursin(abspath(f), n) for n in S.notes[i]])
  end
end
# Check that other array methods work
S1 = SeisData()
S2 = SeisData()
read_data!(S1, "sac", files, vl=true)
read_data!(S2, files, vl=true)
S3 = read_data(files, vl=true)
@test S == S1 == S2 == S3
# Does a string array read the same way as a wildcard read?
compare_SeisData(S, verified_read_data("sac", pref .* "SAC/test*sac", vl=true))
# source logging
printstyled("    logging\n", color=:light_green)
uwf1 = joinpath(path, "SampleFiles/UW/99011116541")
uwf4 = joinpath(path, "SampleFiles/UW/00012502123W")
p1 = abspath(uwf1*"W")
p2 = abspath(uwf4)
S = read_data("uw", [uwf1*"W", uwf4])
# :src of each channel must point at whichever UW file it came from;
# channels from the year-2000 file are distinguished by start time
for i in 1:S.n
  if length(S.t[i]) == 6
    @test S.src[i] == p2
  else
    @test S.src[i] == (S.t[i][1,2] ≥ 946684800000000 ? p2 : p1)
  end
end
# Unknown format strings must raise
@test_throws ErrorException verified_read_data("deez", "nutz.sac")
nothing
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
printstyled("  read_meta equivalencies\n", color=:light_green)
printstyled("    full (XML, RESP, dataless)\n", color=:light_green)
# Read the same station metadata from three formats (StationXML, RESP,
# dataless SEED) and verify every MultiStageResp field agrees, exactly ('=')
# or approximately ('≈'). 'f' marks a mismatch and fails the test.
fname = "JRO.sacpz"
dataless_name = "CC.dataless"
sxml_file = path*"/SampleFiles/XML/fdsnws-station_2019-09-11T06_26_58Z.xml"
resp_file = path*"/SampleFiles/SEED/JRO.resp"
dataless_file = path*"/SampleFiles/SEED/"*dataless_name
dataless_wc = path*"/SampleFiles/SEED/CC.*"
sacpz_file = path*"/SampleFiles/SAC/"*fname
sacpz_wc = path*"/SampleFiles/SAC/JRO.sacp*"
S1 = read_meta("sxml", sxml_file, s="2016-01-01T00:00:00", memmap=true, msr=true)
S2 = read_meta("resp", resp_file, memmap=true, units=true)
S3 = read_meta("dataless", dataless_file, memmap=true, s="2016-01-01T00:00:00", units=true)[56:58]
S4 = read_meta("sacpz", sacpz_file, memmap=true)
@test_throws ErrorException read_meta("dataless", sxml_file)
@test_throws ErrorException read_meta("deez", "nutz.sac")
@test_throws ErrorException read_meta("deez", sxml_file)
# C[n,k]: comparison verdict per (resp field n, channel k); SA mirrors it as
# padded strings for the summary table printed afterwards
C = Array{Char,2}(undef, 10, 3)
fill!(C, ' ')
SA = Array{String, 2}(undef, 10, 3)
for k = 1:3
  fstr = ""
  n = 0
  # println(" ===== ", S1.id[k], " =====")
  R1 = S1.resp[k]
  R2 = S2.resp[k]
  R3 = S3.resp[k]
  for f in fieldnames(MultiStageResp)
    fstr = uppercase(String(f))
    n += 1
    f1 = getfield(R1, f)
    f2 = getfield(R2, f)
    f3 = getfield(R3, f)
    t = min(isequal(f1, f2), isequal(f1, f3))  # true iff all three equal
    if t
      C[n,k] = '='
    else
      # Check for same size, type
      L = length(f1)
      if L != length(f2) || L != length(f3)
        C[n,k] = 'f'
        continue
      elseif typeof(f1) != typeof(f2) || typeof(f1) != typeof(f3)
        C[n,k] = 'f'
        continue
      end
      # Check for approximately equal fields, element by element
      T = falses(L)
      for i = 1:L
        i1 = getindex(f1, i)
        i2 = getindex(f2, i)
        i3 = getindex(f3, i)
        t2 = min(isequal(i1, i2), isequal(i1, i3))
        if t2 == true
          T[i] = true
        else
          T1 = typeof(i1)
          if T1 != typeof(i2) || T1 != typeof(i3)
            C[n,k] = 'f'
            break
          # Easy for a bitstype: isapprox suffices
          elseif isbitstype(T1)
            if isapprox(i1, i2) && isapprox(i1, i3)
              C[n,k] = '≈'
              T[i] = true
              continue
            end
          else
            FF = fieldnames(T1)
            # Only possible for a String in these Types
            if isempty(FF)
              C[n,k] = 'f'
            # Check for approximately equal subfields
            else
              TT = falses(length(FF))
              for (j,g) in enumerate(FF)
                j1 = getfield(i1, g)
                j2 = getfield(i2, g)
                j3 = getfield(i3, g)
                # Dimension mismatch
                if !(length(j1) == length(j2) == length(j3))
                  C[n,k] = 'f'
                # True mismatch
                elseif min(isapprox(j1, j2), isapprox(j1, j3)) == false
                  C[n,k] = 'f'
                # Approx. equality
                else
                  TT[j] = true
                end
              end
              # If they're all approximate, set T[i] to true
              if minimum(TT) == true
                T[i] = true
              end
            end
          end
        end
      end
      # If they're all approximate, set C[n,k] to ≈
      if minimum(T) == true
        C[n,k] = '≈'
      end
    end
    SA[n,k] = (k == 1 ? lpad(fstr * ": ", 12) : "") * lpad(C[n,k], 5)
    @test C[n,k] in ('≈', '=')
  end
end
# Print the per-field, per-channel verdict table
println("")
println(" "^12,
  lpad(S1.id[1][end-3:end], 6),
  lpad(S1.id[2][end-3:end], 6),
  lpad(S1.id[3][end-3:end], 6))
println(" "^12, "|", "="^5, "|", "="^5, "|", "="^5)
for i = 1:size(SA,1)
  println(join(SA[i,:], " "))
end
println("")
printstyled("    one-stage (SACPZ, XML)\n", color=:light_green)
# Same idea as above, but single-stage: StationXML (msr=false) vs. SACPZ.
# Each PZResp field must be exactly ('=') or approximately ('≈') equal.
S1 = read_meta("sxml", sxml_file, s="2016-01-01T00:00:00", msr=false)
S4 = read_meta("sacpz", sacpz_file)
K = Array{Char,2}(undef, 3, 3)
fill!(K, ' ')
SA = Array{String, 2}(undef, 3, 3)
for k = 1:3
  n = 0
  R1 = S1.resp[k]
  R2 = S4.resp[k]
  for f in fieldnames(PZResp)
    fstr = uppercase(String(f))
    (f == :f0) && continue # SACPZ lacks f0 for some reason
    n += 1
    f1 = getfield(R1, f)
    f2 = getfield(R2, f)
    t = isequal(f1, f2)
    if t == true
      K[n,k] = '='
    else
      # Check for same size, type
      L = length(f1)
      if L != length(f2)
        K[n,k] = 'f'
        continue
      elseif typeof(f1) != typeof(f2)
        K[n,k] = 'f'
        continue
      elseif isapprox(f1, f2)
        K[n,k] = '≈'
      end
    end
    @test K[n,k] in ('≈', '=')
    SA[n,k] = (k == 1 ? lpad(fstr * ": ", 12) : "") * lpad(K[n,k], 5)
  end
end
# Print the per-field, per-channel verdict table
println("")
println(" "^12,
  lpad(S1.id[1][end-3:end], 6),
  lpad(S1.id[2][end-3:end], 6),
  lpad(S1.id[3][end-3:end], 6))
println(" "^12, "|", "="^5, "|", "="^5, "|", "="^5)
for i = 1:size(SA,1)
  println(join(SA[i,:], " "))
end
println("")
# Wildcard reads must match single-file reads (except :src/:notes logging)
S1 = read_meta("sacpz", sacpz_file)
S2 = read_meta("sacpz", sacpz_wc)
S3 = read_meta("dataless", dataless_wc, s="2016-01-01T00:00:00")
for f in SeisIO.datafields
  if (f in (:src, :notes)) == false
    @test isequal(getfield(S1,f), getfield(S2,f))
  end
end
# test here to track +meta logging
printstyled("    logging to :notes\n", color=:light_green)
for i in 1:S1.n
  @test any([occursin(fname, n) for n in S1.notes[i]])
end
for i in 1:S2.n
  @test any([occursin(fname, n) for n in S2.notes[i]])
end
for i in 1:S3.n
  @test any([occursin(dataless_name, n) for n in S3.notes[i]])
end
printstyled("    array of inputs\n", color=:light_green)
S = read_meta("sacpz", [sacpz_file, sacpz_wc])
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
# test_le.sac was generated in SAC 101.6a with "fg seismogram; write test_le.sac"
sac_file = path*"/SampleFiles/SAC/test_le.sac"
sac_be_file = path*"/SampleFiles/SAC/test_be.sac"
sac_pz_file = path*"/SampleFiles/SAC/test_sac.pz"
sac_pz_wc = path*"/SampleFiles/SAC/test_sac.*"
uw_file = path*"/SampleFiles/UW/00012502123W"
sac_pat = path*"/SampleFiles/SAC/*.sac"
sac_pz_out1 = path*"/local_sac_1.pz"
sac_pz_out2 = path*"/local_sac_2.pz"
sac_pz_out3 = path*"/local_sac_3.pz"
sac_pz_out4 = path*"/local_sac_4.pz"
sac_pz_out5 = path*"/local_sac_5.pz"
# Expected auto-generated output file names (timestamp stub + id + ".R.SAC")
f_stub = "1981.088.10.38.23.460"
f_out = f_stub * "..CDV...R.SAC"
f_out_new = f_stub * ".VU.CDV..NUL.R.SAC"
sacv7_out = "v7_out.sac"
printstyled("  SAC\n", color=:light_green)
printstyled("    read\n", color=:light_green)
# A UW file is not valid SAC and must error
@test_throws ErrorException verified_read_data("sac", uw_file)
SAC1 = verified_read_data("sac", sac_file)[1]
@test ≈(SAC1.fs, 100.0)
@test ≈(length(SAC1.x), 1000)
# SAC with mmap
printstyled("      mmap\n", color=:light_green)
SACm = read_data("sac", sac_file, memmap=true)[1]
@test SAC1 == SACm
# full=true preserves raw header values in :misc
SAC2 = verified_read_data("sac", sac_file, full=true)[1]
@test ≈(1/SAC1.fs, SAC2.misc["delta"])
@test ≈(length(SAC1.x), SAC2.misc["npts"])
printstyled("      wildcard\n", color=:light_green)
SAC = verified_read_data("sac", sac_pat, full=true)
printstyled("      bigendian\n", color=:light_green)
SAC3 = verified_read_data("sac", sac_be_file, full=true)[1]
@test ≈(1/SAC3.fs, SAC3.misc["delta"])
@test ≈(length(SAC3.x), SAC3.misc["npts"])
redirect_stdout(out) do
  sachdr(sac_be_file)
end
printstyled("    write\n", color=:light_green)
S = SeisData(SAC2)
writesac(S) # change 2019-07-15 to cover writesac on GphysData
@test safe_isfile(f_out)
# writing must be logged to :notes
@test any([occursin(string("wrote to file ", f_out), S.notes[1][i]) for i in 1:length(S.notes[1])])
printstyled("      reproducibility\n", color=:light_green)
# Read-back of a written file must match the original in all data fields
SAC4 = verified_read_data("sac", f_out, full=true)[1]
for f in SeisIO.datafields
  (f in [:src, :notes, :misc]) && continue
  @test isequal(getfield(SAC2, f), getfield(SAC4, f))
end
fn = f_out[3:end]
writesac(SAC4, fname=fn)
@test safe_isfile(fn)
SAC5 = verified_read_data("sac", fn, full=true)[1]
for f in SeisIO.datafields
  (f in [:src, :notes, :misc]) && continue
  @test isequal(getfield(SAC2, f), getfield(SAC4, f))
  @test isequal(getfield(SAC4, f), getfield(SAC5, f))
end
safe_rm(fn)
printstyled("      logging\n", color=:light_green)
@test any([occursin("write", n) for n in S.notes[1]])
redirect_stdout(out) do
  show_writes(S, 1)
  show_writes(S[1])
  show_writes(S)
end
# File-name generation from :id, explicit fname, and ".sac" auto-append
SAC1.id = "VU.CDV..NUL"
SAC1.name = "VU.CDV..NUL"
writesac(SAC1)
@test safe_isfile(f_out_new)
writesac(SAC1, fname="POTATO.SAC")
@test safe_isfile("POTATO.SAC")
# testing custom naming formats
writesac(SAC1, fname="test_write_1.sac")
@test safe_isfile("test_write_1.sac")
safe_rm("test_write_1.sac")
# testing that appending ".sac" to a file string works
writesac(SAC1, fname="test_write_2", v=1)
@test safe_isfile("test_write_2.sac")
safe_rm("test_write_2.sac")
redirect_stdout(out) do
  writesac(SAC1, v=1)
end
@test safe_isfile(f_out_new)
safe_rm(f_out_new)
printstyled("      skip if fs==0.0\n", color=:light_green)
# Irregularly-sampled channels (fs == 0.0) must not be written as SAC
SAC1.id = "VU.FS0..NUL"
SAC1.fs = 0.0
writesac(SAC1)
@test safe_isfile(f_stub*"VU.FS0..NUL.R.SAC") == false
# SACPZ
printstyled("  SACPZ\n", color=:light_green)
printstyled("    read\n", color=:light_green)
S = read_meta("sacpz", sac_pz_wc)
S = read_meta("sacpz", sac_pz_file)
# Round-trip: write SACPZ, re-read, compare headers exactly and resp approx.
writesacpz(sac_pz_out1, S)
T = read_meta("sacpz", sac_pz_out1)
for f in (:n, :id, :name, :loc, :fs, :gain, :units)
  @test isequal(getfield(S, f), getfield(T, f))
end
for f in fieldnames(typeof(S.resp[1]))
  for i = 1:S.n
    @test isapprox(getfield(S.resp[i], f), getfield(T.resp[i], f))
  end
end
# Wrap each response in a MultiStageResp (stage 1 = the original PZResp);
# writesacpz must flatten this back to the same single-stage output
U = deepcopy(S)
for i = 1:U.n
  U.resp[i] = MultiStageResp(3)
  U.resp[i].stage[1] = S.resp[i]
end
printstyled("    write\n", color=:light_green)
writesacpz(sac_pz_out2, U)
T = read_meta("sacpz", sac_pz_out2)
for f in (:n, :id, :name, :loc, :fs, :gain, :units)
  @test isequal(getfield(S, f), getfield(T, f))
end
for f in fieldnames(typeof(S.resp[1]))
  for i = 1:S.n
    @test isapprox(getfield(S.resp[i], f), getfield(T.resp[i], f))
  end
end
# read_meta! overwrite path: first channel replaced by a bare SeisChannel
U[1] = SeisChannel(id = "UW.HOOD..ENE")
writesacpz(sac_pz_out3, U)
read_meta!(S, "sacpz", sac_pz_out3)
for f in (:n, :id, :name, :loc, :fs, :gain, :units)
  @test isequal(getfield(S, f), getfield(T, f))
end
# Writer edge cases: matrix resp, a `nothing` stage, and a GphysChannel input
S = breaking_seis()[1:3]
S.resp[1].resp = rand(ComplexF64, 12, 2)
S.resp[3].stage[2] = nothing
writesacpz(sac_pz_out4, S)
printstyled("    GphysChannel\n", color=:light_green)
writesacpz(sac_pz_out5, S[1])
safe_rm(sac_pz_out1)
safe_rm(sac_pz_out2)
safe_rm(sac_pz_out3)
safe_rm(sac_pz_out4)
safe_rm(sac_pz_out5)
printstyled("    SAC file v7 (SAC v102.0)\n", color=:light_green)
# SAC header version 7 appends a 22-element Float64 footer (176 bytes) holding
# double-precision copies of delta, lon, lat, etc.
test_fs = 62.5
test_lat = 48.7456
test_lon = -122.4126
printstyled("      writesac(..., nvhdr=N)\n", color=:light_green)
C = read_data("sac", sac_file)[1]
writesac(C, fname=sacv7_out, nvhdr=6)
sz6 = stat(sacv7_out).size
writesac(C, fname=sacv7_out, nvhdr=7)
sz7 = stat(sacv7_out).size
# The only difference should be the addition of a length-22 Vector{Float64}
@test sz7-sz6 == 176
# In fact, we can open the file, skip to byte sz6, and read in the array
io = open(sacv7_out, "r")
seek(io, sz6)
sac_buf_tmp = read(io)
close(io)
dv = reinterpret(Float64, sac_buf_tmp)
# ...and the variables we write to the header should be identical
# (dv[1] = delta, dv[19] = lon, dv[20] = lat)
@test C.fs == 1.0/dv[1]
@test C.loc.lon == dv[19]
@test C.loc.lat == dv[20]
printstyled("      big endian\n", color=:light_green)
# Construct a v7 big-endian file by hand: patch nvhdr byte to 7, append a
# byte-swapped Float64 footer with known fs/lat/lon, then read it back
io = open(sac_be_file, "r")
sac_raw = read(io)
close(io)
sac_raw[308] = 0x07  # nvhdr = 7 (big-endian byte position)
reset_sacbuf()
dv = BUF.sac_dv
dv[1] = 1.0/test_fs
dv[19] = test_lon
dv[20] = test_lat
dv .= bswap.(dv)  # footer must match the file's big-endian byte order
sac_dbl_buf = reinterpret(UInt8, dv)
io = open(sacv7_out, "w")
write(io, sac_raw)
write(io, sac_dbl_buf)
close(io)
C = read_data("sac", sacv7_out, full=true)[1]
@test C.fs == test_fs
@test C.loc.lat == test_lat
@test C.loc.lon == test_lon
printstyled("      little endian\n", color=:light_green)
# Same construction for the little-endian file (no bswap needed)
io = open(sac_file, "r")
sac_raw = read(io)
close(io)
sac_raw[305] = 0x07  # nvhdr = 7 (little-endian byte position)
reset_sacbuf()
dv[1] = 1.0/test_fs
dv[19] = test_lon
dv[20] = test_lat
sac_dbl_buf = reinterpret(UInt8, dv)
io = open(sacv7_out, "w")
write(io, sac_raw)
write(io, sac_dbl_buf)
close(io)
C = read_data("sac", sacv7_out, full=true)[1]
@test C.fs == test_fs
@test C.loc.lat == test_lat
@test C.loc.lon == test_lon
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
segy_file_1 = string(path, "/SampleFiles/SEGY/03.334.12.09.00.0362.1")
segy_file_2 = string(path, "/SampleFiles/SEGY/test_rev1.segy")
segy_file_3 = string(path, "/SampleFiles/SEGY/04.322.18.06.46.92C3.1.coords1.segy")
segy_file_4 = string(path, "/SampleFiles/SEGY/04.322.18.06.46.92C3.1.coords2.segy")
segy_be_file = string(path, "/SampleFiles/SEGY/02.104.00.00.08.7107.2")
segy_fpat = string(path, "/SampleFiles/SEGY/02.104.00.00.08.7107.2")
segy_nodal = string(path, "/SampleFiles/SEGY/FORGE_78-32_iDASv3-P11_UTC190428135038.sgy")
printstyled("  SEG Y\n", color=:light_green)
printstyled("    Helper functions\n", color=:light_green)
printstyled("      IBM-float\n", color=:light_green)
#=
These bit representations are from Wikipedia's page on IBM-float,
https://en.wikipedia.org/wiki/IBM_hexadecimal_floating_point
=#
# Known IBM hexadecimal-float bit patterns and their decoded values
y = [-118.625, +7.2370051e75, +5.397605e-79, 0.0, 0.1]
for (i, str) in enumerate( ["11000010011101101010000000000000",
                            "01111111111111111111111111111111",
                            "00000000000100000000000000000000",
                            "00000000000000000000000000000000",
                            "01000000000110011001100110011010"] )
  u = hton(parse(UInt32, str, base=2))
  @test isapprox(ibmfloat(u), y[i], rtol=10*eps(Float32))
end
printstyled("      auto_coords\n", color=:light_green)
# Round-trip lat/lon <-> SEG-Y integer coordinates with scale code sc
sc = Int16[1,1]
lat0 = -39.4209
lon0 = -71.94061
ele = 2856.3
xy = [round(Int32, lon0*1.0e4), round(Int32, lat0*1.0e4)]
y,x = auto_coords(xy, sc)
@test latlon2xy(y,x) == xy
xy2 = latlon2xy(lat0, lon0)
auto_coords(xy2, sc)
@test isapprox([auto_coords(xy2, sc)...], [lat0, lon0], rtol=eps(Float32))
# Two files with different coordinate encodings must agree on location/data
S1 = read_data(segy_file_3)
S2 = read_data(segy_file_4)
@test abs(S1.loc[1].lat-S2.loc[1].lat) < 1.0e-4
@test abs(S1.loc[1].lon-S2.loc[1].lon) < 1.0e-4
@test abs(S1.loc[1].el-S2.loc[1].el) < 1.0e-4
@test S1.x[1] == S2.x[1]
printstyled("      trid\n", color=:light_green)
# Channel-code inference from trace id, fs, and filter corner period
@test trid(Int16(2), S1.fs[1], 1.0) == "EHZ"
@test trid(Int16(2), S1.fs[1], 1/30) == "HHZ"
printstyled("    SEG Y rev 1\n", color=:light_green)
# Restricted rev-1 tests: per-encoding data integrity plus header checks
if has_restricted
  SEG = verified_read_data("segy", segy_file_2)
  redirect_stdout(out) do
    segyhdr(segy_file_2)
  end
  printstyled("      data integrity\n", color=:light_green)
  # Each test_rev1_<i> file stores the same ramp 1:8 in data format code i;
  # format 4 (fixed-point with gain) is unsupported and must error
  x = collect(1:1:8)
  for i in [1,2,3,5,8]
    fname = string(path, "/SampleFiles/SEGY/test_rev1_", i, ".segy")
    S = read_data("segy", fname)
    @test isapprox(S.x[1], x)
  end
  @test_throws ErrorException read_data("segy", string(path, "/SampleFiles/SEGY/test_rev1_4.segy"))
  printstyled("      header accuracy\n", color=:light_green)
  S = read_data("segy", segy_file_2)
  # 36 traces = 12 stations x 3 components (GHZ/GH1/GH2)
  @test S.n == 36
  @test length(unique([i[1:6] for i in S.id])) == 12
  for i in 1:S.n
    @test S.fs[i] ≈ 2000.0
    @test S.gain[i] ≈ 320.0
    @test S.units[i] == "m/s"
    @test length(S.x[i]) == 32000
    @test size(S.t[i], 1) == 2
  end
  @test length(findall([endswith(i, "GHZ") for i in S.id])) == 12
  @test length(findall([endswith(i, "GH1") for i in S.id])) == 12
  @test length(findall([endswith(i, "GH2") for i in S.id])) == 12
else
  printstyled("      Skipped SEG Y rev 1\n", color=:red)
end
printstyled("    PASSCAL/NMT SEG Y\n", color=:light_green)
SEG = verified_read_data("passcal", segy_file_1, full=true)
printstyled("      header integrity\n", color=:light_green)
# Re-read and check every header field stored in :misc against known values
SEG = verified_read_data("passcal", segy_file_1, full=true)
@test SEG.misc[1]["gain_const"] == 32
@test SEG.gain[1] ≈ SEG.misc[1]["scale_fac"]
@test isapprox(1.0/SEG.gain[1], 4.47021e-07/SEG.misc[1]["gain_const"], atol=eps(Float32))
@test SEG.fs[1] == 100.0 == 1.0e6 / SEG.misc[1]["delta"]
@test lastindex(SEG.x[1]) == 247698
@test SEG.misc[1]["trace_seq_line"] == 3
@test SEG.misc[1]["trace_seq_file"] == 3
@test SEG.misc[1]["rec_no"] == 1
@test SEG.misc[1]["channel_no"] == 2
@test SEG.misc[1]["trace_id_code"] == 3
@test SEG.misc[1]["h_units_code"] == 2
# nx == 32767 is the PASSCAL overflow sentinel; true length is num_samps
@test SEG.misc[1]["nx"] == 32767
@test SEG.misc[1]["samp_rate"] == 10000
@test SEG.misc[1]["gain_type"] == 1
@test SEG.misc[1]["year"] == 2003
@test SEG.misc[1]["day"] == 334
@test SEG.misc[1]["hour"] == 12
@test SEG.misc[1]["minute"] == 9
@test SEG.misc[1]["second"] == 0
@test SEG.misc[1]["ms"] == 5
@test SEG.misc[1]["time_code"] == 2
@test SEG.misc[1]["trigyear"] == 2003
@test SEG.misc[1]["trigday"] == 334
@test SEG.misc[1]["trighour"] == 12
@test SEG.misc[1]["trigminute"] == 9
@test SEG.misc[1]["trigsecond"] == 0
@test SEG.misc[1]["trigms"] == 5
@test SEG.misc[1]["data_form"] == 1
@test SEG.misc[1]["inst_no"] == 0x016a # 0362
@test strip(SEG.misc[1]["sensor_serial"]) == "UNKNOWN"
@test strip(SEG.misc[1]["station_name"]) == "362"
# Location
printstyled("      sensor position\n", color=:light_green)
# SEG-Y coordinate scalars: negative values mean "divide by |scalar|"
h_sc = Float64(get(SEG.misc[1], "h_sc", 1.0))
h_sc = abs(h_sc)^(h_sc < 0.0 ? -1 : 1)
z_sc = Float64(get(SEG.misc[1], "z_sc", 1.0))
z_sc = abs(z_sc)^(z_sc < 0.0 ? -1 : 1)
x = get(SEG.misc[1], "rec_x", 0.0)
y = get(SEG.misc[1], "rec_y", 0.0)
z = get(SEG.misc[1], "rec_ele", 0.0)
# @test SEG.loc[1].lat == y*h_sc == 45.2896        # 45.2896 in wash.sta
# @test SEG.loc[1].lon == x*h_sc == -121.7915      # 121.791496 in wash.sta
# @test SEG.loc[1].el == z*z_sc == 1541.0          # 1541.0 in wash.sta
printstyled("      data integrity\n", color=:light_green)
@test Float64(SEG.misc[1]["max"]) == maximum(SEG.x[1]) == 396817
@test Float64(SEG.misc[1]["min"]) == minimum(SEG.x[1]) == -416512
# @test ≈(SEG.x[1][1:10], [47.0, 46.0, 45.0, 44.0, 51.0, 52.0, 57.0, 59.0, 40.0, 34.0])
@test length(SEG.x[1]) == SEG.misc[1]["num_samps"] == 247698
redirect_stdout(out) do
  segyhdr(segy_file_1, passcal=true)
end
printstyled("      big-endian support\n", color=:light_green)
SEG = verified_read_data("passcal", segy_be_file, full=true, swap=true)
@test SEG.n == 1
@test SEG.id == ["...spn"] # lol, WHY BOB
@test isapprox(1.0/SEG.gain[1], 5.92346875e-8, atol=eps(Float32))
@test SEG.misc[1]["trigyear"] == SEG.misc[1]["year"] == 2002
printstyled("      wildcard support\n", color=:light_green)
SEG = verified_read_data("passcal", segy_fpat, full=true, swap=true)
@test SEG.n == 1
@test Float64(SEG.misc[1]["max"]) == maximum(SEG.x[1]) == 49295.0
@test Float64(SEG.misc[1]["min"]) == minimum(SEG.x[1]) == -54454.0
@test ≈(SEG.x[1][1:5], [-615.0, -3994.0, -4647.0, -895.0, 190.0])
@test length(SEG.x[1]) == SEG.misc[1]["num_samps"] == 180027
printstyled("    keyword ll\n", color=:light_green)
# ll selects which trace-header field becomes the :loc subfield of :id;
# codes {1,4,7} split the nodal file into 33 channels, {2,5,6} into one
for ll in (0x01, 0x04, 0x07)
  S = read_data("segy", segy_nodal, ll=ll)
  @test S.n == 33
  @test first(S.id) == ".0.01.YYY"
  @test last(S.id) == ".0.0X.YYY"
end
for ll in (0x02, 0x05, 0x06)
  S = read_data("segy", segy_nodal, ll=ll)
  @test S.n == 1
  @test S.id[1] == ".0.00.YYY"
end
S = read_data("segy", segy_nodal)
@test S.n == 1
@test S.id[1] == ".0..YYY"
# ll=0x03 maps to a field whose values overflow the target type
@test_throws InexactError read_data("segy", segy_nodal, ll=0x03)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
xml_stfile = path*"/SampleFiles/XML/fdsnws-station_2017-01-12T03-17-42Z.xml"
xml_stpat = path*"/SampleFiles/XML/fdsnws-station*"
f_out = "test.xml"
id_err = "error in Station ID creation!"
unit_err = "units don't match instrument code!"
# NOTE(review): true_id/true_ot/true_loc/true_mag/true_msc are defined here
# but not used in this visible chunk — presumably consumed by event tests.
true_id = String["3337497", "3279407", "2844986", "2559759", "2092067", "1916079", "2413"]
true_ot = DateTime("2011-03-11T05:46:23.200")
true_loc = Float64[2.2376 38.2963; 93.0144 142.498; 26.3 19.7]
true_mag = Float32[8.6, 9.1, 8.8, 8.5, 8.6, 9.0, 8.5]
true_msc = String["MW", "MW", "MW", "MW", "MW", "MW", ""]
# Expected instrument responses for the first and last channels in the XML
r1 = PZResp(a0 = 2.45956f13, f0 = 0.02f0, p = ComplexF32[-981.0+1009.0im, -981.0-1009.0im, -3290.0+1263.0im, -3290.0-1263.0im])
r2 = PZResp(Complex{Float32}.([ 0.0+0.0im -0.037-0.037im
                                0.0+0.0im -0.037+0.037im
                                -15.15+0.0im -15.64+0.0im
                                -176.6+0.0im -97.34-400.7im
                                -463.1-430.5im -97.34+400.7im
                                -463.1+430.5im -374.8+0.0im
                                0.0+0.0im -520.3+0.0im
                                0.0+0.0im -10530.0-10050.0im
                                0.0+0.0im -10530.0+10050.0im
                                0.0+0.0im -13300.0+0.0im
                                0.0+0.0im -255.097+0.0im ]),rev=true)
r2.z = r2.z[1:6]
r2.f0 = 0.02f0
r2.a0 = 3.53734f17
printstyled("  FDSN station XML\n", color=:light_green)
io = open(xml_stfile, "r")
xsta = read(io, String)
close(io)
# Parse without multi-stage responses (msr=false); full time window
S = FDSN_sta_xml(xsta, false, "0001-01-01T00:00:00", "9999-12-31T23:59:59", 0)
ID = S.id
NAME = S.name
LOC = S.loc
FS = S.fs
GAIN = S.gain
RESP = S.resp
UNITS = S.units
MISC = S.misc
@assert(ID[1]=="AK.ATKA..BNE", id_err)
@assert(ID[2]=="AK.ATKA..BNN", id_err)
@assert(ID[end-1]=="IU.MIDW.01.BHN", id_err)
@assert(ID[end]=="IU.MIDW.01.BHZ", id_err)
@test ==(LOC[1], GeoLoc(lat=52.2016, lon=-174.1975, el=55.0, az=90.0, inc=-90.0))
@test ≈(LOC[3].lat, LOC[1].lat)
@test ≈(LOC[3].lon, LOC[1].lon)
@test ≈(LOC[3].dep, LOC[1].dep)
# Units must be consistent with the SEED band/instrument code in the channel
for i = 1:length(UNITS)
  if UNITS[i] == "m/s2"
    @assert(in(split(ID[i],'.')[4][2],['G', 'L', 'M', 'N'])==true, unit_err)
  elseif UNITS[i] in ["m/s", "m"]
    @assert(in(split(ID[i],'.')[4][2],['L', 'H'])==true, unit_err)
  elseif UNITS[i] == "v"
    @assert(split(ID[i],'.')[4][2]=='C', unit_err)
  end
end
@test ≈(GAIN[1], 283255.0)
@test ≈(GAIN[2], 284298.0)
@test ≈(GAIN[end-1], 8.38861E9)
@test ≈(GAIN[end], 8.38861E9)
@test RESP[1] == RESP[2] == r1
@test RESP[end-1] == RESP[end] == r2
# xdoc = LightXML.parse_string(xsta); xroot = LightXML.root(xdoc); xnet = child_elements(xroot);
# xr = get_elements_by_tagname(get_elements_by_tagname(get_elements_by_tagname(get_elements_by_tagname(xroot, "Network")[1], "Station")[1], "Channel")[1], "Response")[1];
# stage = get_elements_by_tagname(xr, "Stage");
printstyled("    with MultiStageResp\n", color=:light_green)
# Parse again with msr=true and spot-check stage-level fields
S = FDSN_sta_xml(xsta, true, "0001-01-01T00:00:00", "9999-12-31T23:59:59", 0)
r = S.resp[1]
for f in fieldnames(MultiStageResp)
  @test length(getfield(r,f)) == 9
end
@test r.stage[3].b[1:3] == [0.000244141, 0.000976562, 0.00244141]
@test r.fs[6] == 8000.0
@test r.fac[6] == 2
@test r.delay[6] == 7.5E-4
@test r.corr[6] == 7.5E-4
@test r.stage[9].b[6:9] == [-0.0000000298023, -0.0000000298023, -0.0000000298023, 0.0]
@test r.stage[9].a == []
printstyled("    read_sxml\n", color=:light_green)
# File vs. wildcard reads; the wildcard must pick up strictly more channels
S = read_sxml(xml_stfile, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, false, 0)
T = read_sxml(xml_stpat, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, false, 0)
@assert T.n > S.n
@test findid(T.id[S.n+1], S.id) == 0
@test_throws ErrorException read_sxml("nonexist.txt", "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, false, 0)
printstyled("    read_meta\n", color=:light_green)
S = read_meta("sxml", xml_stfile)
T = read_meta("sxml", xml_stpat)
@assert T.n > S.n
@test findid(T.id[S.n+1], S.id) == 0
printstyled("    overwrite channel headers on time match\n", color=:light_green)
# Re-reading the same XML into S must overwrite, not append: S.n is unchanged
redirect_stdout(out) do
  xml_stfile = path*"/SampleFiles/XML/fdsnws-station_2017-01-12T03-17-42Z.xml"
  S = SeisData()
  read_station_xml!(S, xml_stfile, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, 3)
  n = S.n
  read_station_xml!(S, xml_stfile, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, 3)
  @test S.n == n
  S = read_station_xml(xml_stfile, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, 0)
end
# XML writer
printstyled("    write_sxml\n", color=:light_green)
# Round-trip random channels through the writer; include a malformed id
# ("YY.FAIL") which the writer pads to a full NN.SSS.LL.CCC id
S = randSeisData(12)
S.id[1] = "CC.VALT..BHZ"
S.id[2] = "CC.VALT..BHE"
S.id[3] = "YY.FAIL"
S.name[2] = S.name[1]
write_sxml(f_out, S)
Sr = read_sxml(f_out, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, false, 0)
sort!(S)
sort!(Sr)
for i in 1:S.n
  for f in (:name, :fs, :gain, :units)
    @test getfield(S, f)[i] == getfield(Sr, f)[i]
  end
  @test S.id[i]*(S.id[i] == "YY.FAIL" ? ".." : "") == Sr.id[i]
  if typeof(S.resp[i]) in (PZResp, PZResp64, MultiStageResp)
    # XML responses are single-precision; convert PZResp64 before comparing
    if typeof(S.resp[i]) == PZResp64
      R = S.resp[i]
      S.resp[i] = PZResp(a0 = Float32(R.a0), f0 = Float32(R.f0),
        p = ComplexF32.(R.p), z = ComplexF32.(R.z))
    end
    @test S.resp[i] == Sr.resp[i]
  end
  if typeof(S.loc[i]) == GeoLoc
    for f in (:lat, :lon, :el, :dep, :az, :inc)
      @test getfield(S.loc[i],f) ≈ getfield(Sr.loc[i],f)
    end
  end
end
# Writer must tolerate the deliberately-pathological test set
write_sxml(f_out, breaking_seis())
printstyled("    check that output of write_sxml is re-read identically\n", color=:light_green)
files = ls(xml_stpat)
for file in files
S = read_sxml(file, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, true, 0)
write_sxml(f_out, S)
Sr = read_sxml(f_out, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, true, 0)
sort!(S)
sort!(Sr)
for f in datafields
(f in (:id, :src, :notes)) && continue
@test getfield(S,f) == getfield(Sr,f)
end
end
try
rm(f_out)
catch err
@warn(string("Can't remove ", f_out, ": throws error ", err))
end
printstyled(" checking channel list handling\n", color=:light_green)
file = files[2]
chans = 21:40
S = read_sxml(file, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, true, 0)
write_sxml(f_out, S, chans=chans)
S1 = S[chans]
sort!(S1)
Sr = read_sxml(f_out, "0001-01-01T00:00:00", "9999-12-31T23:59:59", false, true, 0)
sort!(Sr)
for f in datafields
(f in (:id, :src, :notes)) && continue
@test getfield(S1,f) == getfield(Sr,f)
end
printstyled(" extension to GphysChannel\n", color=:light_green)
write_sxml(f_out, S[1])
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4837 | vfname = "SampleFiles/Restricted/10081701.WVP"
# --- SUDS format tests ------------------------------------------------------
# File/pattern pairs: each *.sud sample has reference SAC conversions to
# compare against. `vfname` (restricted volcano-seismic file) is set above.
sac_fstr = "SampleFiles/Restricted/20081701.sac-0*"
eq_wvm1_pat = "SampleFiles/SUDS/*1991191072247.wvm1.sac"
eq_wvm1_file = "SampleFiles/SUDS/eq_wvm1.sud"
eq_wvm2_pat = "SampleFiles/SUDS/*1991191072247.wvm2.sac"
eq_wvm2_file = "SampleFiles/SUDS/eq_wvm2.sud"
lsm_pat = "SampleFiles/SUDS/*1992187065409.sac"
lsm_file = "SampleFiles/SUDS/lsm.sud"
rot_pat = "SampleFiles/SUDS/*1993258220247.sac"
rot_file = "SampleFiles/SUDS/rotate.sud"
printstyled(" SUDS\n", color=:light_green)
printstyled(" readsudsevt\n", color=:light_green)
# read into SeisEvent
W = SUDS.readsudsevt(rot_file)
W = SUDS.readsudsevt(lsm_file)
W = SUDS.readsudsevt(eq_wvm1_file)
W = SUDS.readsudsevt(eq_wvm2_file)
printstyled(" in read_data\n", color=:light_green)
# Exercise the read_data front end (verbose, full headers); output is discarded
if safe_isfile(vfname)
  redirect_stdout(out) do
    SUDS.suds_support()
    S = verified_read_data("suds", vfname, v=3, full=true)
    S = verified_read_data("suds", eq_wvm1_file, v=3, full=true)
    S = verified_read_data("suds", eq_wvm2_file, v=3, full=true)
    S = verified_read_data("suds", lsm_file, v=3, full=true)
    S = verified_read_data("suds", rot_file, v=3, full=true)
    S = verified_read_data("suds", "SampleFiles/SUDS/eq_wvm*sud", v=3, full=true)
  end
end
printstyled(" equivalence to SAC readsuds\n", color=:light_green)
# Volcano-seismic event supplied by W. McCausland
if has_restricted
  S = SUDS.read_suds(vfname, full=true)
  S2 = verified_read_data("sac", sac_fstr, full=true)
  for n = 1:S2.n
    id = S2.id[n]
    # SAC IDs lack the network code; prepend "OV" to match the SUDS IDs
    if startswith(id, ".")
      id = "OV"*id
    end
    i = findid(id, S)
    if i > 0
      @test isapprox(S.x[i], S2.x[n])
      @test isapprox(Float32(S2.fs[n]), Float32(S.fs[i]))
      @test abs(S.t[i][1,2] - S2.t[i][1,2]) < 2000 # SAC only has ~1 ms precision
    end
  end
else
  printstyled(" skipped volcano-seismic test file\n", color=:red)
end
# SUDS sample files: each .sud must match its SAC reference trace-for-trace
# from eq_wvm1.sud
S = SUDS.read_suds(eq_wvm1_file)
S1 = verified_read_data("sac", eq_wvm1_pat)
for n = 1:S1.n
  i = findid(S1.id[n], S)
  if i > 0
    @test isapprox(S.x[i], S1.x[n])
  else
    @warn(string(S1.id[n], " not found in S; check id conversion!"))
  end
end
# from eq_wvm2.sud
S = SUDS.read_suds(eq_wvm2_file)
S1 = verified_read_data("sac", eq_wvm2_pat)
for n = 1:S1.n
  i = findid(S1.id[n], S)
  if i > 0
    @test isapprox(S.x[i], S1.x[n])
  else
    @warn(string(S1.id[n], " not found in S; check id conversion!"))
  end
end
# from lsm.sud (in-place variant read_suds!)
S = SeisData()
SUDS.read_suds!(S, lsm_file)
S1 = verified_read_data("sac", lsm_pat)
for n = 1:S1.n
  i = findid(S1.id[n], S)
  if i > 0
    @test isapprox(S.x[i], S1.x[n])
  else
    @warn(string(S1.id[n], " not found in S; check id conversion!"))
  end
end
# from rot.sud
S = SUDS.read_suds(rot_file)
S1 = verified_read_data("sac", rot_pat)
for n = 1:S1.n
  i = findid(S1.id[n], S)
  if i > 0
    @test isapprox(S.x[i], S1.x[n])
  else
    @warn(string(S1.id[n], " not found in S; check id conversion!"))
  end
end
printstyled(" data types not yet seen in real samples\n", color=:light_green)
# Synthesize a minimal SUDS file per sample data type: one unreadable packet,
# one station-comment (struct 5) packet, one waveform (struct 7) packet.
f_out = string("fake_suds.sud")
id_buf = codeunits("UWSPORTSBALL")
nb = Int32(3200)
n_unreadable = Int32(60)
# Data types keyed to codes [0x32, 0x63, 0x64, 0x66, 0x74] below
suds_types = (Int32, Complex{Float32}, Float64, Float32, Complex{Float64})
# skeleton: 0x53, 0x00, struct_id, struct_size, nbytes_following_struct
suds_struct_tag = (0x53, 0x00, zero(Int16), zero(Int32), zero(Int32))
suds_unreadable = (0x53, 0x36, Int16(4), n_unreadable, zero(Int32))
suds_5 = (0x53, 0x36, Int16(5), Int32(76), zero(Int32))
suds_readable = (0x53, 0x36, Int16(7), Int32(62), nb)
for (j, data_code) in enumerate([0x32, 0x63, 0x64, 0x66, 0x74])
  T = suds_types[j]
  x = rand(T, div(nb, sizeof(T)))
  io = open(f_out, "w")
  # Unreadable packet
  [write(io, i) for i in suds_unreadable]
  write(io, rand(UInt8, n_unreadable))
  # Packet 5
  [write(io, i) for i in suds_5]
  p = position(io)
  write(io, id_buf)
  write(io, rand(Int16, 2))
  write(io, 45.55)
  write(io, 122.62)
  write(io, 77.1f0)
  write(io, rand(UInt8, 7))
  write(io, 0x76)
  write(io, data_code)
  write(io, rand(UInt8, 3))
  write(io, 1.152f8, 0.0f0, 0.0f0)
  write(io, Int16[2, 32])
  skip(io, 4)
  write(io, 0.0f0, -32767.0f0)
  # Packet 7
  [write(io, i) for i in suds_readable]
  p = position(io)
  write(io, id_buf)
  write(io, d2u(now()))
  write(io, zero(Int16))
  write(io, data_code)
  write(io, 0x00)
  skip(io, 4)
  write(io, Int32(div(nb, sizeof(T))))
  write(io, 50.0f0)
  skip(io, 16)
  write(io, zero(Float64))
  write(io, 0.0f0)
  write(io, x)
  close(io)
  if j < 5
    S = verified_read_data("suds", f_out)
    if data_code == 0x63
      # complex Float32 data is read back as its real part
      @test isapprox(S.x[1], real(x))
    else
      @test isapprox(S.x[1], x)
    end
  else
    # Complex{Float64} (0x74) is unsupported and must throw
    @test_throws ErrorException read_data("suds", f_out)
  end
end
safe_rm(f_out)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2475 | import SeisIO.UW: readuwevt, uwdf, uwdf!, uwpf, uwpf!
# --- UW (University of Washington) format tests -----------------------------
# Sample files: a "W" suffix is a data file, "o" suffix a pick file; the same
# stem can be read as a pair or individually.
printstyled(" UW\n", color=:light_green)
uwf0 = joinpath(path, "SampleFiles/UW/00*W")
uwf1 = joinpath(path, "SampleFiles/UW/99011116541")
uwf2 = joinpath(path, "SampleFiles/UW/94100613522o")
uwf3 = joinpath(path, "SampleFiles/UW/02062915175o")
uwf4 = joinpath(path, "SampleFiles/UW/00012502123W")
uwf5 = joinpath(path, "SampleFiles/UW/02062915205o")
# read_data("uw", ...) must agree with a direct uwdf! call on the same file
S = verified_read_data("uw", uwf0)
S1 = SeisData()
uwdf!(S1, joinpath(path, "SampleFiles/UW/00012502123W"), false, false, false, 0)
[@test S.src[i] == abspath(uwf0) for i in 1:S.n]
S.src = S1.src
@test S == S1
printstyled(" readuwevt\n", color=:light_green)
# Can we read from pickfile only? datafile only?
printstyled(" read (pickfile, datafile) from pickfile name\n", color=:light_green)
W = readuwevt(uwf1*"o")
printstyled(" read (pickfile, datafile) from datafile name\n", color=:light_green)
redirect_stdout(out) do
  W = readuwevt(uwf1*"W", v=3)
end
# Expected channels at 100 Hz in the 99011116541 event
for i in ["UW.WWVB..TIM","UW.TCG..TIM","UW.TDH..EHZ","UW.VLM..EHZ"]
  @test !isempty(findall(W.data.id.==i))
  n = findfirst(W.data.id.==i)
  @test ≈(W.data.fs[n], 100.0)
end
# Can we read from filename stub?
printstyled(" read (pickfile, datafile) from filename stub\n", color=:light_green)
W = readuwevt(uwf1)
@test W.hdr.id == W.source.eid == "99011116541"
@test W.hdr.mag.val == 3.0f0
@test occursin("99011116541o", W.hdr.src)
@test W.hdr.ot == DateTime("1999-01-11T16:54:11.96")
# Event trace data must be appendable to a SeisData structure
S = breaking_seis()
n = S.n
append!(S, convert(SeisData, W.data))
@test S.n == n + W.data.n
# δt: origin-time offset (s) within the minute, used to convert relative
# phase travel times to the expected absolute values
δt = μs*(rem(W.hdr.ot.instant.periods.value*1000 - SeisIO.dtconst, 60000000))
i = findfirst(W.data.id.=="UW.TDH..EHZ")
pha = W.data[i].pha["P"].tt
@test ≈(pha + δt, 14.506)
i = findfirst(W.data.id.=="UW.VLM..EHZ")
pha = W.data[i].pha["S"].tt
@test ≈(pha + δt, 24.236)
i = findfirst(W.data.id.=="UW.VFP..EHZ")
@test W.data.misc[i]["dur"] == 116.0
printstyled(" pickfile handling\n", color=:light_green)
# What about when there is no data file?
W = readuwevt(uwf2)
@test W.hdr.mag.val == 0.9f0
@test occursin("94100613522o", W.hdr.src)
@test W.hdr.ot == DateTime("1994-10-06T13:52:39.02")
W = readuwevt(uwf3)
@test W.hdr.id == "041568"
printstyled(" data file with a time correction structure\n", color=:light_green)
redirect_stdout(out) do
  W = readuwevt(uwf4, v=2)
end
printstyled(" pick file with nonnumeric error info\n", color=:light_green)
redirect_stdout(out) do
  W = readuwevt(uwf5, v=2)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 5616 | hexIDs = UInt16[0x0001, 0x0002, 0x0003]
# --- win32 format tests -----------------------------------------------------
# findhex: -1 when the ID is absent, else the index in the hexID table
@test findhex(0x0000, hexIDs) == -1
@test findhex(0x0001, hexIDs) == 1
@test findhex(0x0002, hexIDs) == 2
@test findhex(0x0003, hexIDs) == 3
# win32 with gaps
if has_restricted
  cfile = path*"/SampleFiles/Restricted/03_02_27_20140927.euc.ch"
  printstyled(" win32\n", color=:light_green)
  @info(string(timestamp(), ": win32 tests use an SSL-encrypted tarball."))
  # test that verbose logging works
  printstyled(" verbose logging\n", color=:light_green)
  redirect_stdout(out) do
    fname = path*"/SampleFiles/Restricted/2014092709*.cnt"
    cfile = path*"/SampleFiles/Restricted/03_02_27_20140927.euc.ch"
    S = verified_read_data("win32", fname, cf=cfile, v=3, vl=true)
    files = ls(fname)
    S2 = SeisData()
    read_data!(S2, "win32", files, cf=cfile, v=3, vl=true)
    compare_SeisData(S, S2)
    # vl=true must log the read parameters to every channel's notes
    for i in 1:S.n
      notes = S.notes[i]
      @test length(notes) == 60
      for n in notes
        @test occursin("jst = true", n)
        @test occursin("nx_new = 8640000", n)
        @test occursin("nx_add = 360000", n)
        @test occursin("v = 3", n)
      end
    end
    # There should be 8 channels
    @test S.n==8
    # There should be exactly 360000 points per channel (all are 100 Hz)
    nx = [length(S.x[i]) for i=1:1:S.n]
    @test minimum(nx)==360000
    @test maximum(nx)==360000
    # Check against SAC files
    testfiles = path*"/SampleFiles/Restricted/".*["20140927000000.V.ONTA.E.SAC",
    "20140927000000.V.ONTA.H.SAC",
    "20140927000000.V.ONTA.N.SAC",
    "20140927000000.V.ONTA.U.SAC",
    "20140927000000.V.ONTN.E.SAC",
    "20140927000000.V.ONTN.H.SAC",
    "20140927000000.V.ONTN.N.SAC",
    "20140927000000.V.ONTN.U.SAC"]
    # SAC files prepared in SAC with these commands from day-long Ontake files
    # beginning at midnight Japan time converted to SAC with win32 precompiled
    # utilities
    #
    # SAC commands that generate these from day-long SAC files:
    # cut b 32400 35999.99
    # r /home/josh/SAC/20140927000000.V.*.SAC"
    # ch b 0 nzjday 270 nzhour 09
    # w over
    # q
    U = SeisData()
    for f in testfiles
      T = verified_read_data("sac", f)[1]
      push!(U, T)
    end
    # ID correspondence: map win32 IDs to the SAC naming scheme
    # (network "JP", "V" station prefix, empty loc, "Z" component -> "U")
    j = Array{Int64,1}(undef, S.n)
    for (n,i) in enumerate(S.id)
      id = split(i, '.')
      id[3] = ""
      id[2] = "V"*id[2]
      id[1] = "JP"
      c = id[4][3:3]
      if c == "Z"
        id[4] = "U"
      else
        id[4] = c
      end
      id = join(id, '.')
      j[n] = findid(id, U)
    end
    # Indices where win32 and SAC samples disagree beyond eps()
    inds = Array{Array{Int64,1}}(undef, S.n)
    for (n,i) in enumerate(j)
      inds[n] = findall(abs.(S.x[n]-U.x[i]).>eps())
    end
    # The only time gaps should be what are in the logs:
    # ┌ Warning: Time gap detected! (15.0 s at V.ONTA.H, beginning 2014-09-27T09:58:00)
    # └ @ SeisIO ~/.julia/dev/SeisIO/src/Formats/Win32.jl:137
    # ┌ Warning: Time gap detected! (15.0 s at V.ONTA.U, beginning 2014-09-27T09:58:00)
    # └ @ SeisIO ~/.julia/dev/SeisIO/src/Formats/Win32.jl:137
    # ┌ Warning: Time gap detected! (15.0 s at V.ONTA.N, beginning 2014-09-27T09:58:00)
    # └ @ SeisIO ~/.julia/dev/SeisIO/src/Formats/Win32.jl:137
    # ┌ Warning: Time gap detected! (15.0 s at V.ONTA.E, beginning 2014-09-27T09:58:00)
    # └ @ SeisIO ~/.julia/dev/SeisIO/src/Formats/Win32.jl:137
    for k = 1:S.n
      i = j[k]
      if !isempty(inds[i])
        # any mismatch must be exactly the known 15 s gap in minute 58
        @test length(inds[i]) == 1500
        @test div(first(inds[i]), 6000) == 58
        @test div(last(inds[i]), 6000) == 58
        r₀ = rem(first(inds[i]), 6000)
        r₁ = rem(last(inds[i]), 6000)
        @test round((r₁ - r₀)/100, digits=1) == 15.0
      end
    end
  end
  # Converting the mean to single-point precision gives exactly the same
  # result as SAC conversion from win32; however, the average computed
  # over an hour will be slightly different.
  #
  # This can be verified using the script "ontake_test.jl" in ../../internal_tests/
  # Now test the other two bits types, 4-bit Int ...
  printstyled(" Int4\n", color=:light_green)
  fname = path*"/SampleFiles/Restricted/2014092700000302.cnt"
  cfile = path*"/SampleFiles/Restricted/03_02_27_20140927*ch"
  S = SeisData()
  redirect_stdout(out) do
    verified_read_data!(S, "win32", fname, cf=cfile, v=3)
  end
  i = findid("V.ONTA.23.EHH", S)
  @test length(S.x[i]) == 60*S.fs[i]
  @test maximum(S.x[i]) == 11075.0
  @test minimum(S.x[i]) == -5026.0
  # ...and 24-bit bigendian Int...
  printstyled(" Int24\n", color=:light_green)
  fname = path*"/SampleFiles/Restricted/2014092712000302.cnt"
  verified_read_data!(S, "win32", fname, cf=cfile)
  printstyled(" channel continuation\n", color=:light_green)
  # reading a second minute into S must extend channels, not duplicate them
  @test length(S.x[1]) == round(Int64, 120*S.fs[1]) == S.t[1][end,1]
  ii = findlast(S.id.=="V.ONTA.23.EHH")
  @test maximum(S.x[ii]) == 14896.0
  @test minimum(S.x[ii]) == -12651.0
  for id in unique(S.id)
    @test length(findall(S.id.==id))==1
  end
  # ...and 32-bit bigendian Int ...
  printstyled(" Int32\n", color=:light_green)
  cfile = path*"/SampleFiles/Restricted/chandata_20140927.txt"
  fname = path*"/SampleFiles/Restricted/2014092712370207VM.cnt"
  verified_read_data!(S, "win32", fname, cf=cfile)
else
  printstyled(" win32 data format not tested (files not found)\n", color=:red)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6932 | # save to disk/read from disk
# --- Native SeisIO file I/O (rseis/wseis) tests -----------------------------
# Scratch files for each container type, plus legacy-format sample files
savfile1 = "test.seis"
savfile2 = "test.hdr"
savfile3 = "test.evt"
legf_050 = path * "/SampleFiles/SEIS/2011.314.7D.J34A..HH1.seis"
legf_052 = path * "/SampleFiles/SEIS/seisio_testfile_v0.52.seis"
uw_file = path * "/SampleFiles/UW/02062915175o"
# Changing this test to guarantee at least one campaign-style measurement ... and test splat notation ... and something with no notes
printstyled("Native I/O\n", color=:light_green)
printstyled(" read/write of types\n", color=:light_green)
printstyled(" SeisData\n", color=:light_green)
# wseis must log the write to each channel's notes; rseis must round-trip
S = breaking_seis()
wseis(savfile1, S)
@test all([any([occursin("wrote to file " * savfile1, n) for n in S.notes[i]]) for i in 1:S.n])
R = rseis(savfile1)[1]
@test(R==S)
# A note containing every 1-byte Char except 0x7f must still be writable,
# but equality after the in-memory edit is broken (R was read before the edit)
S.notes[2][1] = string(String(Char.(0x00:0x7e)), String(Char.(0x80:0xff)))
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test(R!==S)
printstyled(" SeisHdr\n", color=:light_green)
H = randSeisHdr()
wseis(savfile2, H)
@test any([occursin("wrote to file " * savfile2, n) for n in H.notes])
H2 = rseis(savfile2)[1]
@test(H==H2)
printstyled(" SeisEvent\n", color=:light_green)
EV = SeisEvent(hdr=H, data=convert(EventTraceData, S))
EV.data.misc[1] = breaking_dict
wseis(savfile3, EV)
R = rseis(savfile3)[1]
@test R == EV
printstyled(" read/write of each type to same file\n", color=:light_green)
# One file, four records of different types, written via splat notation
Ch = randSeisChannel(s=true)
Ch.x = rand(24)
Ch.t = vcat(Ch.t[1], [24 0])
wseis(savfile3, EV, S, H, Ch)
R = rseis(savfile3)
@test(R[1]==EV)
@test(R[2]==S)
@test(R[3]==H)
@test(R[4]==Ch)
@test(S.misc[1] == R[1].data.misc[1] == R[2].misc[1])
# with mmap
printstyled(" read with mmap\n", color=:light_green)
R1 = rseis(savfile3, memmap=true)
@test R == R1
# read one file with one record number
printstyled(" read file with integer record number\n", color=:light_green)
R = rseis("test.seis", c=1)
@test R[1] == S
# read everything
printstyled(" read a multi-record file\n", color=:light_green)
R = rseis("test*")
@test R[3] == R[5] # Header is read twice, test.evt (file 1) record 3, test.hdr (file 2) record 1
@test R[2] == R[6] # Seis is read twice, test.evt (file 1) record 2, test.seis (file 3) record 1
# read when some files have record 3 but not all
printstyled(" read file list with list of record numbers\n", color=:light_green)
R = rseis("test.*", c = [1,3])
@test(R[3]==R[2])
@test(R[1].data.misc[1]==R[4].misc[1])
# read nothing as each target file has one record
printstyled(" read nothing due to an intentionally poor choice of record numbers\n", color=:light_green)
R = rseis(["test.seis", "test.h*"], c=[2, 3])
@test isempty(R)
# read the first record of each file
printstyled(" read first record from each SeisIO file using a wildcard list\n", color=:light_green)
R = rseis("test*", c=1)
@test R[1] == EV
@test R[2] == H
@test R[3] == S
printstyled(" test that every custom Type can be written and read faithfully\n", color=:light_green)
# Instantiate one default object per registered SeisIO type name and
# round-trip the whole collection through a single file
redirect_stdout(out) do
  A = Array{Any,1}(undef, 0)
  for T in SeisIO.TNames
    println("testing ", T)
    if T == PhaseCat
      push!(A, randPhaseCat())
    elseif T == MultiStageResp
      push!(A, MultiStageResp(6))
    else
      push!(A, getfield(SeisIO, Symbol(T))())
    end
  end
  wseis(savfile1, A...)
  R = rseis(savfile1, v=2)
  for i = 1:length(R)
    @test R[i] == A[i]
  end
  # add an incompatible type; should throw a warning, and another when read back in
  push!(A, rand(Float64,3))
  wseis(savfile1, A...)
  R = rseis(savfile1, v=2)
end
printstyled(" test read/write with data compression\n", color=:light_green)
# comp=0x02: always compress
SeisIO.KW.comp = 0x02
S = randSeisData()
nx = 4
S.t[1] = [1 0; nx 0]
S.x[1] = randn(eltype(S.x[1]), nx)
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test R == S
# comp=0x01: compress only channels longer than KW.n_zip samples; build one
# channel above and one below that threshold
SeisIO.KW.comp = 0x01
S = randSeisEvent()
C = SeisChannel()
nx = SeisIO.KW.n_zip*2
C.fs = 1.0
C.t = [1 0; nx 0]
C.x = randn(nx)
n = S.data.n
push!(S.data, C)
@test S.data.n == n+1
C = SeisChannel()
nx = 4
C.t = hcat(collect(1:4), Int64.(cumsum(rand(UInt32,4))))
C.x = randn(nx)
push!(S.data, C)
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test R == S
# read_data("seisio", ...)
S1 = read_data(savfile1)
@test convert(SeisData, S.data) == S1
S1 = verified_read_data("seisio", savfile1)
@test convert(SeisData, S.data) == S1
S2 = rseis(savfile3)
S1 = read_data(savfile3)
Tr = S2[1].data
@test Tr.n == S1.n
for i = 1:Tr.n
  # NaN != NaN, so skip channels containing NaNs
  if any(isnan, Tr.x[i]) == false
    @test Tr.x[i] == S1.x[i]
  end
end
# verify wildcard functionality
S1 = read_data("test.se*")
@test convert(SeisData, S.data) == S1
# Type unit tests with read_data("seisio", ...)
C = randSeisChannel()
wseis(savfile1, C)
S1 = verified_read_data("seisio", savfile1)
@test S1[1] == C
C = convert(EventChannel, randSeisChannel())
wseis(savfile1, C)
S1 = verified_read_data("seisio", savfile1)
@test S1[1] == convert(SeisChannel, C)
S = randSeisData()
wseis(savfile1, S)
S1 = verified_read_data("seisio", savfile1)
@test S1 == S
S = convert(EventTraceData, S)
wseis(savfile1, S)
S1 = verified_read_data("seisio", savfile1)
@test S1 == convert(SeisData, S)
# A file whose first record is not trace data (GeoLoc) must be skipped over
Ev = randSeisEvent()
L = GeoLoc(lat=45.560504, lon=-122.617020, el=51.816, az=180.0, inc=0.0)
wseis(savfile1, L, Ev)
S1 = verified_read_data("seisio", savfile1)
@test S1 == convert(SeisData, Ev.data)
rm(savfile1)
rm(savfile2)
rm(savfile3)
# Legacy file reading ========================================================
# 0.50 all custom types can use write(); rseis, wseis no longer required
# String arrays and :misc are written in a completely different way
# Type codes for :misc changed
# deprecated BigFloat/BigInt support in :misc
# :n is no longer stored as a UInt32
# :x compression no longer automatic and changed from Blosc to lz4
# Versions < 0.50 and non-SeisIO files must throw on read
set_file_ver(legf_050, 0.4)
@test get_file_ver(legf_050) == 0.4f0
@test_throws ErrorException rseis(legf_050)
@test_throws ErrorException get_file_ver(uw_file)
set_file_ver(legf_050, 0.50)
S = rseis(legf_050)[1]
@test S.n == 3
@test S.id == ["7D.J34A..HH1", "7D.J34A..HH2", "7D.J34A..HHZ"]
# 0.52 2019-09-03 added Types: CoeffResp, MultiStageResp
# 0.51 2019-08-01 added :f0 to PZResp, PZResp64
# (legacy reader is the same for both)
S = rseis(legf_052)[1]
@test S.n == 3
@test S.id == ["IU.COR.00.BH1", "IU.COR.00.BH2", "IU.COR.00.BHZ"]
@test typeof(S.resp[1]) == MultiStageResp
@test typeof(S.resp[1].stage[1]) == PZResp64
@test isapprox(S.resp[1].stage[1].p, [ -0.0173949 + 0.01234im,
                                       -0.0173949 - 0.01234im,
                                       -0.0175489 + 0.0im ,
                                       -0.0591783 + 0.0im ,
                                       -39.18 + 49.12im ,
                                       -39.18 - 49.12im ])
@test isapprox(S.resp[1].gain, [2660.0, 1.67772e6, 1.0])
@test S.resp[1].fac == [0,1,1]
@test S.resp[1].o[1] == "m/s"
@test S.resp[1].i[1] == "v" # note: this was wrong at the time in absolute sense. Should be "V" in UCUM.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 862 | printstyled("SeisIO.Nodal submodule\n", color=:light_green)
"""
    pop_nodal_dict!(info::Dict{String, Any}; n::Int64=12, kmax::Int64=32)

Populate `info` in place with `n` random key/value pairs. Each key is a random
string of length 1:`kmax`; each value's type is chosen by drawing one of the
TDMS data-type codes (0x01–0x0a plus 0x20) and mapping it through
`SeisIO.Nodal.tdms_codes` (default `UInt8` for unmapped codes). A `Char`-typed
code yields a random `String` of length 1:256; any other type yields `rand(T)`.
Returns `nothing`.
"""
function pop_nodal_dict!(info::Dict{String, Any}; n::Int64=12, kmax::Int64=32)
  # same 11 candidate codes as before, built from a range instead of a literal list
  codes = UInt32[0x00000001:0x0000000a; 0x00000020]
  for _ in 1:n
    key = randstring(rand(1:kmax))
    T = get(SeisIO.Nodal.tdms_codes, rand(codes), UInt8)
    info[key] = (T == Char) ? randstring(rand(1:256)) : rand(T)
  end
  return nothing
end
# Paths to the Silixa TDMS sample file and its reference-value dump
fstr = path*"/SampleFiles/Nodal/Node1_UTC_20200307_170738.006.tdms"
fref = path*"/SampleFiles/Nodal/silixa_vals.dat"
# Load the compressed reference bytes (fixed size: 39,518,815 bytes) and
# decompress to a 60000-row Int16 matrix for comparison in later tests
io = open(fref, "r")
YY = Array{UInt8, 1}(undef, 39518815)
readbytes!(io, YY)
close(io)  # was leaked: the handle was never released after reading
XX = reshape(decompress(Int16, YY), 60000, :)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 648 | # save to disk/read from disk
# --- Nodal native file I/O tests --------------------------------------------
# Build a NodalData object from random SeisData (uniform fs and time matrix
# so all channels share one window), then round-trip channel and full objects
# through wseis/rseis.
printstyled(" Native file I/O\n", color=:light_green)
savfile1 = "test.dat"
fs = 100.0
nc = 10
nx = 2^12
D = randSeisData(nc, nx=nx, s=1.0)
t = mk_t(nx, D.t[1][1,2])
for i in 1:D.n
  D.fs[i] = fs
  D.t[i] = copy(t)
end
S = convert(NodalData, D)
# random but in-range origin coordinates
S.ox = (rand()-0.5) * 360.0
S.oy = (rand()-0.5) * 90.0
S.oz = rand()*1000.0
S.info["foo"] = "bar"
pop_nodal_dict!(S.info)
printstyled(" NodalChannel\n", color=:light_green)
C = getindex(S, 1)
wseis(savfile1, C)
R = rseis(savfile1)[1]
@test R == C
printstyled(" NodalData\n", color=:light_green)
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test R == S
rm(savfile1)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4595 | printstyled(" Nodal types\n", color=:light_green)
# --- Nodal type method-extension tests --------------------------------------
# Indices and constants reused throughout this section
printstyled(" method extensions\n", color=:light_green)
fs = 100.0
j = 7
j1 = 3
j2 = 5
j3 = 6
j4 = 8
nc = 10
nx = 2^12
v1 = 3.0
v2 = 4.0
J = j1:j2
printstyled(" append!\n", color=:light_green)
# append! must extend every nodal field and the data matrix columns
U = read_nodal("silixa", fstr)
S = deepcopy(U)
S2 = S[j1:j2]
n = S.n
append!(S, S2)
@test S.n == n + S2.n
for f in SeisIO.Nodal.nodalfields
  F1 = getfield(S, f)
  F2 = getfield(S2, f)
  for (i,j) in enumerate(n+1:n+S2.n)
    @test F1[j] == F2[i]
  end
end
@test S.data[:, n+1:n+S2.n] == S2.data
printstyled(" convert\n", color=:light_green)
printstyled(" NodalChannel ⟺ SeisChannel\n", color=:light_green)
# conversion round-trips must be lossless
C = randSeisChannel()
C1 = convert(NodalChannel, C)
C2 = convert(SeisChannel, C1)
@test C == C2
printstyled(" NodalChannel ⟺ EventChannel\n", color=:light_green)
C1 = EventChannel(C)
C2 = convert(NodalChannel, C1)
C3 = convert(SeisChannel, C2)
@test typeof(C1) == EventChannel
@test typeof(C2) == NodalChannel
@test C == C3
printstyled(" NodalData ⟺ SeisData\n", color=:light_green)
# uniform fs/t so the SeisData can become a single nodal data matrix
S = randSeisData(nc, nx=nx, s=1.0)
t = mk_t(nx, S.t[1][1,2])
for i in 1:S.n
  S.fs[i] = fs
  S.t[i] = copy(t)
end
D = convert(NodalData, S)
for f in SeisIO.Nodal.nodalfields
  @test length(getfield(D, f)) == nc
end
@test size(D.data) == (nx, nc)
@test NodalData(S) == D
S = convert(SeisData, U)
for i in 1:S.n
  @test isapprox(S.x[i], U.x[i])
end
@test SeisData(U) == S
printstyled(" NodalData ⟺ EventTraceData\n", color=:light_green)
Ev = convert(EventTraceData, S)
D = convert(NodalData, Ev)
@test typeof(Ev) == EventTraceData
for f in SeisIO.Nodal.nodalfields
  @test length(getfield(D, f)) == S.n
end
@test size(D.data) == size(U.data)
@test NodalData(Ev) == D
@test EventTraceData(D) == Ev
printstyled(" deleteat!\n", color=:light_green)
# deleting channel(s) must remove the matching data columns
S = deepcopy(U)
deleteat!(S, 3)
@test S.data[:, 1:2] == U.data[:, 1:2]
@test S.data[:, 3:S.n] == U.data[:, 4:U.n]
S = deepcopy(U)
deleteat!(S, J)
@test S.data[:, 1:2] == U.data[:, 1:2]
@test S.data[:, 3:S.n] == U.data[:, 6:U.n]
printstyled(" getindex\n", color=:light_green)
# integer index -> NodalChannel; range index -> NodalData subset
S = deepcopy(U)
C = S[j]
@test S.data[:,j] == C.x
for f in SeisIO.Nodal.nodalfields
  F = getfield(S, f)
  @test getindex(F, j) == getfield(C, f)
end
S2 = S[j1:j2]
for f in SeisIO.Nodal.nodalfields
  F1 = getfield(S, f)
  F2 = getfield(S2, f)
  for (i,j) in enumerate(j1:j2)
    @test F1[j] == F2[i]
  end
end
@test S.data[:, j1:j2] == S2.data
printstyled(" isempty\n", color=:light_green)
# default-constructed objects are empty
S = NodalData()
@test isempty(S) == true
C = NodalChannel()
@test isempty(C) == true
L = NodalLoc()
@test isempty(L) == true
printstyled(" isequal\n", color=:light_green)
S = deepcopy(U)
@test S == U
S = NodalData()
T = NodalData()
@test S == T
C = NodalChannel()
D = NodalChannel()
@test C == D
L1 = NodalLoc()
L2 = NodalLoc()
@test L1 == L2
printstyled(" push!\n", color=:light_green)
# pushing a channel appends a copy (equal values, distinct storage)
U = read_nodal("silixa", fstr)
S = deepcopy(U)
n = S.n
C = S[S.n]
push!(S, C)
@test S.n == n + 1
for f in SeisIO.Nodal.nodalfields
  F1 = getindex(getfield(S, f), n)
  F2 = getfield(C, f)
  @test F1 == F2
end
@test S.data[:, n+1] == S.data[:, n]
@test ===(S.data[:, n+1], S.data[:, n]) == false
# push! must also accept a SeisChannel
D = convert(SeisChannel, C)
push!(S, D)
printstyled(" setindex!\n", color=:light_green)
S = deepcopy(U)
S2 = getindex(S, j1:j2)
setindex!(S, S2, j3:j4)
@test S.data[:,j1:j2] == S.data[:, j3:j4]
C = S[j2]
setindex!(S, C, j1)
@test S.data[:, j1] == S.data[:, j2]
printstyled(" show\n", color=:light_green)
# smoke test: display methods must not throw on empty objects
redirect_stdout(out) do
  for i = 1:10
    for T in (NodalLoc, NodalChannel, NodalData)
      repr(T(), context=:compact=>true)
      repr(T(), context=:compact=>false)
      show(T())
    end
  end
end
printstyled(" sizeof\n", color=:light_green)
# sizeof must grow once notes and :misc entries are added
S = deepcopy(U)
# Add some notes
for j in 1:3
  j_min = rand(1:3)
  j_max = rand(j_min:S.n)
  note!(S, j_min:j_max, randstring(rand(1:100)))
end
# Add some things to :misc
for i in 1:4
  j_min = rand(1:3)
  j_max = rand(j_min:S.n)
  # add 1-6 entries each to the :misc field of each channel
  for j in j_min:j_max
    D = S.misc[j]
    pop_nodal_dict!(D, n=rand(1:6))
  end
end
@test sizeof(S) > 168
C = S[1]
note!(C, randstring(rand(1:100)))
note!(C, randstring(rand(1:100)))
pop_nodal_dict!(C.misc, n=rand(3:18))
@test sizeof(C) > 120
printstyled(" sort!\n", color=:light_green)
# swapping two IDs then sorting must swap the data columns, and writes
# through :x must be visible in :data (views share storage)
S = deepcopy(U)
id1 = S.id[1]
id2 = S.id[2]
S.id[1] = id2
S.id[2] = id1
sort!(S)
@test S.data[:,1] == U.data[:,2]
@test S.data[:,2] == U.data[:,1]
S.x[1][1] = v1
S.x[2][1] = v2
@test S.data[1,1] == v1
@test S.data[1,2] == v2
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 820 | printstyled(" processing of NodalData\n", color=:light_green)
# --- NodalData processing tests ---------------------------------------------
fstr = path*"/SampleFiles/Nodal/Node1_UTC_20200307_170738.006.tdms"
S1 = read_nodal("silixa", fstr)
# these should all work
for f in (:convert_seis!, :demean!, :detrend!, :sync!, :taper!, :ungap!, :unscale!)
  printstyled(string(" ", f, "\n"), color=:light_green)
  getfield(SeisIO, f)(S1)
end
# merge! is unsupported for NodalData; merge_ext! is a no-op returning nothing
@test_throws ErrorException merge!(S1)
@test Nodal.merge_ext!(S, 1, collect(2:S1.n)) == nothing
# test resampling: in-place and out-of-place forms must agree
printstyled(string(" ", "resample!", "\n"), color=:light_green)
f0 = 500.
S2 = resample(S1,f0)
resample!(S1,f0)
@test S2.data == S1.data
# test filtering: in-place and out-of-place forms must agree
printstyled(string(" ", "filtfilt!", "\n"), color=:light_green)
S2 = filtfilt(S1,rt="Bandpass",fl=100.,fh=200.)
filtfilt!(S1,rt="Bandpass",fl=100.,fh=200.)
@test S2.data == S1.data
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3860 | printstyled(" read_nodal\n", color=:light_green)
# --- read_nodal tests -------------------------------------------------------
segy_nodal = string(path, "/SampleFiles/SEGY/FORGE_78-32_iDASv3-P11_UTC190428135038.sgy")
printstyled(" Nodal SEG Y\n", color=:light_green)
# Known header values for the FORGE DAS sample: 33 channels @ 2000 Hz
S = read_nodal("segy", segy_nodal)
@test S.n == 33
@test S.id[1] == "N0.00001..OG0"
@test S.id[2] == "N0.00002..OG0"
@test S.id[S.n] == "N0.00033..OG0"
@test isapprox(S.fs[1], 2000.0)
@test isapprox(S.fs[2], 2000.0)
@test isapprox(S.fs[S.n], 2000.0)
@test S.name[1] == "43249_1"
@test S.gain[1] != NaN
@test S.units[1] == "m/m"
@test S.t[1][1,2] == 1556458718000000
printstyled(" channel unit tests\n", color=:light_green)
# verbose read of a channel subset; output discarded
redirect_stdout(out) do
  S = read_nodal("segy", segy_nodal, chans=2:10, v=3)
end
printstyled(" defaults\n", color=:light_green)
# S1 is the full read; every subset below is checked against its columns
S1 = read_nodal("segy", segy_nodal)
printstyled(" start channel only\n", color=:light_green)
S = read_nodal("segy", segy_nodal, chans=2:33)
@test size(S.data) == (30000, 32)
@test S1.data[:, 2:end] == S.data
printstyled(" end channel only\n", color=:light_green)
S = read_nodal("segy", segy_nodal, chans=1:32)
@test size(S.data) == (30000, 32)
@test S1.data[:, 1:end-1] == S.data
printstyled(" only one channel\n", color=:light_green)
j = 31
S = read_nodal("segy", segy_nodal, chans=j)
@test size(S.data) == (30000, 1)
@test S1.data[:, j:j] == S.data
@test S1.name[j] == S.name[1]
printstyled(" start & end channel\n", color=:light_green)
S = read_nodal("segy", segy_nodal, chans=collect(11:30))
@test size(S.data) == (30000, 20)
@test S1.data[:, 11:30] == S.data
printstyled(" Silixa TDMS\n", color=:light_green)
# Base read, no modifiers
# full read must match the reference matrix XX; unknown formats must throw
S1 = read_nodal("silixa", fstr)
@test isapprox(S1.data, XX)
@test_throws ErrorException read_nodal("foo", fstr)
# Start and end time unit tests
printstyled(" time & channel unit tests\n", color=:light_green)
redirect_stdout(out) do
  S = read_nodal("silixa", fstr, s=2.0, t=40.0, chans=2:10, v=3)
end
printstyled(" defaults\n", color=:light_green)
# full file: 60000 samples x 448 channels; s=/t= trim rows, chans= trims columns
S1 = read_nodal("silixa", fstr)
printstyled(" start time only\n", color=:light_green)
S = read_nodal("silixa", fstr, s=1.0)
@test size(S.data) == (59000, 448)
@test S1.data[1001:end, :] == S.data
printstyled(" start index in chunk > 1\n", color=:light_green)
S = read_nodal("silixa", fstr, s=2.0)
@test size(S.data) == (58000, 448)
@test S1.data[2001:end, :] == S.data
printstyled(" end time only\n", color=:light_green)
S = read_nodal("silixa", fstr, t=59.0)
@test size(S.data) == (59000, 448)
@test S1.data[1:59000, :] == S.data
printstyled(" end index in chunk < last\n", color=:light_green)
S = read_nodal("silixa", fstr, t=59.0)
@test size(S.data) == (59000, 448)
@test S1.data[1:59000, :] == S.data
printstyled(" start & end time\n", color=:light_green)
S = read_nodal("silixa", fstr, s=1.0, t=59.0)
@test size(S.data) == (58000, 448)
@test S1.data[1001:59000, :] == S.data
printstyled(" start channel only\n", color=:light_green)
S = read_nodal("silixa", fstr, chans=2:448)
@test size(S.data) == (60000, 447)
@test S1.data[:, 2:end] == S.data
printstyled(" end channel only\n", color=:light_green)
S = read_nodal("silixa", fstr, chans=1:447)
@test size(S.data) == (60000, 447)
@test S1.data[:, 1:end-1] == S.data
printstyled(" only one channel\n", color=:light_green)
S = read_nodal("silixa", fstr, chans=23)
@test size(S.data) == (60000, 1)
@test S1.data[:, 23:23] == S.data
@test S1.name[23] == S.name[1]
printstyled(" start & end channel\n", color=:light_green)
S = read_nodal("silixa", fstr, chans=collect(101:200))
@test size(S.data) == (60000, 100)
@test S1.data[:, 101:200] == S.data
printstyled(" all four\n", color=:light_green)
# combined time window and channel subset
S = read_nodal("silixa", fstr, s=2.0, t=40.0, chans=2:10)
@test size(S.data) == (38000, 9)
@test S1.data[2001:40000, 2:10] == S.data
printstyled("  Utils\n", color=:light_green)
printstyled("    info_dump\n", color=:light_green)
# Exercise info_dump on the nodal structure U; console output is captured
# in the test suite's `out` stream rather than printed.
redirect_stdout(() -> info_dump(U), out)
printstyled("  seismogram differentiation/integration\n", color=:light_green)
# Tests for convert_seis!/convert_seis: differentiation/integration between
# displacement ("m"), velocity ("m/s") and acceleration ("m/s2") units.
convert_file = path*"/SampleFiles/SEIS/2019_M7.1_Ridgecrest.seis"
S = randSeisData(3, s=1.0)
# One channel of each unit class, so every conversion path is exercised.
S.units = ["m/s2", "m", "m/s"]
U = deepcopy(S)
S1 = deepcopy(S)
C = deepcopy(U[1])
printstyled("    conversion to m/s\n", color=:light_green)
redirect_stdout(out) do
  convert_seis!(S, v=3)
  @test S.units[1] == S.units[2] == S.units[3] == "m/s"
  # Channel 3 was already m/s: its data must be untouched.
  @test S.x[3] == U.x[3]
end
printstyled("    conversion to m/s2\n", color=:light_green)
redirect_stdout(out) do
  convert_seis!(S, v=2, units_out="m/s2")
  @test S.units[1] == S.units[2] == S.units[3] == "m/s2"
  # Channel 1 started as m/s2; round trip must recover it.
  @test isapprox(S.x[1], U.x[1])
end
printstyled("    conversion to m\n", color=:light_green)
redirect_stdout(out) do
  convert_seis!(S1, v=2, units_out="m")
  @test S1.units[1] == S1.units[2] == S1.units[3] == "m"
  # Channel 2 started as m; it must be unchanged.
  @test isapprox(S1.x[2], U.x[2])
end
printstyled("    conversion from m to m/s2 and back to m\n", color=:light_green)
redirect_stdout(out) do
  # Round-trip invariance test: differentiate twice, integrate twice,
  # repeated over 100 random data sets.
  for j = 1:100
    S = randSeisData(3, s=1.0)
    S.units = ["m", "m", "m"]
    for i = 1:3
      # Cap trace length to bound runtime.
      Nx = min(length(S.x[i]), 20000)
      S.t[i] = [1 S.t[i][1,2]; Nx 0]
      S.x[i] = rand(Float64, Nx)
    end
    U = deepcopy(S)
    convert_seis!(S, units_out="m/s2")
    @test S.units[1] == S.units[2] == S.units[3] == "m/s2"
    convert_seis!(S, units_out="m")
    @test S.units[1] == S.units[2] == S.units[3] == "m"
    for i = 1:3
      @test isapprox(S.x[i], U.x[i])
    end
  end
end
printstyled("    Float32 precision\n", color=:light_green)
redirect_stdout(out) do
  # Round trip on real Ridgecrest data (Float32 traces in channels 1:16,
  # per the index ranges used below).
  S = rseis(convert_file)[1]
  U = deepcopy(S)
  convert_seis!(S, units_out="m/s2", v=2)
  convert_seis!(S)
  for i = 1:16
    @test isapprox(S.x[i], U.x[i])
  end
end
printstyled("    Float64 precision\n", color=:light_green)
redirect_stdout(out) do
  S = rseis(convert_file)[1]
  for i = 1:S.n
    S.x[i] = Float64.(S.x[i])
  end
  detrend!(S)
  U = deepcopy(S)
  convert_seis!(S, units_out="m/s2")
  # Channels 17:19 are presumably already in m/s2 here — their data must
  # pass through untouched (TODO: confirm against the sample file).
  for i = 17:19
    @test S.x[i] == U.x[i]
  end
  convert_seis!(S)
  for i = 1:16
    @test isapprox(S.x[i], U.x[i])
    @test isapprox(S.t[i], U.t[i])
  end
  # Chained conversions and the out-of-place variant must not error.
  convert_seis!(S, units_out="m")
  convert_seis!(S, units_out="m/s")
  convert_seis!(S, units_out="m")
  convert_seis!(S, units_out="m/s2")
  T = convert_seis(S, units_out="m")
end
printstyled("    extension to SeisChannel\n", color=:light_green)
redirect_stdout(out) do
  # Same API on a single channel, in-place and out-of-place.
  convert_seis!(C, units_out="m", v=1)
  convert_seis!(C, units_out="m/s", v=1)
  convert_seis!(C, units_out="m/s2", v=1)
  convert_seis!(C, units_out="m/s", v=1)
  convert_seis!(C, units_out="m", v=1)
  D = convert_seis(C, units_out="m/s2", v=1)
end
# Unknown source units or an unsupported target must throw.
C.units = "{unknown}"
@test_throws ErrorException convert_seis!(C, units_out="m")
@test_throws ErrorException convert_seis!(SeisData(C), units_out="foo")
printstyled("    logging\n", color=:light_green)
redirect_stdout(out) do
  # Processing/source logs must print for SeisData, (S, i), and SeisChannel.
  show_processing(S)
  show_src(S)
  show_processing(S,1)
  show_src(S,1)
  show_processing(S[1])
  show_src(S[1])
end
printstyled("  demean, detrend, unscale\n", color=:light_green)
# Tests for demean!/detrend!/unscale! and their out-of-place variants.
S = randSeisData(2, s=1.0)[2:2]
fs = 100.0
nx = 14400
t = floor(Int64, time()-60.0)*sμ
S.fs[1] = 100.0
S.t[1] = [1 t; nx 0]
# Build a zero-mean random trace as ground truth.
x = (rand(nx) .- 0.5)
broadcast!(-, x, x, mean(x))
S.x[1] = x
S₀ = deepcopy(S)
# Test 1: de-mean
μ = 6.0
S.x[1] .+= μ
T = demean(S)
# The removed offset must be ≈ μ and the result ≈ the original zero-mean data.
@test maximum(S.x[1] .- T.x[1]) ≤ 6.01
@test minimum(S.x[1] .- T.x[1]) ≥ 5.99
@test maximum(abs.(T.x[1]-S₀.x[1])) < 0.01
@test abs(mean(T.x[1])) < 0.01
# Test 2: de-trend
m = 0.01
demean!(S)
# Add a known linear trend of slope m, then remove it.
S.x[1] += (m.*(1.0:1.0:Float64(nx)))
U = detrend(S)
@test maximum(abs.(U.x[1]-S₀.x[1])) < 0.1
@test maximum(abs.(U.x[1]-T.x[1])) < 0.1
@test abs(mean(U.x[1])) < 1.0e-8
# de-mean and de-trend with NaNs
# NaNs must be preserved in place (neither filled nor spread) by both ops.
nx = length(S.x[1])
for i = 1:3
  j = randperm(nx)[1:rand(1:div(nx,2))]
  V = deepcopy(S)
  V.x[1][j] .= NaN
  demean!(V)
  @test length(j) == length(findall(isnan.(V.x[1])))
  k = randperm(nx)[1:rand(1:div(nx,2))]
  W = deepcopy(S)
  W.x[1][k] .= NaN
  detrend!(W)
  @test length(k) == length(findall(isnan.(W.x[1])))
  C = randSeisChannel()
  j = randperm(length(C.x))[1:rand(1:div(length(C.x),2))]
  C.x[j] .= NaN
  D = deepcopy(C)
  demean!(C)
  detrend!(D)
  @test length(j) == length(findall(isnan.(C.x))) == length(findall(isnan.(D.x)))
end
# Safe demean, detrend
# Smoke tests: the out-of-place forms must run on random structures.
C = randSeisChannel()
D = detrend(C)
S = randSeisData()
T = detrend(S)
# V = randSeisEvent()
# W = detrend(V)
C = randSeisChannel()
D = demean(C)
S = randSeisData()
T = demean(S)
# V = randSeisEvent()
# W = demean(V)
demean!(C)
detrend!(C)
# demean!(V)
# detrend!(V)
# Test in-place unscaling
C = randSeisChannel(s=true)
S = randSeisData()
# V = randSeisEvent()
unscale!(C)
unscale!(S, irr=true)
# unscale!(V, irr=true)
# Test for out-of-place unscaling
C = randSeisChannel()
S = randSeisData()
# V = randSeisEvent()
D = unscale(C)
T = unscale(S)
# W = unscale(V)
# tests for channel ranges
# With chans=1:5, only channels 1-5 may change; 6-10 must be untouched.
S = randSeisData(10, s=1.0)
U = deepcopy(S)
demean!(S, chans=1:5)
for i = 1:S.n
  @test (S[i] == U[i]) == (i < 6 ? false : true)
end
S = randSeisData(10, s=1.0)
U = deepcopy(S)
detrend!(S, chans=1:5)
for i = 1:S.n
  @test (S[i] == U[i]) == (i < 6 ? false : true)
end
printstyled("  env!\n", color=:light_green)
# Tests for env!/env (envelope via analytic signal), including gapped data,
# against DSP.hilbert as the reference implementation.
# GphysChannel, gaps
printstyled("    on SeisChannel\n", color=:light_green)
C = randSeisChannel(s=true)
# Time matrix with two internal gaps (columns: sample index, gap in μs).
C.t = [ 1     0;
        2     500;
        1959  69420;
        90250 0]
C.x = randn(C.t[end,1])
env!(C)
# GphysChannel, no gaps
C = randSeisChannel(s=true)
ungap!(C)
D = env(C)
# GphysData
printstyled("    on SeisData\n", color=:light_green)
S = randSeisData(24)
ungap!(S)
U = deepcopy(S)
env!(S, v=2)
T = env(U, v=2)
printstyled("    testing that env! == DSP.hilbert\n", color=:light_green)
for i = 1:S.n
  # Only time-series channels with enough samples are comparable.
  if S.fs[i] > 0.0 && length(S.x[i]) > 256
    ex1 = S.x[i]
    ex2 = abs.(DSP.hilbert(U.x[i]))
    @test isapprox(ex1, ex2)
    # In-place and out-of-place envelopes must agree.
    @test isapprox(S.x[i], T.x[i])
  end
end
# Gapped data: the envelope must be computed per segment, so each segment
# must match DSP.hilbert applied to that segment alone.
for k = 1:4
  S = randSeisData(24, s=1.0, fs_min=20.0)
  for i = 1:S.n
    nx = 2^16
    t = [1 0; 512 134235131; 2^14 100000; 2^15 12345678; nx 0]
    S.t[i] = t
    S.x[i] = randn(eltype(S.x[i]), nx)
  end
  U = deepcopy(S)
  env!(S)
  for i = 1:S.n
    # Pick one of the first three segments at random and compare.
    j = rand(1:3)
    si = S.t[i][j,1]
    ei = S.t[i][j+1,1]-1
    ex1 = S.x[i][si:ei]
    ex2 = abs.(DSP.hilbert(U.x[i][si:ei]))
    @test isapprox(ex1, ex2)
  end
end
printstyled("    testing on very short data segments\n", color=:light_green)
# Traces of length 2, 4, 8, 16, 32 must not error.
S = randSeisData(5)
N = 2 .^ collect(1:1:5)
for i = 1:S.n
  nx = N[i]
  S.fs[i] = 100.0
  S.t[i] = [1 0; nx 0]
  S.x[i] = rand(nx)
end
env!(S)
fs = 100.0
nx = 10000
T = Float32
printstyled("  filtfilt!\n", color=:light_green)
Δ = round(Int64, sμ/fs)
# Methods
# In-place filtfilt! must match the out-of-place filtfilt on gapped data.
C = randSeisChannel(s=true)
C.fs = fs
C.t = [1 100; div(nx,4) Δ; div(nx,2) 5*Δ+3; nx 0]
C.x = randn(T, nx)
D = filtfilt(C)
filtfilt!(C)
@test C == D
# test for channel ranges
S = randSeisData(24, s=1.0, fs_min=40.0)
# Force channels 1 and S.n to known sampling and length.
for i in (1, S.n)
  S.fs[i] = fs
  S.t[i] = [1 0; nx 0]
  S.x[i] = randn(Float32, nx)
end
U = deepcopy(S)
filtfilt!(S)
S1 = filtfilt(U)
# Screen out channels where filtering produced NaNs in either result,
# so the equality check below compares only well-defined output.
klist = Int64[]
for i = 1:S.n
  if any(isnan.(S.x[i])) || any(isnan.(S1.x[i]))
    push!(klist, i)
  end
end
deleteat!(S, klist)
deleteat!(S1, klist)
# Compare in-place vs. out-of-place results on whatever channels survived
# the NaN screen; if nothing survived, just warn.
if S.n > 0
  @test S == S1
else
  # BUG FIX: `warn` was removed in Julia 1.0; use the Logging macro @warn,
  # otherwise this branch raises UndefVarError when all channels are deleted.
  @warn "All channels deleted; can't test equality."
end
printstyled("    source logging\n", color=:light_green)
redirect_stdout(out) do
  # Processing logs must print for (S, i), SeisChannel, and SeisData forms.
  show_processing(S, 1)
  show_processing(S[1])
  show_processing(S)
end
printstyled("    equivalence with DSP.filtfilt\n", color=:light_green)
# filtfilt! must match the naive reference implementation on Float64 data.
for i = 1:10
  C = randSeisChannel(s=true)
  C.fs = fs
  C.t = [1 100; nx 0]
  C.x = randn(Float64, nx)
  D = deepcopy(C)
  filtfilt!(C)
  naive_filt!(D)
  @test isapprox(C.x, D.x)
end
printstyled("    former breaking cases\n", color=:light_green)
printstyled("      very short data windows\n", color=:light_green)
# Regression: a 5-sample trace once broke filtfilt!.
n_short = 5
C = randSeisChannel()
C.fs = fs
C.t = [1 0; n_short 0]
C.x = randn(Float32, n_short)
filtfilt!(C)
S = randSeisData(24, s=1.0, fs_min=40.0)
S.fs[S.n] = fs
S.t[S.n] = [1 0; n_short 0]
S.x[S.n] = randn(Float32, n_short)
filtfilt!(S)
printstyled("      repeated segment lengths\n", color=:light_green)
# Regression: two channels with identical segment lengths once broke the
# internal buffer reuse.
n_rep = 2048
S = randSeisData(24, s=1.0, fs_min=40.0)
for i in (1, S.n)
  S.fs[i] = fs
  S.t[i] = [1 0; n_rep 0]
  S.x[i] = randn(Float32, n_rep)
end
filtfilt!(S)
GC.gc()
# test for channel ranges
# Only chans=1:3 may change; all later channels must remain equal.
S = randSeisData(24, s=1.0, fs_min=40.0)
for i in (1, S.n)
  S.fs[i] = fs
  S.t[i] = [1 0; n_rep 0]
  S.x[i] = randn(Float32, n_rep)
end
U = deepcopy(S)
filtfilt!(S, chans=1:3)
for i = 1:S.n
  if i < 4
    @test S[i] != U[i]
  else
    @test S[i] == U[i]
  end
end
GC.gc()
printstyled("    checking that all filters work\n", color=:light_green)
# Every combination of design method and response type must run cleanly.
for dm in String["Butterworth", "Chebyshev1", "Chebyshev2", "Elliptic"]
  for rt in String["Bandpass", "Bandstop", "Lowpass", "Highpass"]
    S = randSeisData(3, s=1.0, fs_min=40.0)
    filtfilt!(S, rt=rt, dm=dm)
  end
end
# Benchmark table: time and allocation of filtfilt! relative to data size.
printstyled("    test all filters on SeisData\n\n", color = :light_green)
@printf("%12s | %10s | time (ms) | filt (MB) | data (MB) | ratio\n", "Name (dm=)", "Type (rt=)")
@printf("%12s | %10s | --------- | --------- | --------- | -----\n", " -----------", "---------")
for dm in String["Butterworth", "Chebyshev1", "Chebyshev2", "Elliptic"]
  for rt in String["Bandpass", "Bandstop", "Lowpass", "Highpass"]
    S = randSeisData(8, s=1.0, fs_min=40.0)
    # @timed returns (value, time, bytes allocated, gctime, memallocs).
    (xx, t, b, xx, xx) = @timed filtfilt!(S, rt=rt, dm=dm)
    s = sum([sizeof(S.x[i]) for i = 1:S.n])
    r = b/s
    @printf("%12s | %10s | %9.2f | %9.2f | %9.2f | ", dm, rt, t*1000, b/1024^2, s/1024^2)
    printstyled(@sprintf("%0.2f\n", r), color=printcol(r))
    GC.gc()
  end
end
# Benchmark filtfilt! against naive_filt! on a long, gapless channel.
printstyled(string("\n  test all filters on a long, gapless ", T, " SeisChannel\n\n"), color = :light_green)
nx = 3456000
@printf("%12s | %10s | data | filtfilt! | naive_filtfilt! | ratio |\n", "", "")
@printf("%12s | %10s | sz (MB) | t (ms) | sz (MB) | t (ms) | sz (MB) | speed | size |\n", "Name (dm=)", "Type (rt=)")
@printf("%12s | %10s | ------- | ------ | ------- | ------ | ------- | ----- | ---- |\n", " -----------", "---------")
for dm in String["Butterworth", "Chebyshev1", "Chebyshev2", "Elliptic"]
  for rt in String["Bandpass", "Bandstop", "Lowpass", "Highpass"]
    C = randSeisChannel(s=true)
    C.fs = fs
    C.t = [1 100; nx 0]
    C.x = randn(T, nx)
    D = deepcopy(C)
    # b = @allocated(filtfilt!(C, rt=rt, dm=dm))
    (xx, tc, b, xx, xx) = @timed(filtfilt!(C, rt=rt, dm=dm))
    (xx, td, n, xx, xx) = @timed(naive_filt!(D, rt=rt, dm=dm))
    # n = @allocated(naive_filt!(D, rt=rt, dm=dm))
    sz = sizeof(C.x)
    p = b/sz
    r = b/n
    q = tc/td
    @printf("%12s | %10s | %7.2f | %6.1f | %7.2f | %6.1f | %7.2f | ", dm, rt, sz/1024^2, tc*1000.0, b/1024^2, td*1000.0, n/1024^2)
    printstyled(@sprintf("%5.2f", q), color=printcol(q))
    @printf(" | ")
    printstyled(@sprintf("%4.2f", r), color=printcol(r))
    @printf(" | \n")
    GC.gc()
  end
end
println("")
printstyled(stdout,"  merge! behavior and intent\n", color=:light_green)
# Shared constants and channel factories for the merge! tests.
# All factories build channels with the same :id so they form a merge group;
# only :loc/:resp differ where a test needs non-mergeable variants.
nx = 100                           # samples per test trace
fs = 100.0                         # sampling rate [Hz]
Δ = round(Int64, 1000000/fs)       # sample interval in μs
id = "VV.STA1.00.EHZ"
loc = GeoLoc(lat=46.8523, lon=121.7603, el=4392.0)
loc1 = GeoLoc(lat=rand(), lon=rand(), el=rand())
resp1 = fctoresp(0.2, 1.0)
resp2 = fctoresp(2.0, 1.0)
units = "m/s"
src = "test"
t0 = 0
# C1: starts at t0.
mkC1() = SeisChannel( id = id, name = "Channel 1",
                loc = loc, fs = fs, resp = deepcopy(resp1), units = units,
                src = "test channel 1",
                notes = [tnote("New channel 1.")],
                misc = Dict{String,Any}( "P" => 6.1 ),
                t = [1 t0; nx 0],
                x = randn(nx) )
# C2: same parameters as C1, starts exactly one trace length (nx*Δ) later.
mkC2() = SeisChannel( id = id, name = "Channel 2",
                loc = loc, fs = fs, resp = deepcopy(resp1), units = units,
                src = "test channel 2",
                notes = [tnote("New channel 2.")],
                misc = Dict{String,Any}( "S" => 11.0 ),
                t = [1 t0+nx*Δ; nx 0],
                x = randn(nx) )
# C4: like C2 but at a different location, so it must NOT merge with C1/C2.
mkC4() = (C = mkC2(); C.loc = loc1;
          C.name = "Channel 4";
          C.src = "test channel 4";
          C.notes = [tnote("New channel 4.")]; C)
# C5: like C4 (loc1), starting two trace lengths after t0; merges with C4.
mkC5() = (C = mkC2(); C.loc = loc1;
          C.name = "Channel 5";
          C.src = "test channel 5";
          C.notes = [tnote("New channel 5.")];
          C.t = [1 t0+nx*2Δ; nx 0];
          C)
# C2_ov/C3_ov: C2 shifted so its first `nov` samples overlap the previous trace.
C2_ov() = (nov = 3; C = mkC2(); C.t = [1 t0+(nx-nov)*Δ; nx 0]; C)
C3_ov() = (nov = 3; C = mkC2(); C.t = [1 t0+2*(nx-nov)*Δ; nx 0]; C)
# Random filler channel for the merge tests: `c` is forwarded to
# randSeisChannel's `c` keyword (1000 samples when true, 10000 when false).
# The name is randomized and :misc is emptied so merges can't collide on them.
function prandSC(c::Bool)
  ch = c ? randSeisChannel(c=true, nx=1000) : randSeisChannel(c=false, nx=10000)
  ch.name = randstring(20)
  ch.misc = Dict{String,Any}()
  return ch
end
# Build the expected time matrix of a merge: expand every time matrix in T
# to per-sample epoch times with t_expand, pool and deduplicate those times,
# then re-collapse the sorted result with t_collapse at sampling rate fs.
function mk_tcat(T::Array{Array{Int64,2},1}, fs::Float64)
  expanded = [t_expand(w, fs) for w in T]
  tt = unique(vcat(expanded...))
  sort!(tt)
  return t_collapse(tt, fs)
end
# ===========================================================================
printstyled(stdout,"    xtmerge!\n", color=:light_green)
# xtmerge! must collapse time/data pairs closer than half the tolerance δ:
# 12 unique points plus 6 exact duplicates must reduce back to 12.
δ = 20000
tmax = div(typemax(Int64),2)-2δ
x = randn(12)
t = sort(rand(0:2δ:tmax, 12))
while (length(unique(t)) < 12)
  push!(t, rand(0:2δ:tmax))
  t = sort(unique(t))
end
x = vcat(x, x[1:6])
t = vcat(t, t[1:6])
xtmerge!(t, x, div(δ,2))
@test length(t) == 12
@test length(x) == 12
# ===========================================================================
printstyled(stdout,"    removal of traces with no data or time info\n", color=:light_green)
# Channels with empty :x or empty :t must be dropped by merge!.
S = SeisData(prandSC(false), prandSC(false), prandSC(false), prandSC(false))
S.x[2] = Float64[]
S.x[3] = Float64[]
S.t[4] = Array{Int64,2}(undef,0,0)
merge!(S, v=1)
basic_checks(S)
sizetest(S, 1)
printstyled(stdout,"    ability to handle irregularly-sampled data\n", color=:light_green)
# Three irregular (fs=0) channels with identical id/resp/loc/units must
# merge into one.
C = prandSC(true)
namestrip!(C)
S = SeisData(C, prandSC(true), prandSC(true))
namestrip!(S)
for i = 2:3
  S.id[i] = identity(S.id[1])
  S.resp[i] = copy(S.resp[1])
  S.loc[i] = deepcopy(S.loc[1])
  S.units[i] = identity(S.units[1])
end
T = merge(S, v=1)
basic_checks(T)
sizetest(T, 1)
# ===========================================================================
printstyled(stdout,"    simple merges\n", color=:light_green)
printstyled(stdout,"      three channels, two w/same params, no overlapping data\n", color=:light_green)
S = SeisData(mkC1(), mkC2(), prandSC(false))
# Is the group merged correctly?
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test vcat(U.x[1], U.x[2])==S.x[i]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# Do the notes log the extra source?
# NOTE: the comprehension variable `i` shadows the channel index, but the
# iterable S.notes[i] is evaluated in the enclosing scope first, so this is
# benign (if confusing).
@test findfirst([occursin("New channel 1", i) for i in S.notes[i]]) != nothing
@test findfirst([occursin("test channel 1", i) for i in S.notes[i]]) != nothing
# Are dictionaries merging correctly?
@test haskey(S.misc[i], "P")
@test haskey(S.misc[i], "S")
printstyled(stdout, "      \"zipper\" merge I: two channels, staggered time windows, no overlap\n", color=:light_green)
# Eight contiguous windows alternate between the two channels; the merge
# must stitch them into one continuous 8*nx-sample trace.
S = SeisData(mkC1(), mkC2())
W = Array{Int64,2}(undef, 8, 2);
for i = 1:8
  W[i,:] = t0 .+ [(i-1)*nx*Δ ((i-1)*nx + nx-1)*Δ]
end
w1 = W[[1,3,5,7], :]
w2 = W[[2,4,6,8], :]
S.t[1] = w_time(w1, Δ)
S.t[2] = w_time(w2, Δ)
S.x[1] = randn(4*nx)
S.x[2] = randn(4*nx)
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 1
@test S.t[1] == [1 t0; 8*nx 0]
# ===========================================================================
# (II) as (I) with another channel at a new location
printstyled(stdout,"    channels must have identical :fs, :loc, :resp, and :units to merge \n", color=:light_green)
S = SeisData(mkC1(), mkC2(), prandSC(false), mkC4())
U = deepcopy(S)
merge!(S)
@test S.n == 3
basic_checks(S)
# Are there two channels with the same ID?
@test length(findall(S.id.==id)) == 2
# Is the subgroup merged correctly?
i = findfirst(S.src.=="test channel 2")
@test S.loc[i] == loc
@test mk_tcat(U.t[1:2], fs) == S.t[i]
@test vcat(U.x[1], U.x[2]) == S.x[i]
# Is the odd channel left alone?
j = findfirst(S.src.=="test channel 4")
@test S.loc[j] == loc1
for i in datafields
  if i != :notes
    @test getfield(U, i)[4] == getfield(S, i)[j]
  end
end
# with a second channel at the new location
S = SeisData(deepcopy(U), mkC5())
U = deepcopy(S)
merge!(S)
@test S.n == 3
basic_checks(S)
# Are there two channels with the same ID?
@test length(findall(S.id.==id)) == 2
# Is the first subgroup merged correctly?
i = findfirst(S.src.=="test channel 2")
@test S.loc[i] == loc
@test mk_tcat(U.t[1:2], fs) == S.t[i]
@test vcat(U.x[1], U.x[2]) == S.x[i]
# Is the second subgroup merged correctly?
j = findfirst(S.src.=="test channel 5")
@test S.loc[j] == loc1
@test mk_tcat(U.t[4:5], fs) == S.t[j]
@test vcat(U.x[4], U.x[5]) == S.x[j]
# with resp, not loc
# Same two-subgroup scenario, but the split criterion is :resp.
S = deepcopy(U)
S.loc[4] = deepcopy(loc)
S.loc[5] = deepcopy(loc)
S.resp[4] = deepcopy(resp2)
S.resp[5] = deepcopy(resp2)
U = deepcopy(S)
merge!(S)
@test S.n == 3
basic_checks(S)
# Are there two channels with the same ID?
@test length(findall(S.id.==id)) == 2
# Is the first subgroup merged correctly?
i = findfirst(S.src.=="test channel 2")
@test S.loc[i] == loc
@test mk_tcat(U.t[1:2], fs) == S.t[i]
@test vcat(U.x[1], U.x[2]) == S.x[i]
# Is the second subgroup merged correctly?
j = findfirst(S.src.=="test channel 5")
@test S.resp[j] == resp2
@test mk_tcat(U.t[4:5], fs) == S.t[j]
@test vcat(U.x[4], U.x[5]) == S.x[j]
# ===========================================================================
printstyled(stdout,"    one merging channel with a time gap\n", color=:light_green)
# Channel 2 carries an internal gap; the merge must preserve it.
S = SeisData(mkC1(), mkC2(), prandSC(false))
S.x[2] = rand(2*nx)
S.t[2] = vcat(S.t[2][1:1,:], [nx 2*Δ], [2*nx 0])
# Is the group merged correctly?
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test U.x[1] == S.x[i][1:nx]
@test U.x[2][1:nx] == S.x[i][nx+1:2nx]
@test U.x[2][nx+1:2nx] == S.x[i][2nx+1:3nx]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
printstyled(stdout,"      merge window is NOT the first\n", color=:light_green)
# Now the gap lives in channel 1, so the appended window isn't the first.
os = 2
S = SeisData(mkC1(), mkC2(), prandSC(false))
S.x[1] = rand(2*nx)
S.t[1] = vcat(S.t[1][1:1,:], [nx os*Δ], [2*nx 0])
S.t[2][1,2] += (os+nx)*Δ
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test U.x[1] == S.x[i][1:2nx]
@test U.x[2] == S.x[i][2nx+1:3nx]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# ===========================================================================
printstyled(stdout,"    one merge group has non-duplication time overlap\n", color=:light_green)
printstyled(stdout,"      check for averaging\n", color=:light_green)
# The nov overlapping samples hold different values, so the merge must
# average them.
nov = 3
S = SeisData(mkC1(), C2_ov(), prandSC(false))
# Is the group merged correctly?
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test S.x[i][1:nx-nov] == U.x[1][1:nx-nov]
@test S.x[i][nx-nov+1:nx] == 0.5*(U.x[1][nx-nov+1:nx] + U.x[2][1:nov])
@test S.x[i][nx+1:2nx-nov] == U.x[2][nov+1:nx]
@test S.t[i] == [1 U.t[1][1,2]; 2nx-nov 0]
# Do the notes log the extra source?
@test findfirst([occursin("New channel 1", i) for i in S.notes[i]]) != nothing
@test findfirst([occursin("test channel 1", i) for i in S.notes[i]]) != nothing
# Are dictionaries merging correctly?
@test haskey(S.misc[i], "P")
@test haskey(S.misc[i], "S")
printstyled(stdout,"      src overlap window is NOT first\n", color=:light_green)
os = 2
S = SeisData(mkC1(), C2_ov(), prandSC(false))
S.x[1] = rand(2*nx)
S.t[1] = vcat(S.t[1][1:1,:], [nx os*Δ], [2*nx 0])
S.t[2][1,2] += (os+nx)*Δ
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test S.x[i][1:2nx-nov] == U.x[1][1:2nx-nov]
@test S.x[i][2nx-nov+1:2nx] == 0.5*(U.x[1][2nx-nov+1:2nx] + U.x[2][1:nov])
@test S.x[i][2nx+1:3nx-nov] == U.x[2][nov+1:nx]
@test S.t[i] == vcat(U.t[1][1:2,:], [length(S.x[i]) 0])
printstyled(stdout,"      dest overlap window is NOT first\n", color=:light_green)
nov = 3
S = SeisData(mkC1(), C2_ov(), prandSC(false))
S.x[2] = rand(2*nx)
S.t[2] = [1 t0-nx*Δ; nx+1 Δ*(nx-nov); 2*nx 0]
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test S.x[i][1:nx] == U.x[2][1:nx]
@test S.x[i][nx+1:2nx-nov] == U.x[1][1:nx-nov]
@test S.x[i][2nx-nov+1:2nx] == 0.5*(U.x[1][nx-nov+1:nx] + U.x[2][nx+1:nx+nov])
@test S.x[i][2nx+1:3nx-nov] == U.x[2][nx+nov+1:2nx]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# ===========================================================================
printstyled(stdout,"    overlap with time mismatch\n", color=:light_green)
#= mk_tcat stops working here as merge shifts one window back in
time one sample to account for the intentional one-Δ time mismatch =#
printstyled(stdout,"      data overlap one sample off of S.t\n", color=:light_green)
# (a) 3_sample overlap with wrong time (C2[1:2] == C1[99:100])
# Overlapping samples are exact duplicates, so the merge must de-duplicate
# (not average) and shift the start back by one Δ.
nov = 2
S = SeisData(mkC1(), C2_ov())
S.x[2] = vcat(copy(S.x[1][nx-nov+1:nx]), rand(nx-nov))
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 1
i = findid(id, S)
@test length(S.x[i]) == 2nx-nov
@test S.x[i][1:nx-nov] == U.x[1][1:nx-nov]
@test S.x[i][nx-nov+1:nx] == U.x[1][nx-nov+1:nx] == U.x[2][1:nov]
@test S.x[i][nx+1:2nx-nov] == U.x[2][nov+1:nx]
@test S.t[i] == [1 U.t[1][1,2]-Δ; 2nx-nov 0]
printstyled(stdout,"      src overlap window is NOT first\n", color=:light_green)
S = deepcopy(U)
S.x[1] = rand(2*nx)
S.t[1] = vcat(S.t[1][1:1,:], [nx os*Δ], [2*nx 0])
S.x[2] = vcat(copy(S.x[1][2nx-nov+1:2nx]), rand(nx-nov))
S.t[2][1,2] += (os+nx)*Δ
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 1
i = findid(id, S)
@test S.x[i][1:2nx-nov] == U.x[1][1:2nx-nov]
@test S.x[i][2nx-nov+1:2nx] == U.x[1][2nx-nov+1:2nx] == U.x[2][1:nov]
@test S.x[i][2nx+1:3nx-nov] == U.x[2][nov+1:nx]
@test S.t[i] == [1 0; U.t[1][2,1] U.t[1][2,2]-Δ; 3nx-nov 0]
#= mk_tcat starts working again here as the time shift is now
applied to the second window, rather than the first. =#
printstyled(stdout,"      dest overlap window is NOT first\n", color=:light_green)
S = SeisData(mkC1(), C2_ov())
S.t[2] = [1 t0-nx*Δ; nx+1 Δ*(nx-nov); 2*nx 0]
S.x[2] = vcat(randn(nx), copy(S.x[1][nx-nov+1:nx]), randn(nx-nov))
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 1
i = findid(id, S)
@test S.x[i][1:nx] == U.x[2][1:nx]
@test S.x[i][nx+1:2nx-nov] == U.x[1][1:nx-nov]
@test S.x[i][2nx-nov+1:2nx] == U.x[1][nx-nov+1:nx] == U.x[2][nx+1:nx+nov]
@test S.x[i][2nx+1:3nx-nov] == U.x[2][nx+nov+1:2nx]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# ===========================================================================
printstyled(stdout,"    multichannel merge with overlap\n", color=:light_green)
# Three channels of one group: C1/C2 overlap with duplicated data, C2/C3
# overlap with differing data (averaged). Breakpoints mark the seams.
breakpt_1 = nx-nov              # 97
breakpt_2 = 2*(nx-nov)-1        # 195
breakpt_3 = 2*(nx-nov)+2        # 198
S = SeisData(mkC1(), C2_ov(), C3_ov(), prandSC(false))
S.x[2] = vcat(copy(S.x[1][nx-nov+1:nx]), rand(nx-nov))
U = deepcopy(S)
merge!(S)
basic_checks(S)
i = findid(id, S)
@test S.n == 2
@test S.x[i][1:breakpt_1] == U.x[1][1:nx-nov]
@test S.x[i][breakpt_1+1:nx] == U.x[1][nx-nov+1:nx] == U.x[2][1:nov]
@test S.x[i][nx+1:breakpt_2] == U.x[2][nov+1:nx-nov-1]
@test S.x[i][breakpt_2+1:breakpt_3] == 0.5*(U.x[2][nx-nov:nx] + U.x[3][1:nov+1])
@test S.x[i][breakpt_3+1:end] == U.x[3][nov+2:end]
@test S.t[i] == [1 U.t[1][1,2]-Δ; 3nx-2nov-1 0]
# What happens when there's a gap in one trace?
breakpt_4 = length(S.x[i])
S = deepcopy(U)
S.x[3] = rand(2*nx)
S.t[3] = vcat(S.t[3][1:1,:], [nx 2*Δ], [2*nx 0])
@test w_time(t_win(S.t[3], Δ), Δ) == S.t[3]
U = deepcopy(S)
merge!(S)
basic_checks(S)
i = findid(id, S)
@test S.t[i] == [1 t0-Δ; 295 2*Δ; 395 0]
@test S.x[i][1:breakpt_1] == U.x[1][1:nx-nov]
@test S.x[i][breakpt_1+1:nx] == U.x[1][nx-nov+1:nx] == U.x[2][1:nov]
@test S.x[i][nx+1:breakpt_2] == U.x[2][nov+1:nx-nov-1]
@test S.x[i][breakpt_2+1:breakpt_3] == 0.5*(U.x[2][nx-nov:nx] + U.x[3][1:nov+1])
@test S.x[i][breakpt_3+1:breakpt_4] == U.x[3][nov+2:nx]
@test S.x[i][breakpt_4+1:end] == U.x[3][nx+1:end]
# ===========================================================================
printstyled(stdout,"    \"zipper\" merges II\n", color=:light_green)
printstyled(stdout,"      two traces with staggered time windows, some with overlap\n", color=:light_green)
# (a) One overlap in a late window should not shift back previous windows
nov = 2
S = SeisData(mkC1(), mkC2())
W = Array{Int64,2}(undef, 8, 2);
for i = 1:8
  W[i,:] = t0 .+ [(i-1)*nx*Δ ((i-1)*nx + nx-1)*Δ]
end
# Pull the last window back by nov samples to create the overlap.
W[8,1] -= nov*Δ
W[8,2] -= nov*Δ
w1 = W[[1,3,5,7], :]
w2 = W[[2,4,6,8], :]
S.t[1] = w_time(w1, Δ)
S.t[2] = w_time(w2, Δ)
S.x[1] = randn(4*nx)
S.x[2] = randn(4*nx)
U = deepcopy(S)
merge!(S)
basic_checks(S)
i = findid(id, S)
@test S.n == 1
@test S.t[i] == [1 t0; 8*nx-nov 0]
# These should be untouched
for j = 1:6
  si = 1 + nx*(j-1)
  ei = nx*j
  k = div(j,2)
  if isodd(j)
    usi = k*nx + 1
    uei = usi + nx - 1
    @test S.x[i][si:ei] == U.x[1][usi:uei]
  else
    usi = (k-1)*nx + 1
    uei = usi + nx - 1
    @test S.x[i][si:ei] == U.x[2][usi:uei]
  end
end
# The only overlap should be here:
@test S.x[i][1+6nx:7nx-nov] == U.x[1][1+3nx:4nx-nov]
@test S.x[i][7nx-nov+1:7nx] == 0.5*(U.x[1][4nx-nov+1:4nx] .+ U.x[2][3nx+1:3nx+nov])
@test S.x[i][7nx+1:8nx-nov] == U.x[2][3nx+nov+1:4nx]
printstyled(stdout,"      one overlap, late window, time mismatch\n", color=:light_green)
# Zipper merge where the last window's samples duplicate the end of the
# other channel but its timestamps are off by (nov - true_nov) samples;
# merge must realign using the data match.
nov = 3
true_nov = 2
S = SeisData(mkC1(), mkC2())
W = Array{Int64,2}(undef, 8, 2);
for i = 1:8
  W[i,:] = t0 .+ [(i-1)*nx*Δ ((i-1)*nx + nx-1)*Δ]
end
W[8,1] -= nov*Δ
W[8,2] -= nov*Δ
w1 = W[[1,3,5,7], :]
w2 = W[[2,4,6,8], :]
S.t[1] = w_time(w1, Δ)
S.t[2] = w_time(w2, Δ)
S.x[1] = randn(4*nx)
S.x[2] = randn(4*nx)
# Duplicate the tail of channel 1 at the head of channel 2's last window.
S.x[2][3nx+1:3nx+2] = copy(S.x[1][4nx-1:4nx])
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 1
# BUG FIX: `i` was never set in this test; the loop and asserts below relied
# on the stale global `i` left over from the previous test (which happened to
# be 1). Bind it explicitly to the merged channel.
i = findid(id, S)
for j = 1:5
  si = 1 + nx*(j-1)
  ei = nx*j
  k = div(j,2)
  if isodd(j)
    usi = k*nx + 1
    uei = usi + nx - 1
    @test S.x[i][si:ei] == U.x[1][usi:uei]
  else
    usi = (k-1)*nx + 1
    uei = usi + nx - 1
    @test S.x[i][si:ei] == U.x[2][usi:uei]
  end
end
δi = nov - true_nov
# Because we moved the start time of 2:3 back by 1, we expect:
@test S.x[i][1+5nx:6nx-δi] == U.x[2][1+2nx:3nx-δi]
# Thus there's a one-point overlap (that gets resolved by averaging) at:
@test S.x[i][6nx-δi+1:6nx] == 0.5*(U.x[2][3nx-δi+1:3nx] + U.x[1][3nx+1:3nx+δi])
# Proceeding through the merged time series, we expect:
@test S.x[i][6nx+1:7nx-nov] == U.x[1][3nx+δi+1:4nx-true_nov]
@test S.x[i][7nx-nov+1:7nx-δi] == U.x[1][4nx-true_nov+1:4nx] == U.x[2][3nx+1:3nx+true_nov]
@test S.x[i][7nx-δi+1:end] == U.x[2][3nx+true_nov+1:end]
# ============================================================================
printstyled(stdout,"    distributivity: S1*S3 + S2*S3 == (S1+S2)*S3\n", color=:light_green)
# The merge operator (*) must distribute over channel addition (+), checked
# on 10 sets of random structures.
imax = 10
printstyled("  trial ", color=:light_green)
for i = 1:imax
  if i > 1
    # Erase the previous "NN/NN" counter (5 characters).
    print("\b\b\b\b\b")
  end
  printstyled(string(lpad(i, 2), "/", imax), color=:light_green)
  S1 = randSeisData()
  S2 = randSeisData()
  S3 = randSeisData()
  # M1 = (S1+S2)*S3
  # M2 = S1*S3 + S2*S3
  @test ((S1+S2)*S3) == (S1*S3 + S2*S3)
  if i == imax
    println("")
  end
end
# ============================================================================
printstyled(stdout,"    checking (formerly-breaking) end-member cases\n", color=:light_green)
printstyled(stdout,"      time windows not in chronological order\n", color=:light_green)
# Channel 3's second window starts 2 s BEFORE its first; merge must reorder.
C1 = mkC1()
C2 = mkC2()
C3 = deepcopy(C1)
C3.t = [1 0; 101 -2000000; 200 0]
append!(C3.x, randn(100))
S = SeisData(C3, C2, prandSC(false))
# Is the group merged correctly?
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test S.t[i] == [1 -1000000; 300 0]
# Samples 101:200 come first in time, then 1:100, then channel 2.
@test vcat(U.x[1][101:200], U.x[1][1:100], U.x[2]) == S.x[i]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# Do the notes log the extra source?
@test findfirst([occursin("New channel 1", j) for j in S.notes[i]]) != nothing
@test findfirst([occursin("test channel 1", j) for j in S.notes[i]]) != nothing
# Are dictionaries merging correctly?
@test haskey(S.misc[i], "P")
@test haskey(S.misc[i], "S")
printstyled(stdout,"      sequential one-sample windows\n", color=:light_green)
# Channel 2's time matrix describes two trailing one-sample windows
# (samples 99 and 100 each carry their own gap); merge must keep them.
C1 = mkC1()
C2 = mkC2()
# BUG FIX: removed the dead assignment `C2.t = [1 1200000; 99 90059; 100 90210]`
# that stood here — it was overwritten by the next line before any use.
C2.t = [1 1000000; 99 90059; 100 90210]
S = SeisData(C1, C2, prandSC(false))
# Is the group merged correctly?
U = deepcopy(S)
merge!(S)
basic_checks(S)
@test S.n == 2
i = findid(id, S)
@test S.t[i] == [1 0; 199 90059; 200 90210]
@test vcat(U.x[1], U.x[2])==S.x[i]
@test mk_tcat(U.t[1:2], fs) == S.t[i]
# Do the notes log the extra source?
@test findfirst([occursin("New channel 1", j) for j in S.notes[i]]) != nothing
@test findfirst([occursin("test channel 1", j) for j in S.notes[i]]) != nothing
# Are dictionaries merging correctly?
@test haskey(S.misc[i], "P")
@test haskey(S.misc[i], "S")
# ============================================================================
printstyled(stdout," merge! and new/extended methods\n", color=:light_green)
fs1 = 50.0
Δ = round(Int64, sμ/fs1)
# t1 = round(Int64,time()/μs)
t1 = 0
# s1 and s2 represent data from a fictitious channel
# s2 begins 1 second later, but has a gap of 1s after sample 25
s1 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
t = [1 t1; 100 0], x=randn(100))
s2 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
t = [1 t1+1000000; 26 1000000; 126 200000; 150 0], x=randn(150))
s3 = SeisChannel(fs = 100.0, gain = 5.0, name = "DEAD.STA.EHE", id = "DEAD.STA..EHE",
t = [1 t1; 100 0], x=rand(100).-0.5)
s4 = SeisChannel(fs = 100.0, gain = 50.0, name = "DEAD.STA.EHE", id = "DEAD.STA..EHE",
t = [1 t1+1000000; 100 1000000; 150 0], x=randn(150))
# We expect:
# (1) LAST 25-26 points in s1 will be averaged with first 25 points in s3
# (2) s3 will be ungapped with exactly 50 samples of junk
# (3) after sync of a seisobj formed from [s1+s3, s2], s2 will be padded with 0.5s
# at start and 3s at end
S = SeisData()
for i in fieldnames(typeof(S))
if i != :n
@test ≈(isempty([]), isempty(getfield(S,i)))
end
end
printstyled(" fixing data gaps\n", color=:light_green)
s2u = ungap(s2)
@test length(s2.x) / s2.fs + μs * sum(s2.t[2:end - 1, 2]) ≈ length(s2u.x) / s2u.fs
printstyled(" channel add and simple merges\n", color=:light_green)
S = S + (s1 * s2)
# Do in-place operations only change leftmost variable?
@test length(s1.x) ≈ 100
@test length(s2.x) ≈ 150
# expected behavior for S = s1+s2
# (0) length[s.x[1]] = 250
# (1) S.x[1][1:50] = s1.x[1:50]
# (2) S.x[1][51:75] = 0.5.*(s1.x[51:75] + s2.x[1:25])
# (3) S.x[1][76:100] = s1.x[76:100]
# (4) S.x[1][101:125] = mean(S.x[1])
# (5) S.x[1][126:250] = s2.x[126:250]
# Basic merge ops
@test S.n == 1
@test length(S.fs) == 1
@test ==(S.fs[1], s1.fs)
@test ==(length(S.gain), 1)
@test ==(S.gain[1], s1.gain)
@test ==(length(S.name), 1)
@test ==(length(S.t),1)
@test ==(length(S.x),1)
@test S.id[1]=="DEAD.STA..EHZ"
U = deepcopy(S)
ungap!(S, m=false, tap=false) # why do I have to force type here
@test ==(length(S.x[1]), 260)
@test ==(S.x[1][1:50], s1.x[1:50])
@test ==(S.x[1][51:75], 0.5.*(s1.x[51:75] .+ s2.x[1:25]))
@test ==(S.x[1][76:100], s1.x[76:100])
@test minimum(isnan.(S.x[1][101:125]))
@test ==(S.x[1][126:225], s2.x[26:125])
@test minimum(isnan.(S.x[1][226:235]))
@test ==(S.x[1][236:260], s2.x[126:150])
# Auto-tapering after a merge
T = deepcopy(S)
ii = findall(isnan.(S.x[1]))
nanfill!(S)
taper!(S, N_min = 0)
@test length(findall(isnan.(S.x[1])))==0 # No more NaNs?
@test sum(diff(S.x[1][ii]))==0 # All NaNs filled w/same val?
@test ≈(T.x[1][15:90], S.x[1][15:90]) # Un-windowed vals untouched?
printstyled(" channel delete\n", color=:light_green)
S -= 1
for i in fieldnames(typeof(S))
if i != :n
@test ≈(isempty([]), isempty(getfield(S,i)))
end
end
@test isempty(S)
# Merge four channels into two IDs (EHE, EHZ) and check per-channel fs/gain;
# then test merges with repeated data, a 50-sample overlap, and a
# two-sample offset where times are intentionally one sample early.
printstyled("  a more difficult merge\n", color=:light_green)
S = SeisData()
S *= (s1 * s3)
S *= (s2 * s4)
@test ≈(S.n, 2)
@test ≈(length(S.fs), 2)
@test ≈(length(S.gain), 2)
@test ≈(length(S.name), 2)
@test ≈(length(S.t),2)
@test ≈(length(S.x),2)
i = findid("DEAD.STA..EHE", S)
j = findid("DEAD.STA..EHZ", S)
@test ≈(S.fs[i], 100.0)
@test ≈(S.gain[j], 10.0)
ungap!(S, m=false, tap=false)
@test ≈(length(S.x[j]), 260)
@test ≈(length(S.x[i]), 350)
# after gain correction, first sample of second segment matches s4
@test ≈(S.x[i][101]/S.gain[i], s4.x[1]/s4.gain)
printstyled("  repeating data\n", color=:light_green)
loc1 = GeoLoc(lat=45.28967, lon=-121.79152, el=1541.0)
loc2 = GeoLoc(lat=48.78384, lon=-121.90093, el=1676.0)
# s2 repeats the last 50 samples of s1; merge should not duplicate them
s1 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
  t = [1 t1; 100 0], x=randn(100))
s2 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
  t = [1 t1+1000000; 150 0], x=vcat(s1.x[51:100], randn(100)))
C = (s1 * s2)[1]
@test length(C.x) == 200
@test C.x[1:100] == s1.x
@test C.x[101:200] == s2.x[51:150]
printstyled("  overlapping times\n", color=:light_green)
# os-sample overlap with non-identical data: overlap should be averaged
os = 50
nx = length(s1.x)
lx = length(s1.x)/s1.fs
τ = round(Int, sμ*(2.0-os/fs1))
s2 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
  t = [1 t1+τ; 150 0], x=randn(150))
C = (s1 * s2)[1]
@test length(C.x) == 250-os
@test C.x[1:nx-os] == s1.x[1:os]
@test C.x[nx-os+1:nx] == 0.5.*(s1.x[nx-os+1:nx]+s2.x[1:os])
@test C.x[nx+1:200] == s2.x[os+1:150]
printstyled("  two-sample offset\n", color=:light_green)
# s2 starts one sample early and repeats the last two samples of s1
os = 2
nx = length(s1.x)
lx = length(s1.x)/s1.fs
τ = round(Int, sμ*(2.0-(os-1)/fs1))
s2 = SeisChannel(fs = fs1, gain = 10.0, name = "DEAD.STA.EHZ", id = "DEAD.STA..EHZ",
  t = [1 t1+τ; 150 0], x=vcat(copy(s1.x[nx-os+1:nx]), randn(150-os)))
@test s1.x[nx] == s2.x[os]
C = (s1 * s2)[1]
@test length(C.x) == 250-os+1
@test C.x[1:99] == s1.x[1:99]
@test C.x[100] == 0.5*(s2.x[1] + s1.x[100])
@test C.x[101:249] == s2.x[2:150]
# Ensure merge works correctly with traces separated in time
printstyled("  operator \"*\"\n", color=:light_green)
# Two channels with the same ID but a 30 s gap; ungap should pad to 3200 samples
s5 = SeisChannel(fs = 100.0, loc=loc1, gain = 32.0, name = "DEAD.STA.EHE", id = "DEAD.STA..EHE",
  t = [1 t1; 100 0], x=randn(100))
s6 = SeisChannel(fs = 100.0, loc=loc1, gain = 16.0, name = "UNNAMED", id = "DEAD.STA..EHE",
  t = [1 t1+30000000; 200 0], x=randn(200))
T = (s5 * s6)
ungap!(T)
@test ≈(length(T.x[1]),3200)
printstyled("  one repeating segment in two channels\n", color=:light_green)
# Both channels contain seg_3; the merged output should contain each of
# seg_1, seg_2, seg_3 exactly once, with the original (gapless) time matrix.
fs1 = 50.0
Δ = round(Int64, sμ/fs1)
g1 = 10.0
ns = 2
n1 = "DEAD.STA..EHZ"
id = "DEAD.STA..EHZ"
t_in = [1 0; 300 0]
nx = round(Int64, ns*fs1)
seg_1 = randn(nx)
seg_2 = randn(nx)
seg_3 = randn(nx)
t = t_expand(t_in, fs1)
# s1 holds samples 1:100 and 201:300; s2 holds samples 101:300
i1 = vcat(collect(1:100), collect(201:300))
i2 = collect(101:300)
t1 = t_collapse(t[i1], fs1)
t2 = t_collapse(t[i2], fs1)
s1 = SeisChannel(fs = fs1, gain = g1, name = n1, id = id, t = t1, x = vcat(seg_1, seg_3))
s2 = SeisChannel(fs = fs1, gain = g1, name = n1, id = id, t = t2, x = vcat(seg_2, seg_3))
C = (s1 * s2)[1]
@test length(C.x) == 300
@test C.x == vcat(seg_1, seg_2, seg_3)
@test C.t == t_in
# SeisData * SeisChannel promotion, merge commutativity (up to sort order),
# pull semantics, and note tracking through merges.
printstyled(stdout,"    SeisData * SeisChannel ==> SeisData\n", color=:light_green)
(S,T) = mktestseis()
merge!(S,T)
C = prandSC(false)
D = prandSC(false)
# merge should commute once channel order is normalized with sort
U = merge(S,C)
U = sort(U)
V = merge(C,S)
V = sort(V)
@test U == V
U = merge(C,D)
(S,T) = mktestseis()
A = deepcopy(S[5])
B = deepcopy(T[4])
T*=S[1]
sizetest(T, 5)
printstyled(stdout,"    SeisData merge tests\n", color=:light_green)
printstyled(stdout,"      one common channel (fast, no gaps)\n", color=:light_green)
(S,T) = mktestseis()
# shift T's channel 3 so it starts exactly one sample after S's channel 4 ends
t = t_expand(S.t[4], S.fs[4])
T.t[3][1,2] = t[end] + t[end]-t[end-1]
merge!(T, S[4])
printstyled(stdout,"      one common channel (slow, has gaps)\n", color=:light_green)
(S,T) = mktestseis()
A = deepcopy(S[5])
B = deepcopy(T[4])
T*=S[1]
sizetest(T, 5)
T = sort(T)
S*=T[2]
sizetest(S, 5)
i = findid(A, S)
# second time-matrix row should mark the gap after A's samples
@test ≈(S.t[i][2,1], 1+length(A.x))
@test ≈(S.t[i][2,2], (5-1/S.fs[i])*sμ)
printstyled(stdout,"      two independent channels ==> same as \"+\"\n", color=:light_green)
(S,T) = mktestseis()
U = S[1] * T[2]
sizetest(U, 2)
@test U == S[1]+T[2]
printstyled(stdout,"      two identical channel IDs\n", color=:light_green)
U = S[4] * T[3]
@test typeof(U)==SeisData
@test U.id[1]==S.id[4]
@test U.id[1]==T.id[3]
printstyled(stdout,"    pull\n", color=:light_green)
# pull removes the channel from S and returns an independent copy
ux = Float64.(randperm(800))
C = pull(S,4)
C.x = ux[1:600]
@test C.name=="Longmire"
@test S.n==4
@test length(findall(S.name.=="Longmire"))==0
printstyled(stdout,"      notes still faithfully track names in modified objects\n", color=:light_green)
str1 = "ADGJALMGFLSFMGSLMFLSeptember Lobe sucks"
str2 = "HIGH SNR ON THIS CHANNEL"
note!(S,str2)
note!(S,str1)
@test (findall([maximum([occursin(str1, S.notes[i][j]) for j=1:length(S.notes[i])]) for i = 1:S.n]) == [4])
@test (length(findall([maximum([occursin(str2, S.notes[i][j]) for j = 1:length(S.notes[i])]) for i = 1:S.n]))==S.n)
# merge test: when we merge, does each field have exactly 7 entries?
printstyled(stdout,"      merge! after pull doesn't affect pulled channel\n", color=:light_green)
T.x[3] = ux[601:800]
merge!(S,T)
n_targ = 7
@test ≈(S.n, n_targ)
@test ≈(maximum([length(getfield(S,i)) for i in datafields]), n_targ)
@test ≈(minimum([length(getfield(S,i)) for i in datafields]), n_targ)
# none of the pulled channel's samples should appear in S.x[1]
@test any(maximum([C.x.==i for i in S.x[1]])) == false
# Old: (Test Passed, 1.42978256, 154864097, 0.038663895, Base.GC_Diff(154864097, 109, 0, 1845386, 4165, 0, 38663895, 7, 0))
# New: (Test Passed, 1.263168574, 128490661, 0.108295874, Base.GC_Diff(128490661, 81, 0, 1324714, 3857, 0, 108295874, 6, 1))
# purge of an emptied channel; regression test for Issue #29 against an
# ObsPy-produced reference trace; merge_ext! no-op check.
printstyled(stdout,"    purge!\n", color=:light_green)
(S,T) = mktestseis()
# empty channel 5's time and data so purge has something to remove
S.t[5] = Array{Int64,2}(undef,0,2)
S.x[5] = Array{Float32,1}(undef,0)
U = purge(S)
purge!(S)
@test S == U
@test S.n == 4
printstyled(stdout,"    issue 29\n", color=:light_green)
S = read_data(path * "/SampleFiles/SEED/CIRIO__BHE___2017101.mseed")
merge!(S)
C2 = S[1]
ungap!(C2)
#= In Issue #29, ObsPy handled the merge without repeated data, so this is the
basis for comparison
=#
io = open(path * "/SampleFiles/SEED/obspy.dat", "r")
X2 = Array{Int32,1}(undef, length(C2.x))
read!(io, X2)
close(io)
X = map(Float32, X2)
@test isapprox(C2.x, X)
printstyled(stdout,"    merge_ext!\n", color=:light_green)
# merge_ext! on plain SeisData channels should return nothing and leave S unchanged
C = randSeisChannel(s=true)
S = randSeisData(3, s=1.0)
S.id = [C.id, C.id, C.id]
S.fs = [C.fs, C.fs, C.fs]
U = deepcopy(S)
Ω = 3
rest = [1,2]
@test SeisIO.merge_ext!(S, Ω, rest) == nothing
@test S==U
# merge! between two GphysChannel objects: contiguous segments, a "zipper"
# of alternating windows, a time gap, and an overlap that must be averaged.
printstyled(stdout,"    merge! with GphysChannel objects\n", color=:light_green)
nx = 100
fs = 100.0
id = "VV.STA1.00.EHZ"
Δ = round(Int64, 1000000/fs)
# ===========================================================================
printstyled(stdout,"      simple merges\n", color=:light_green)
printstyled(stdout,"        two channels, same params, no overlap\n", color=:light_green)
C1 = mkC1()
C2 = mkC2()
# Is the group merged correctly?
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
@test C1.id == id
@test vcat(U1.x, U2.x)==C1.x
@test mk_tcat([U1.t, U2.t], fs) == C1.t
# Do the notes log the extra source?
@test any([occursin("New channel 1", i) for i in C1.notes])
@test any([occursin("test channel 1", i) for i in C1.notes])
# Are dictionaries merging correctly?
@test haskey(C1.misc, "P")
@test haskey(C1.misc, "S")
printstyled(stdout, "        \"zipper\" merge\n", color=:light_green)
# C1 holds windows 1,3,5,7 and C2 holds 2,4,6,8 of a contiguous record;
# the merge should interleave them into a single gapless window.
C1 = mkC1()
C2 = mkC2()
W = Array{Int64,2}(undef, 8, 2);
for i = 1:8
  W[i,:] = t0 .+ [(i-1)*nx*Δ ((i-1)*nx + nx-1)*Δ]
end
w1 = W[[1,3,5,7], :]
w2 = W[[2,4,6,8], :]
C1.t = w_time(w1, Δ)
C2.t = w_time(w2, Δ)
C1.x = randn(4*nx)
C2.x = randn(4*nx)
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1))
@test C1.t == [1 t0; 8*nx 0]
# ===========================================================================
printstyled(stdout,"      two channels, one with a time gap\n", color=:light_green)
C1 = mkC1()
C2 = mkC2()
C2.x = rand(2*nx)
# insert a 2Δ gap in the middle of C2
C2.t = vcat(C2.t[1:1,:], [nx 2*Δ], [2*nx 0])
# Is the group merged correctly?
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1))
@test U1.x == C1.x[1:nx]
@test U2.x[1:nx] == C1.x[nx+1:2nx]
@test U2.x[nx+1:2nx] == C1.x[2nx+1:3nx]
@test C2.x == U2.x
@test mk_tcat([U1.t, U2.t], fs) == C1.t
@test C2.t == U2.t
# ===========================================================================
printstyled(stdout,"      non-duplicate samples at overlapping times\n", color=:light_green)
printstyled(stdout,"        check for averaging\n", color=:light_green)
# nov overlapping samples with different values must be averaged
nov = 3
C1 = mkC1()
C2 = C2_ov()
U1 = deepcopy(C1)
U2 = deepcopy(C2)
# Is the group merged correctly?
merge!(C1, C2)
basic_checks(SeisData(C1))
@test C1.x[1:nx-nov] == U1.x[1:nx-nov]
@test C1.x[nx-nov+1:nx] == 0.5*(U1.x[nx-nov+1:nx] + U2.x[1:nov])
@test C1.x[nx+1:2nx-nov] == U2.x[nov+1:nx]
@test C1.t == [1 U1.t[1,2]; 2nx-nov 0]
# Do the notes log the extra source?
@test findfirst([occursin("New channel 1", i) for i in C1.notes]) != nothing
@test findfirst([occursin("test channel 1", i) for i in C1.notes]) != nothing
# Are dictionaries merging correctly?
@test haskey(C1.misc, "P")
@test haskey(C1.misc, "S")
# Overlap placement variants: overlapping window is not first in the source
# or destination channel; then overlaps whose timestamps are one sample off,
# where merge shifts a window rather than averaging.
printstyled(stdout,"        src overlap window is NOT first\n", color=:light_green)
os = 2
C1 = mkC1()
C2 = C2_ov()
U1 = deepcopy(C1)
U2 = deepcopy(C2)
C1.x = rand(2*nx)
# give C1 a gap of os samples so its second window overlaps C2
C1.t = vcat(C1.t[1:1,:], [nx os*Δ], [2*nx 0])
C2.t[1,2] += (os+nx)*Δ
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1))
@test C1.x[1:2nx-nov] == U1.x[1:2nx-nov]
@test C1.x[2nx-nov+1:2nx] == 0.5*(U1.x[2nx-nov+1:2nx] + U2.x[1:nov])
@test C1.x[2nx+1:3nx-nov] == U2.x[nov+1:nx]
@test C1.t == vcat(U1.t[1:2,:], [length(C1.x) 0])
printstyled(stdout,"        dest overlap window is NOT first\n", color=:light_green)
nov = 3
C1 = mkC1()
C2 = C2_ov()
C2.x = rand(2*nx)
# C2's first window precedes C1; its second window overlaps C1's end
C2.t = [1 t0-nx*Δ; nx+1 Δ*(nx-nov); 2*nx 0]
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1))
@test C1.x[1:nx] == U2.x[1:nx]
@test C1.x[nx+1:2nx-nov] == U1.x[1:nx-nov]
@test C1.x[2nx-nov+1:2nx] == 0.5*(U1.x[nx-nov+1:nx] + U2.x[nx+1:nx+nov])
@test C1.x[2nx+1:3nx-nov] == U2.x[nx+nov+1:2nx]
@test mk_tcat([U1.t, U2.t], fs) == C1.t
# ===========================================================================
printstyled(stdout,"      overlap with time mismatch\n", color=:light_green)
#= mk_tcat stops working here as merge shifts one window back in
time one sample to account for the intentional one-Δ time mismatch =#
printstyled(stdout,"        one sample off\n", color=:light_green)
# (a) 3_sample overlap with wrong time (C2[1:2] == C1[99:100])
nov = 2
C1 = mkC1()
C2 = C2_ov()
# duplicate data at wrong time: C2 repeats C1's last nov samples
C2.x = vcat(copy(C1.x[nx-nov+1:nx]), rand(nx-nov))
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1))
@test length(C1.x) == 2nx-nov
@test C1.x[1:nx-nov] == U1.x[1:nx-nov]
@test C1.x[nx-nov+1:nx] == U1.x[nx-nov+1:nx] == U2.x[1:nov]
@test C1.x[nx+1:2nx-nov] == U2.x[nov+1:nx]
# start time shifted back one Δ by the merge
@test C1.t == [1 U1.t[1,2]-Δ; 2nx-nov 0]
printstyled(stdout,"        src overlap window is NOT first\n", color=:light_green)
C1 = deepcopy(U1)
C2 = deepcopy(U2)
C1.x = rand(2*nx)
C1.t = vcat(C1.t[1:1,:], [nx os*Δ], [2*nx 0])
C2.x = vcat(copy(C1.x[2nx-nov+1:2nx]), rand(nx-nov))
C2.t[1,2] += (os+nx)*Δ
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1, C2))
@test C1.x[1:2nx-nov] == U1.x[1:2nx-nov]
@test C1.x[2nx-nov+1:2nx] == U1.x[2nx-nov+1:2nx] == U2.x[1:nov]
@test C1.x[2nx+1:3nx-nov] == U2.x[nov+1:nx]
@test C1.t == [1 0; U1.t[2,1] U1.t[2,2]-Δ; 3nx-nov 0]
#= mk_tcat starts working again here as the time shift is now
applied to the second window, rather than the first. =#
printstyled(stdout,"        dest overlap window is NOT first\n", color=:light_green)
C1 = mkC1()
C2 = C2_ov()
C2.t = [1 t0-nx*Δ; nx+1 Δ*(nx-nov); 2*nx 0]
C2.x = vcat(randn(nx), copy(C1.x[nx-nov+1:nx]), randn(nx-nov))
U1 = deepcopy(C1)
U2 = deepcopy(C2)
merge!(C1, C2)
basic_checks(SeisData(C1, C2))
@test C1.x[1:nx] == U2.x[1:nx]
@test C1.x[nx+1:2nx-nov] == U1.x[1:nx-nov]
@test C1.x[2nx-nov+1:2nx] == U1.x[nx-nov+1:nx] == U2.x[nx+1:nx+nov]
@test C1.x[2nx+1:3nx-nov] == U2.x[nx+nov+1:2nx]
@test mk_tcat([U1.t, U2.t], fs) == C1.t
# ===========================================================================
# Remaining merge cases: empty traces are ignored, irregularly-sampled
# channels concatenate, and mseis!/merge interoperate with SeisIO.Quake types.
printstyled(stdout,"      ignore traces with no data or time info\n", color=:light_green)
C1 = mkC1()
C2 = mkC2()
U1 = deepcopy(C1)
# emptying C2's data and times should make the merge a no-op on C1
C2.x = Float64[]
C2.t = Array{Int64,2}(undef,2,0)
merge!(C1, C2)
@test C1 == U1
printstyled(stdout,"      irregularly-sampled data\n", color=:light_green)
# three campaign-style channels with identical id/loc/resp/units; after
# merging, the time matrix should have one row per sample (nt == nx)
C1 = prandSC(true)
namestrip!(C1)
C2 = prandSC(true)
namestrip!(C2)
C2.id = identity(C1.id)
C2.gain = C1.gain
C2.resp = deepcopy(C1.resp)
C2.loc = deepcopy(C1.loc)
C2.units = identity(C1.units)
C3 = prandSC(true)
namestrip!(C3)
C3.id = identity(C1.id)
C3.resp = deepcopy(C1.resp)
C3.loc = deepcopy(C1.loc)
C3.units = identity(C1.units)
merge!(C1, C2)
merge!(C1, C3)
nt = size(C1.t, 1)
nx = length(C1.x)
@test nt > 1000
@test nx > 1000
@test nt == nx
printstyled(stdout,"    mseis!\n", color=:light_green)
(S,T) = mktestseis()
U = merge(S,T)
sizetest(U, 7)
# merging in-place after concatenation should equal merge(S,T)
V = SeisData(S,T)
merge!(V)
@test U == V
mseis!(S,T)
@test S == V
printstyled(stdout,"      with SeisIO.Quake\n", color=:light_green)
S = randSeisData()
Ev = randSeisEvent(2)
C = ungap(deepcopy(Ev.data[1]))
Ev.data[1] = deepcopy(C)
C.t[1,2] += 1000000
C.x = randn(eltype(C.x), length(C.x))
C.az = 0.0
C.baz = 0.0
C.dist = 0.0
Ev.data[2] = deepcopy(C)
# mseis! must accept mixed argument types; the Float64 array should warn
mseis!(S, randSeisChannel(),
          convert(EventChannel, randSeisChannel()),
          rand(Float64, 23),   # should warn
          convert(EventTraceData, randSeisData()),
          Ev,
          randSeisEvent())
printstyled(stdout,"      target is a SeisChannel\n", color=:light_green)
(S, T) = mktestseis()
C = S[4]
C1 = deepcopy(C)
C2 = deepcopy(C)
mseis!(C, C, T)
mseis!(C2, T)
@test C == C2
printstyled(stdout,"      with SeisIO.Quake\n", color=:light_green)
C = convert(EventChannel, C1)
mseis!(C, T)
C2 = convert(EventChannel, C2)
mseis!(C, C2)
Ev = randSeisEvent(2)
C3 = ungap(deepcopy(Ev.data[1]))
for f in (:id, :fs, :gain, :loc, :resp, :units)
  setfield!(C3, f, getfield(C, f))
end
Ev.data[1] = deepcopy(C3)
mseis!(C, Ev)
@test (C == C2) == false
printstyled(stdout,"      type conversion\n", color=:light_green)
(S, T) = mktestseis()
# merging a SeisChannel into an EventChannel must not change either type,
# and must preserve the EventChannel's back-azimuth
C = convert(EventChannel, S[4])
ϕ = 1.0*rand(0:179) + rand()
C.baz = ϕ
D = T[3]
merge!(C, D)
@test typeof(D) == SeisChannel
@test typeof(C) == EventChannel
@test C.baz == ϕ
C = convert(EventChannel, S[4])
C.baz = ϕ
D = T[3]
merge!(D, C)
@test typeof(C) == EventChannel
@test typeof(D) == SeisChannel
@test C.baz == ϕ
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1749 | printstyled(" nanfill, ungap\n", color=:light_green)
# Check that nanfill does not error
S = randSeisData()
Ev = SeisEvent(hdr=randSeisHdr(), data=convert(EventTraceData, deepcopy(S)))
C = deepcopy(S[1])
for i = 1:S.n
L = length(S.x[i])
inds = rand(1:L, div(L,2))
S.x[i][inds] .= NaN
end
nanfill!(S)
for i = 1:S.n
x = getindex(getfield(S, :x), i)
@test isempty(findall(isnan.(x)))
end
for i = 1:Ev.data.n
L = length(Ev.data.x[i])
inds = rand(1:L, div(L,2))
Ev.data.x[i][inds] .= NaN
end
nanfill!(Ev.data)
for i = 1:Ev.data.n
x = getindex(getfield(getfield(Ev, :data), :x), i)
@test isempty(findall(isnan.(x)))
end
L = length(C.x)
inds = rand(1:L, div(L,2))
C.x[inds] .= NaN
nanfill!(C)
@test isempty(findall(isnan.(C.x)))
# Test that traces of all NaNs becomes traces of all zeros
C = SeisChannel()
C.x = fill!(zeros(Float32, 128), NaN32)
nanfill!(C)
@test C.x == zeros(Float32, 128)
S = randSeisData()
U = deepcopy(S)
for i = 1:S.n
x = getindex(getfield(S, :x), i)
nx = lastindex(x)
T = eltype(x)
fill!(x, T(NaN))
u = getindex(getfield(U, :x), i)
fill!(u, zero(T))
end
nanfill!(S)
for i = 1:S.n
x = getindex(getfield(S, :x), i)
nx = lastindex(x)
T = eltype(x)
u = getindex(getfield(U, :x), i)
@test T == eltype(u)
@test S.x[i] == x == U.x[i] == u
end
# Test that ungap calls nanfill properly
Ev2 = ungap(Ev.data, tap=true)
ungap!(Ev.data, tap=true)
@test Ev.data == Ev2
ungap!(C, tap=true)
ungap!(S, tap=true)
# Ensure one segment is short enough to invoke bad behavior in ungap
Ev = randSeisEvent()
Ev.data.fs[1] = 100.0
Ev.data.x[1] = rand(1024)
Ev.data.t[1] = vcat(Ev.data.t[1][1:1,:], [5 2*ceil(S.fs[1])*sμ], [8 2*ceil(S.fs[1])*sμ], [1024 0])
redirect_stdout(out) do
ungap!(Ev.data)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2228 | printstyled(" resample!\n", color=:light_green)
function check_resamp()
S = randSeisData(fs_min=20.0)
sz = sizeof(S)
(xx, ta, aa, xx, xx) = @timed resample(S, fs=10.0)
return [aa/sz ta]
end
# resample!/resample tests: long Float32 traces, gappy traces, a controlled
# down/upsample of mixed 20/40 Hz channels, and allocation-overhead reporting.
N = 20
check_resamp()
C = randSeisChannel()
# one day of 100 Hz data, gapless
nx = 8640000
C.t = [1 0; nx 0]
C.fs = 100.0
C.x = rand(Float32, nx)
resample!(C, 50.0)
C = randSeisChannel()
# gappy 40 Hz channel; downsampling 4:1 should yield 194240/4 samples
C.t = [ 1 1559599308051202;
        29229 46380573;
        194240 0]
C.x = randn(194240)
C.fs = 40.0
resample!(C, 10.0)
@test length(C.x) == 48560
# A controlled test with two groups of channels, 40.0 Hz and 20.0 Hz
printstyled("    downsample\n", color=:light_green)
S = randSeisData(4)
S.id = ["NN.STA1.00.EHZ", "NN.STA2.00.EHZ", "NN.STA3.00.EHZ", "NN.STA4.00.EHZ"]
n = 400
fs = 20.0
t20 = [1 1559599308051202; n 0]
t40 = [1 1559599308051202; 2n 0]
S.fs = [fs, 2fs, fs, 2fs]
S.t = [deepcopy(t20), deepcopy(t40), deepcopy(t20), deepcopy(t40)]
S.x = [randn(n), randn(2n), randn(n), randn(2n)]
U = deepcopy(S)
resample!(S, fs=fs)
# after downsampling, every channel should be n samples at fs with t20 times
for i = 1:S.n
  @test length(S.x[i]) == n
  @test S.fs[i] == fs
  @test S.t[i] == t20
end
norm_sz = Array{Float64,2}(undef,N,2)
printstyled("    trial ", color=:light_green)
for i = 1:N
  if i > 1
    print("\b\b\b\b\b")
  end
  printstyled(string(lpad(i, 2), "/", N), color=:light_green)
  norm_sz[i,:] = check_resamp()
end
println("")
println(stdout, "      mean overhead (", N, " random trials): ",
  @sprintf("%0.1f%%", 100.0*(mean(norm_sz[:,1])-1.0)), "; ",
  "mean t = " , @sprintf("%0.2f", mean(norm_sz[:,2])))
# Now with upsampling
printstyled("    upsample (Issue #50)\n", color=:light_green)
S = deepcopy(U)
resample!(S, fs=2fs)
# upsampled length may be 2n or 2n-1 depending on channel
for i = 1:S.n
  L = length(S.x[i])
  @test L in (2n, 2n-1)
  @test S.fs[i] == 2fs
  @test S.t[i] == (L == 2n ? t40 : [1 1559599308051202; 2n-1 0])
end
printstyled("    test on long, gapless Float32 SeisChannel\n", color=:light_green)
S = randSeisChannel()
nx = 8640000
S.t = [1 0; nx 0]
S.fs = 100.0
S.x = rand(Float32, nx)
U = SeisData(deepcopy(S))
C = resample(S, 50.0)
sz = sizeof(S)
(xx, ta, aa, xx, xx) = @timed resample!(S, 50.0)
println(stdout, "      resample!: overhead = ",
  @sprintf("%0.1f%%", 100.0*(aa/sz - 1.0)), ", t = ",
  @sprintf("%0.2f", ta), " s")
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1843 | printstyled(" rescale\n", color=:light_green)
g = 32.0
g1 = 7.0
g2 = 11.0
segy_nodal = string(path, "/SampleFiles/SEGY/FORGE_78-32_iDASv3-P11_UTC190428135038.sgy")
U = read_data("segy", segy_nodal, ll=0x01)
@test U.n == 33
S = U[1:7]
fill!(S.gain, 1.0)
C = deepcopy(S[1])
U = deepcopy(S)
D = deepcopy(C)
printstyled(" rescale!(S)\n", color=:light_green)
S = deepcopy(U)
S.gain[1] = g1
S2 = rescale(S)
rescale!(S)
@test S == S2
G = S.gain
@test all([getindex(G, i) == g1 for i in 1:S.n])
for i in 2:S.n
@test isapprox(S.x[i]/g1, U.x[i])
end
printstyled(" rescale!(S, g)\n", color=:light_green)
S = deepcopy(U)
S2 = rescale(S, g)
rescale!(S, g)
@test S == S2
G = S.gain
X = S.x
@test all([getindex(G, i) == g for i in 1:S.n])
@test all([isapprox(X[i], U.x[i].*g) for i in 1:S.n])
printstyled(" channel range\n", color=:light_green)
S = deepcopy(U)
chans = collect(2:2:6)
rest = setdiff(1:S.n, chans)
S2 = rescale(S, g, chans=chans)
rescale!(S, g, chans=chans)
@test S == S2
G = S.gain
X = S.x
@test all([getindex(G, i) == g for i in chans])
@test any([getindex(G, i) == g for i in rest]) == false
@test all([isapprox(X[i], U.x[i].*g) for i in chans])
@test any([isapprox(X[i], U.x[i].*g) for i in rest]) == false
printstyled(" rescale!(S_targ, S_src)\n", color=:light_green)
S = deepcopy(U)
fill!(U.gain, g2)
S2 = rescale(S, U)
rescale!(S, U)
@test S == S2
G = S.gain
X = S.x
@test all([getindex(G, i) == g2 for i in 1:S.n])
@test all([isapprox(X[i], U.x[i].*g2) for i in 1:S.n])
printstyled(" rescale!(C, g)\n", color=:light_green)
B = rescale(C, g)
rescale!(C, g)
@test B == C
@test C.gain == g
@test isapprox((C.x).*g, D.x)
printstyled(" rescale!(C, D)\n", color=:light_green)
C = deepcopy(D)
D.gain = g2
B = rescale(C, D)
rescale!(C, D)
@test B == C
@test isapprox(C.x, (D.x).*g2)
@test eltype(C.x) == Float32
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3780 | printstyled(" resp\n", color=:light_green)
sac_pz_file = path*"/SampleFiles/SAC/test_sac.pz"
non_resp = PZResp(p = Complex{Float32}[complex(1.0, 1.0)], z = Complex{Float32}[2.0/Complex(1.0, -1.0)])
Nx = 1000000
C = randSeisChannel(s=true)
taper!(C)
printstyled(" fctoresp\n", color=:light_green)
r = fctoresp(0.2f0)
r2 = fctoresp(2.0f0)
printstyled(" resp_a0\n", color=:light_green)
S = randSeisData(3, s=1.0)
R = MultiStageResp(2)
S.units = ["m/s", "m", "m/s2"]
R.stage[1] = r
R.stage[2] = r2
S.resp[1] = R
S.resp[2] = CoeffResp()
S.resp[3] = r2
resp_a0!(S)
@test S.resp[1].stage[1].a0 ≈ -1.7771534f0
@test S.resp[1].stage[2].a0 ≈ 17.771534f0
@test S.resp[1].stage[2].a0 == S.resp[3].a0
@test isempty(S.resp[2])
printstyled(" translate_resp (SeisData)\n", color=:light_green)
S = randSeisData(3, s=1.0)
S.resp[1] = fctoresp(0.2f0)
S.resp[2] = fctoresp(0.2f0)
S.resp[3] = fctoresp(2.0f0)
S.x[1] = randn(Float32, S.t[1][end,1])
for i = 1:3
S.units[i] = rand(["m", "m/s", "m/s2"])
end
S.resp[3].z = ComplexF32[]
C_a = deepcopy(S[3])
detrend!(S)
taper!(S)
U = deepcopy(S)
T = translate_resp(S, r)
@test typeof(T.x[1]) == Array{Float32,1}
translate_resp!(S, r)
nanfill!(S)
nanfill!(T)
@test S==T
printstyled(" remove_resp (SeisData)\n", color=:light_green)
remove_resp!(S)
@test_logs (:info, Regex("nothing done")) remove_resp!(S)
T = translate_resp(T, non_resp)
for i = 1:S.n
if isempty(findall(isnan.(S.x[i]))) && isempty(findall(isnan.(T.x[i])))
@test isapprox(S.x[i], T.x[i])
else
@warn string("NaNs found! i = ", i)
end
@test S.resp[i] == non_resp
@test T.resp[i] == non_resp
end
# unit tests
S = deepcopy(U)
resp_a0!(S)
fc = resptofc(S.resp[1])
@test isapprox(fc, 0.2f0)
fc = resptofc(S.resp[3])
@test isapprox(fc, 2.0f0)
# test for channel ranges
S = deepcopy(U)
S1 = remove_resp(S, chans=1:3)
for i = 1:S1.n
@test (S1[i] == U[i]) == (i < 4 ? false : true)
end
# test for MultiStageResp
U = randSeisData(4, s=1.0)
U.resp[1] = fctoresp(0.2f0)
U.resp[2] = MultiStageResp(6)
U.resp[2].stage[1] = fctoresp(15.0f0)
U.resp[3] = fctoresp(2.0f0)
U.resp[4] = fctoresp(1.0f0)
ungap!(U)
detrend!(U)
resp_a0!(U)
S = deepcopy(U)
T = deepcopy(U)
S1 = translate_resp(S, r, chans=1:3)
T1 = translate_resp(T, r, chans=1:3)
for i = 1:3
if (any(isnan, S1.x[i]) == false) && (any(isnan, T1.x[i]) == false)
@test S1[i] == T1[i]
else
@warn(string("NaNs in channel ", i, " -- can't test equality!"))
end
end
if (any(isnan, S1.x[4]) == false) && (any(isnan, T1.x[4]) == false) &&
(any(isnan, U.x[4]) == false)
@test S1[4] == T1[4] == U[4]
else
@warn(string("NaNs in channel 4 -- can't test equality!"))
end
for i = 2:6
@test S1.resp[2].stage[i] == T1.resp[2].stage[i] == U.resp[2].stage[i]
end
@test S1.resp[2].stage[1] != U.resp[2].stage[1]
@test S1.resp[2].stage[1] == T1.resp[2].stage[1]
# SeisChannel method extension
printstyled(" translate_resp (SeisChannel)\n", color=:light_green)
C = randSeisChannel(s=true)
C.t = [1 0; Nx 0]
C.x = randn(Nx)
C.resp = deepcopy(r2)
D = translate_resp(C, r)
translate_resp!(C, r)
@test C == D
@test C.x != zeros(eltype(C.x), lastindex(C.x))
# Here, we expect nothing to happen
translate_resp!(C, r)
# Test accelerometer response with empty zeros
remove_resp!(C_a)
C = randSeisChannel(s=true)
taper!(C)
D = deepcopy(C)
remove_resp!(C)
D = remove_resp(D)
@test C.resp == non_resp
# test on SeisData created from SACPZ file
printstyled(" compatibility with SACPZ responses\n", color=:light_green)
S = read_sacpz(sac_pz_file)
U = deepcopy(S)
for i = 1:S.n
nx = round(Int64, S.fs[i]*3600)
S.t[i] = [1 0; nx 0]
S.x[i] = rand(eltype(S.x[i]), nx).-0.5
end
r = S.resp[1]
# f0 should not matter here; it should work
translate_resp!(S, r)
for i = 1:S.n
@test S.resp[i] == U.resp[1]
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 17755 | printstyled(" sync!\n", color=:light_green)
# Method of time calculation in old sync!
function sync_times(t::AbstractArray{Int64, 2}, fs::Float64, t_min::Int64, t_max::Int64)
t_x = t_expand(t, fs)
ii = vcat(findall(t_x.<t_min), findall(t_x.>t_max))
return ii
end
sync_times(t::AbstractArray{Int64, 2}, fs::Float64, t_min::Int64) = get_sync_inds(t, fs, t_min, endtime(t, fs))
# Based on code in sync!
# Based on code in sync!
# Delete the index ranges in `x_del` from data vector `x`, walking the ranges
# back-to-front so earlier indices remain valid after each deletion.
function prune_x!(x::SeisIO.FloatArray, x_del::Array{UnitRange, 1})
  for k in length(x_del):-1:1
    rng = x_del[k]
    isempty(rng) || deleteat!(x, rng)
  end
  return nothing
end
# Unit tests for sync_t: returned index windows (xi) and time windows (W)
# are checked against the old sync_times index method via get_del_ranges.
printstyled("    sync_t\n", color=:light_green)
# Cases to consider
printstyled("      nothing removed\n", color=:light_green)
# most basic case: a well-formed time matrix
Δ = 20000
ts = 1583455810004000
nx = 40000
fs = sμ/Δ
t = [1 ts; nx 0]
te = endtime(t, Δ)
t_min = ts
t_max = te
(xi, W) = sync_t(t, Δ, t_min, t_max)
@test xi == [1 nx]
@test W == t_win(t, Δ)
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
printstyled("      empty window\n", color=:light_green)
# trace ends before t_min; everything should be synced away
t = [1 ts-nx*Δ; nx 0]
(xi, W) = sync_t(t, Δ, t_min, t_max)
@test isempty(xi)
@test isempty(W)
printstyled("      simple truncations\n", color=:light_green)
# truncate start
t = [1 ts; nx 0]
nclip = 4
(xi, W) = sync_t(t, Δ, ts+nclip*Δ, te)
@test xi == [nclip+1 nx]
# truncate end
t_min = ts
t_max = te-nclip*Δ
(xi, W) = sync_t(t, Δ, t_min, t_max)
@test xi == [1 nx-nclip]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# a time matrix with one gap equal to n samples
n = 1
gi = 10
t = [1 ts; gi n*Δ; nx 0]
(xi, W) = sync_t(t, Δ, ts, te)
@test xi == [1 gi-1; gi nx-n]
n = 120
gi = 10
t = [1 ts; gi n*Δ; nx 0]
(xi, W) = sync_t(t, Δ, ts, te)
@test xi == [1 gi-1; gi nx-n]
# two gaps, n and m samples
m = 33
gj = 2000
t = [1 ts; gi n*Δ; gj m*Δ; nx 0]
(xi, W) = sync_t(t, Δ, ts, te)
@test xi == [1 gi-1; gi gj-1; gj nx-m-n]
t_min = ts
t_max = te
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# negative gap
t = [1 ts; gi -n*Δ; nx 0]
(xi, W) = sync_t(t, Δ, ts, te)
@test xi == [1 gi-1; n+1 nx]
# here, the number of samples before ts in window 2 is n-gi, or 110; the first
# n-gi samples of this window are samples gi to n-gi+gi = n; so the window
# starts at n+1, or 121
t_min = ts
t_max = te
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# truncate other (windows not in chronological order)
n = 120
gi = 10
nx = 100
# start of middle window
W = ts .+ Δ.*([ 1 gi
               -n+1 0
               gi+1 nx ] .- 1)
t_min = ts - div(n,2)*Δ
t_max = last(W)
t = w_time(W, Δ)
xi0 = x_inds(t)
(xi, W) = sync_t(t, Δ, t_min, t_max)
# only the out-of-order middle window loses samples at its start
@test xi-xi0 == [0 0; div(n,2) 0; 0 0]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# end of middle window
W = ts .+ Δ.*([ 1 gi
               nx+1 nx+n
               gi+1 nx ] .- 1)
t_min = ts
t_max = ts + (nx - 1 + div(n,2))*Δ
t = w_time(W, Δ)
xi0 = x_inds(t)
(xi, W) = sync_t(t, Δ, t_min, t_max)
@test xi-xi0 == [0 0; 0 -div(n,2); 0 0]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# sync_t cases where one or more whole windows fall outside [t_min, t_max]
# and must be dropped from the returned index/time windows.
printstyled("      segment removed\n", color=:light_green)
# first window gets emptied
gi = 10
nx = 100
gl = 6
W = ts .+ Δ.*[ -gl -1
               1 gi-1
               gi+gl nx]
t_min = ts
t_max = last(W)
t = w_time(W, Δ)
xi0 = x_inds(t)
xi, W1 = sync_t(t, Δ, ts, last(W))
@test size(xi, 1) == size(W1, 1) == 2
# total retained samples = nx minus the gl samples before ts
@test sum(diff(xi, dims=2).+1) == nx-gl
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# second window gets emptied (no gap, windows 1 and 2 are out of order)
W = ts .+ Δ.*[ 0 gi-1
               -gl -1
               gi nx-gl-1]
t = w_time(W, Δ)
xi = x_inds(t)
xi1, W1 = sync_t(t, Δ, ts, last(W))
@test size(xi1, 1) == size(W1, 1) == 2
@test xi1 == xi[[1,3],:]
# last window gets emptied
W = ts .+ Δ.*[ 0 gi
               gi+2 nx
               nx+2 nx+gi]
t_min = ts
t_max = W[2,2]
t = w_time(W, Δ)
xi = x_inds(t)
xi1, W1 = sync_t(t, Δ, t_min, t_max)
@test size(xi1, 1) == size(W1, 1) == 2
@test xi1 == xi[[1,2],:]
printstyled("      multiple segs removed\n", color=:light_green)
# first + last emptied
gi = 10
W = ts .+ Δ.*[ 0 gi-1
               gi+2 nx
               nx+2 nx+gi]
t_min = ts + gi*Δ
t_max = ts + nx*Δ
t = w_time(W, Δ)
xi0 = x_inds(t)
xi, W1 = sync_t(t, Δ, t_min, t_max)
@test size(xi, 1) == size(W1, 1) == 1
@test xi == xi0[[2],:]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# first + last emptied, middle truncated by os
os = 10
t_max = ts + (nx-os)*Δ
t = w_time(W, Δ)
xi0 = x_inds(t)
xi, W1 = sync_t(t, Δ, t_min, t_max)
@test size(xi, 1) == size(W1, 1) == 1
@test xi0[[2],:] .- xi == [0 os]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# first emptied, last truncated, middle is one point
gi = 666
nx = gi + 100
W = ts .+ Δ.*[ 0 gi-1
               gi+2 nx
               nx+2 nx+gi]
t_min = ts + nx*Δ
t_max = ts + (nx+div(gi,2))*Δ
t = w_time(W, Δ)
xi0 = x_inds(t)
xi, W1 = sync_t(t, Δ, t_min, t_max)
@test size(xi, 1) == size(W1, 1) == 2
# surviving middle window collapses to a single sample
@test W1[1,1] == W1[1,2]
@test div(W1[2,2] - W1[2,1], Δ) + 2 == div(gi, 2)
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# second and last emptied
gi = 10
W = ts .+ Δ.*[ 0 gi-1
               gi+2 nx
               nx+2 nx+gi]
t_min = ts + (gi+1)*Δ
t_max = ts + nx*Δ
t = w_time(W, Δ)
xi = x_inds(t)
xi1, W1 = sync_t(t, Δ, t_min, t_max)
@test size(xi1, 1) == size(W1, 1) == 1
@test xi1 == xi[[2],:]
# sync_t stress cases: many out-of-order windows with truncation at both
# ends, then an end-to-end check that get_del_ranges-driven deletion matches
# the old sync_times index deletion on real data.
printstyled("      complicated cases\n", color=:light_green)
# complex case with no completely emptied windows; formed as W and not :t
nx = 40000
W = ts .+ Δ.*[1 10
              12 12
              14 14
              23 40
              51 100
              -1200 0
              101 9500
              12000 41318]
t_min = ts - 300Δ
t_max = last(W) - 2000Δ
t = w_time(W, Δ)
xi0 = x_inds(t)
xi, W = sync_t(t, Δ, t_min, t_max)
# sorting the returned windows by start time should span [t_min, t_max]
j = sortperm(W[:,1])
W1 = W[j,:]
t1 = w_time(W1, Δ)
@test t1[1,2] == t_min
@test endtime(t1, Δ) == t_max
# check our work; sorted matrix should be from t_min to t_max
t2 = w_time(W, Δ)
@test diff(x_inds(t2), dims=2) == diff(xi, dims=2)
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi, t[end, 1])
test_del_ranges(x_del, i2)
# last window emptied; previous window truncated; first window truncated
nx = 1000
gi = 17
os = 50
W = ts .+ Δ.*[ -1*os gi
               gi+2 nx+gi
               nx+gi+2 2nx ]
t_min = ts
t_max = W[2,2] - Δ*gi
t = w_time(W, Δ)
xi = x_inds(t)
xi1, W1 = sync_t(t, Δ, t_min, t_max)
t1 = w_time(W1, Δ)
@test size(xi1, 1) == size(W1, 1) == 2
@test xi1[2,2] == nx+os
@test W[[1,2],:] != W1
@test xi1[[1,2],:] != xi
@test t1[1,2] == t_min
@test endtime(t1, Δ) == t_max
nx_true = t[end,1]
i2 = sync_times(t, fs, t_min, t_max)
x_del = get_del_ranges(xi1, nx_true)
test_del_ranges(x_del, i2)
# Final step, putting it all together
# deleting by ranges (prune_x!) must equal deleting by indices (deleteat!)
x = randn(Float32, nx_true)
x1 = deepcopy(x)
x2 = deepcopy(x)
prune_x!(x1, x_del)
deleteat!(x2, i2)
@test x1 == x2
fs = 100.0
Δ = round(Int64, sμ/fs)
t0 = 1582934400000000
ns = 10
Δ3 = 1.0
# Test three seismic and three irregularly sampled channels
S = randSeisData(3, s=1.0)
# seismic channels will be ns seconds of data sampled at 100 Hz with preset gaps
nx = round(Int64, ns * fs)
for i = 1:3
S.x[i] = rand(nx) # Set S.x to be 1000 samples each
S.fs[i] = fs # Set S.fs to be uniformly 100 Hz
end
S.t[1] = [1 t0; nx 0]
S.t[2] = [1 t0-ns*1000000; nx 0]
S.t[3] = [1 t0; 2 round(Int64, Δ3*sμ); nx 0]
# irregular channels will be 100 points each, randomly sampled between the start and the end
append!(S, randSeisData(3, c=1.0))
n_irr = div(nx, 10)
ni = div(n_irr, ns)
for i = 4:6
S.x[i] = rand(n_irr)
S.fs[i] = 0.0
S.t[i] = zeros(n_irr, 2)
tv = zeros(Int64, n_irr)
#= the definition of rmax here prevents rng giving us a value above tmax
in test (6), which breaks the test
=#
for j = 1:ns
si = 1 + (j-1)*ni
ei = j*ni
rmin = t0 + (j-1)*1000000
rmax = t0 - 1 + (j*1000000) - (j == ns ? 2Δ : 0)
r = UnitRange{Int64}(rmin:rmax)
tv[si:ei] .= rand(r, ni)
end
sort!(tv)
S.t[i][:,1] .= 1:n_irr
S.t[i][:,2] .= tv
end
sx = Lx(S)
# Set derived variables
ts, te = get_edge_times(S)
ts₀ = minimum(ts)
ts₁ = maximum(ts)
# TEST 1 =====================================================================
# Sync to last start; don't sync ends
printstyled(stdout,"  sync to last start; don't sync ends\n", color=:light_green)
T = sync(S)
basic_checks(T)
ts_new, te_new = get_edge_times(T)
# Expectation: T[2] is deleted (it ends before the latest start time)
@test T.n == 5
@test S.id[1] == T.id[1]
@test findid(S.id[2], T.id) == 0
for i = 3:6
@test(S.id[i] == T.id[i-1])
end
# Expectation: the latest time in S.t is now the earliest start time
@test minimum(ts_new .≥ ts₁) == true
@test maximum(ts_new .== ts₁) == true
# TEST 2 =====================================================================
# Sync to last start; don't sync ends
# change trace 2 to begin only 1s earlier; resync
S.t[2] = [1 t0-1000000; nx 0]
ts, te = get_edge_times(S)
ts₀ = minimum(ts)
ts₁ = maximum(ts)
T = sync(S)
ts_new, te_new = get_edge_times(T)
# Expectation: T.x[2] is at least 100 pts shorter and exactly 100 shorter than T.x[1]
@test T.n == 6
tx = Lx(T)
@test tx[2] ≤ nx-100
@test tx[2] == tx[1] - 100
@test tx[2] ≤ tx[3]
@test minimum(ts_new .≥ ts₁) == true
@test maximum(ts_new .== ts₁) == true
basic_checks(T)
# TEST 3 =====================================================================
# Sync to last start; sync to first end
printstyled(stdout,"  sync to last start; sync to first end\n", color=:light_green)
te₀ = minimum(te)  # earliest channel end
te₁ = maximum(te)  # latest channel end
T = sync(S, t="first")
basic_checks(T)
ts_new, te_new = get_edge_times(T)
# Expectation: T.x[1] and T.x[2] are same length, T.x[3] is shorter
tx = Lx(T)
@test tx[1] == tx[2]
@test tx[2] > tx[3]
@test minimum(te_new .≤ te₀) == true
@test maximum(te_new .== te₀) == true
@test minimum(sx.-tx.≥0) == true # sx is always longer (syncing never adds samples here)
# TEST 4 =====================================================================
# Sync to first start; sync to first end
# (message fixed: the call below uses t="first", but the original message
# said "sync to last end")
printstyled(stdout,"    sync to first start; sync to first end\n", color=:light_green)
U = sync(S, s="first", t="first")
basic_checks(U)
ts_new, te_new = get_edge_times(U)
# Expectations:
ux = Lx(U)
@test minimum(ux.-tx.≥0) == true     # ux is never shorter than tx (earlier start keeps more data)
@test tx[1]+100 ≤ ux[1]              # U.x[1] gains at least 100 samples
@test minimum(ts_new .≥ ts₀) == true # no channel starts before ts₀
@test 2 ∈ findall(ts_new .== ts₀)    # channel 2 defines the earliest start time
@test ux[2]==ux[1]                   # Due to how we set the gap
@test ux[1]-ux[3] == 100
@test minimum(te_new .≤ te₀) == true
@test maximum(te_new .== te₀) == true
for i = 4:6
  @test ux[i] ≥ tx[i]
end
# TEST 5 =====================================================================
# Sync to first start; sync to last end (the widest possible window)
V = sync(S, s="first", t="last")
basic_checks(V)
ts_new, te_new = get_edge_times(V)
vx = Lx(V)
# Expectation: every channel is padded out to the full window
@test minimum(vx.-sx.≥0) == true # vx is always longer
t1 = t_expand(V.t[1], V.fs[1])
t2 = t_expand(S.t[1], S.fs[1])
j = setdiff(t1, t2)  # sample times present only after sync = padding added
@test vx[1]-sx[1] == length(j)
t2 = t_expand(U.t[1], U.fs[1])
j = setdiff(t1, t2)
@test vx[1]-ux[1] == length(j)
@test minimum(ts_new .≥ ts₀) == true # Should still be true
@test 2 ∈ findall((ts_new .== ts₀).== true) # Defined earliest start time
@test vx[2] == vx[1] # Due to how we set the gap
t2 = t_expand(V.t[2], V.fs[2])
t3 = t_expand(V.t[3], V.fs[3])
j = setdiff(t2, t3)
@test vx[2] - vx[3] == length(j)
@test minimum(te_new .≤ te₁) == true
@test maximum(te_new .== te₁) == true
for i = 4:6
@test vx[i] ≥ tx[i]
@test vx[i] == sx[i]  # irregular channels are unchanged by the widest window
end
# TEST 6 =====================================================================
# Sync to s = DateTime (first trace start), t = "none"
# trace 3 should be 100 samples shorter
# so should trace 2
printstyled(stdout,"    sync to DateTime; don't sync ends\n", color=:light_green)
ts₆ = S.t[1][1,2]               # start time of channel 1, μs
te₆ = S.t[1][1,2] + Δ*(nx-1)    # end time of channel 1, μs
ds₆ = u2d(ts₆*μs)               # same instants as DateTime
de₆ = u2d(te₆*μs)
W = sync(S, s=ds₆)
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# Expectations:
@test sx[2]-wx[2] == 100 # Trace 2 is 100 samples shorter
for i in [1,3,4,5,6]
  @test sx[i] == wx[i] # No change in other trace lengths
end
@test minimum(ts_new .≥ ts₆) == true # We start at ts₆, not before
# idiom fix: findfirst returns a scalar index; compare with ==, not broadcast .==
@test findfirst(ts_new .== ts₆) == 1 # Defined start time
# Repeat with an end time
# te₆ = S.t[1][1,2] + round(Int64, sμ*(nx-1)/fs)
# de₆ = u2d(te₆*μs)
W = sync(S, s=ds₆, t=de₆)
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# Expectations:
@test sx[3]-wx[3] == 100 # Trace 3 is 100 samples shorter
for i in [1,2,4,5,6]
  @test sx[i] == wx[i] # No change in other trace lengths; 2 gets padded
end
@test minimum(ts_new .≥ ts₆) == true # We start at ts₆, not before
@test findfirst(ts_new .== ts₆) == 1 # Defined start time
# TEST 7 =====================================================================
# Sync to DateTime 1s before first trace, 0.01s after; is it 101 pts longer?
# Repeat with an increased window range
ds₆ -= Second(1)
de₆ += Millisecond(10)
X = sync(S, s=ds₆, t=de₆)
basic_checks(X)
ts_new, te_new = get_edge_times(X)
xx = Lx(X)
# Expectations:
@test xx[1]-sx[1] == 101 # Trace 1 is 101 samples longer
@test xx[2]-sx[2] == 101 # Trace 2 is also 101 samples longer
@test xx[3]-sx[3] == 1 # Trace 3 loses 100 samples at the end, but gains 100 at end and 1 at start ... net gain +1
for i = 4:6
@test xx[i] == sx[i]  # irregular channels unchanged
end
# Sync to DateTime 2s after first trace, t="first"
ds₆ += Second(3)  # net: 2s after the original trace start (was -1s above)
X = sync(S, s=ds₆, t=de₆)
basic_checks(X)
ts_new, te_new = get_edge_times(X)
xx = Lx(X)
# Expectations:
@test xx[1] == 801 # Trace 1 should be 801 samples
@test xx[1] == xx[2] # Should be the same due to padding
# In fact trace 2 should have 101 points appended; check the channel notes
found = findall([occursin("appended 101", i) for i in X.notes[2]])
@test isempty(found) == false
# TEST 8 =====================================================================
# A few simple combinations; do these work?
printstyled(stdout,"    sync start to DateTime; sync first end\n", color=:light_green)
X = sync(S, s=ds₆, t="first"); basic_checks(X)
printstyled(stdout,"    sync start to DateTime; sync to last end\n", color=:light_green)
X = sync(S, s=ds₆, t="last"); basic_checks(X)
printstyled(stdout,"    sync start to string time; sync to last end\n", color=:light_green)
ss = string(ds₆)
# BUG FIX: pass the string form (ss) as advertised above; the original passed
# ds₆ again, leaving ss unused and never exercising string-time parsing.
Y = sync(S, s=ss, t="last", v=3); basic_checks(Y)
# Expect: X != Y due to notes, but all other fields equal
for f in datafields
  if f != :notes
    @test isequal(getfield(X,f), getfield(Y,f))
  end
end
# TEST 9 =====================================================================
# Do we actually prune campaign data when all times are out of range?
printstyled(stdout,"    prune all irregular data when all times are out of range\n", color=:light_green)
ss = string(ds₆)
Z = deepcopy(S)
t1 = deepcopy(Z.t[5])
# convert channel 5's absolute times to small deltas, pushing them out of range
t1 = hcat(t1[:,1:1], vcat(0, diff(t1[:,2:2], dims=1)))
Z.t[5] = deepcopy(t1)
redirect_stdout(out) do
sync!(Z, v=3); basic_checks(Z)
end
# Expect: Z[5] is gone; all other channels survive
for i in [1,2,3,4,6]
@test any(Z.id.==S.id[i])
end
@test (any(Z.id.==S.id[5]) == false)
# ===========================================================================
# method extension to SeisChannel
printstyled(stdout,"  SeisChannel method extension\n", color=:light_green)
ts₆ = S.t[1][1,2]
te₆ = S.t[1][1,2] + Δ*(nx-1)
ds₆ = u2d(ts₆*μs)
de₆ = u2d(te₆*μs)
C = deepcopy(S[1])
sync!(C, s=ds₆)  # in-place sync of a single channel
W = SeisData(C)
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# Repeat with an end time, using the out-of-place form
C = deepcopy(S[1])
W = SeisData(sync(C, s=ds₆, t=de₆))
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# ===========================================================================
# method extension to SeisEvent
printstyled(stdout,"  SeisEvent method extension\n", color=:light_green)
ts₆ = S.t[1][1,2]               # start time of channel 1, μs
te₆ = S.t[1][1,2] + Δ*(nx-1)    # end time of channel 1, μs
ds₆ = u2d(ts₆*μs)
de₆ = u2d(te₆*μs)
Ev = SeisEvent(hdr = randSeisHdr(), data = deepcopy(S))
sync!(Ev.data, s=ds₆)           # in-place sync of the event's trace data
W = Ev.data
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# Expectations (same as TEST 6, since Ev.data is a copy of S):
@test sx[2]-wx[2] == 100 # Trace 2 is 100 samples shorter
for i in [1,3,4,5,6]
  @test sx[i] == wx[i] # No change in other trace lengths
end
@test minimum(ts_new .≥ ts₆) == true # We start at ts₆, not before
# idiom fix: findfirst returns a scalar index; compare with ==, not broadcast .==
@test findfirst(ts_new .== ts₆) == 1 # Defined start time
Ev = SeisEvent(hdr = randSeisHdr(), data = deepcopy(S))
W = sync(Ev.data, s=ds₆)        # out-of-place form
basic_checks(W)
ts_new, te_new = get_edge_times(W)
wx = Lx(W)
# Expectations:
@test sx[2]-wx[2] == 100 # Trace 2 is 100 samples shorter
for i in [1,3,4,5,6]
  @test sx[i] == wx[i] # No change in other trace lengths
end
@test minimum(ts_new .≥ ts₆) == true # We start at ts₆, not before
@test findfirst(ts_new .== ts₆) == 1 # Defined start time
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1220 | printstyled(" taper!\n", color=:light_green)
# Unit tests for taper!/taper and the low-level helpers mktaper!/taper_seg!.
# mktaper!
N = 20
fs = 100.0
W = Array{Float32,1}(undef, N)
L = length(W)
mktaper!(W, N)
@test eltype(W) == Float32  # taper window keeps the buffer's eltype
# taper_seg
X = randn(Float32, N)
X = X.-mean(X)
Y = deepcopy(X)
μ = Float32(mean(X))
taper_seg!(X, W, L, μ, rev=true)  # reversed taper (segment end)
X = deepcopy(Y)                   # restore X before testing the forward taper
taper_seg!(X, W, L, μ)
@test isapprox(abs.(X)./abs.(Y), W)  # forward taper scales samples by the window
# Test that tapering works on SeisData objects
S = randSeisData(24, s=1.0, fs_min=1.0)
taper!(S)
# Test that tapering works on SeisChannel objects
C = randSeisChannel(s=true)
ungap!(C)
taper!(C)
C = randSeisChannel(s=true)
taper!(C)  # tapering a gapped channel directly must also work
# Test that tapering ignores fs=0
S = randSeisData(10, c=1.0, s=0.0)[2:10]
i = findall(S.fs.==0.0)
S = S[i]  # keep only irregularly sampled channels
U = deepcopy(S)
taper!(S)
@test S==U  # irregular data must be untouched
C = S[1]
U = deepcopy(C)
taper!(C)
@test C==U
# Test for L_tap < L in a SeisChannel: append a short (2 s) gapped segment
C = randSeisChannel()
nx = length(C.x)
ny = round(Int, 2*fs)
C.t = vcat(C.t[1:end-1,:], [nx+1 1000000], [nx + ny 0])
append!(C.x, randn(eltype(C.x), ny))
taper!(C)
# test on SeisEvent
V = randSeisEvent()
taper!(V.data)
# Test for out-of-place tapering
C = randSeisChannel()
S = randSeisData(10, c=1.0, s=0.0)[2:10]
V = randSeisEvent()
D = taper(C)
T = taper(S)
W = taper(V.data)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1298 | # resp
printstyled(" Type preservation after processing\n", color=:light_green)
for f in String["convert_seis!", "demean!", "detrend!", "filtfilt!", "merge!", "sync!", "taper!", "ungap!", "unscale!"]
printstyled(string(" ", f, "\n"), color=:light_green)
S = randSeisData(s=1.0, fs_min=30.0)
T = [eltype(S.x[i]) for i=1:S.n]
id = deepcopy(S.id)
ns = [length(findall(isnan.(S.x[i]))) for i in 1:S.n]
getfield(SeisIO, Symbol(f))(S)
for i = 1:S.n
j = findid(S.id[i], id)
if j > 0
@test T[j] == eltype(S.x[i])
nn = length(findall(isnan.(S.x[j])))
if ns[j] == 0 && nn > 0
str = string("channel = ", i, " output ", nn, " NaNs; input had none!")
@warn(str) # goes to warning buffer
println(str) # replicate warning to STDOUT
end
else
str = string("id = ", id, " deleted from S; check randSeisChannel time ranges.")
@warn(str)
println(str)
end
end
end
# remove_resp! must also preserve eltype, including Float32 data
printstyled(string("    remove_resp!\n"), color=:light_green)
r = fctoresp(1.0, 1.0/sqrt(2.0))      # 1 Hz corner, critically damped
r2 = fctoresp(0.0333, 1.0/sqrt(2.0))  # ~30 s corner
S = randSeisData(3, s=1.0)
S.resp[1] = r
S.resp[2] = deepcopy(r)
S.resp[3] = r2
S.x[1] = randn(Float32, S.t[1][end,1])  # force one channel to Float32
T = [eltype(S.x[i]) for i=1:S.n]
remove_resp!(S)
for i=1:S.n
@test T[i] == eltype(S.x[i])
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 559 | printstyled(" ungap!\n", color=:light_green)
# Regression test for Issue 29: subsample negative time gaps in merge!/ungap!;
# output is compared against a reference produced by ObsPy.
printstyled("    subsample negative time gaps (Issue 29)\n", color=:light_green)
gf1 = path * "/SampleFiles/SEED/CIRIO__BHE___2017101.mseed"
opf = path * "/SampleFiles/SEED/obspy.dat"
# read merge/ungap target file
S = read_data(gf1)
C1 = deepcopy(S[1])
i1 = C1.t[2,1]
merge!(S)
ungap!(S)
C2 = S[1]
i2 = C2.t[2,1]
# read ObsPy merge output to compare (raw Int32 samples)
io = open(opf, "r")
X2 = Array{Int32,1}(undef, length(C2.x))
read!(io, X2)
X = map(Float32, X2)
# these should be approximately equal
@test isapprox(C2.x, X)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 60 | printstyled("SeisIO.Quake submodule\n", color=:light_green)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1505 | fname = path*"/SampleFiles/fdsn.conf"
# FDSNevq / FDSNevt web-query tests (require network access to IRIS)
hood_reg = Float64[44.8, 46.0, -122.4, -121.0]           # Mt. Hood region: lat/lon box
rainier_rad = Float64[46.852886, -121.760374, 0.0, 0.1]  # Mt. Rainier: lat, lon, radius bounds
printstyled("  FDSNevq\n", color=:light_green)
# FDSNevq
printstyled("    single-server query\n", color=:light_green)
(H,R) = FDSNevq("2011-03-11T05:47:00", mag=[3.0, 9.9], nev=1, src="IRIS", v=0)
(H,R) = FDSNevq("201103110547", mag=[3.0, 9.9], nev=10, src="IRIS", v=0)
@test length(H)==9
printstyled("    single-server query without nev specified\n", color=:light_green)
(H,R) = FDSNevq("2018-06-01",reg=[32.0,38.0,-120.0,-115.0,-50.0,50.0],mag=[2.0,8.0],evw=[0.,375243600.0]);
@test length(H) == length(R)
@test length(H) > 1000
# printstyled("    multi-server query\n", color=:light_green)
# open("FDSNevq.log", "w") do out
#   redirect_stdout(out) do
#     ot = replace(split(string(now()),'.')[1], r"[-,:,A-Z,a-z]" => "")
#     (H,R) = FDSNevq(ot, mag=[3.0, 9.9], evw=[-86400.0, 0.0], src="all", nev=10, v=2)
#   end
# end
printstyled("    radius search (rad=)\n", color=:light_green)
(H,R) = FDSNevq("20190101000000", rad=rainier_rad, evw=[31536000.0, 31536000.0], mag=[0.0, 2.9], nev=100, src="IRIS", v=0)
printstyled("    partly-specified region search (reg=)\n", color=:light_green)
(H,R) = FDSNevq("20120601000000", reg=hood_reg, evw=[31536000.0, 31536000.0], mag=[0.0, 2.9], nev=100, src="IRIS", v=0)
# FDSNevt
printstyled("  FDSNevt\n", color=:light_green)
S = FDSNevt("201103110547", "PB.B004..EH?,PB.B004..BS?,PB.B001..BS?,PB.B001..EH?", v=0)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2699 | printstyled(" SAC support extensions\n", color=:light_green)
# writesac from a SeisEvent: event header values must round-trip through the file
printstyled("    writesac from SeisEvent\n", color=:light_green)
writesac(rse_wb(8))
sac_file = path*"/SampleFiles/SAC/test_le.sac"
f_stub = "1981.088.10.38.23.460"
f_out = f_stub * "..CDV...R.SAC"  # filename writesac is expected to produce
sacv7_out = "v7_out.sac"
# reference values used by the v7 tests further below
test_fs = 50.0
test_lat = 41.5764
test_lon = -87.6194
test_ot = -0.75
test_stla = 41.54
test_stlo = -87.64
test_mag = 3.1f0
# Read test file
C = verified_read_data("sac", sac_file, full=true)[1]
ev_lat = C.misc["evla"]
ev_lon = C.misc["evlo"]
ev_dep = C.misc["evdp"]
ev_id = "8108838" # taken from kevnm
Ev = SeisEvent()
Ev.data = convert(EventTraceData, SeisData(C))
Ev.hdr.mag.val = test_mag
Ev.hdr.loc.dep = ev_dep
Ev.hdr.loc.lat = ev_lat
Ev.hdr.loc.lon = ev_lon
Ev.hdr.id = ev_id
# Write to file
writesac(Ev)
@test safe_isfile(f_out)
# read unmodified file, check for preserved values
C = verified_read_data("sac", f_out, full=true)[1]
@test C.misc["evla"] == ev_lat
@test C.misc["evlo"] == ev_lon
@test C.misc["evdp"] == ev_dep
@test C.misc["mag"] == test_mag
@test string(C.misc["nevid"]) == ev_id
# Modify the original file to exercise the SAC v7 footer (doubles appended
# after the classic header; see the SAC file format spec).
printstyled("    SAC v7\n", color=:light_green)
io = open(sac_file, "r")
sac_raw = read(io)
close(io)
# Set version to 7 (nvhdr byte in the little-endian header)
sac_raw[305] = 0x07
# Set magnitude to test_mag
mag = reinterpret(UInt8, [test_mag])
sac_raw[157:160] .= mag
# Change some values in the v7 double-precision footer buffer
reset_sacbuf()
dv = BUF.sac_dv
dv[1] = 1.0/test_fs   # delta
dv[4] = test_ot       # origin time offset
dv[17] = test_lon     # event longitude
dv[18] = test_lat     # event latitude
dv[19] = test_stla    # station latitude
dv[20] = test_stlo    # station longitude
dv2 = deepcopy(dv)    # saved for the big-endian test below
sac_dbl_buf = reinterpret(UInt8, dv)
io = open(sacv7_out, "w")
write(io, sac_raw)
write(io, sac_dbl_buf)
close(io)
C = read_data("sac", sacv7_out, full=true)[1]
@test C.fs == test_fs  # footer delta must override the header value
printstyled("    fill_sac_evh!\n", color=:light_green)
Ev = SeisEvent()
Ev.data = convert(EventTraceData, SeisData(C))
fill_sac_evh!(Ev, sacv7_out, k=1)
@test Ev.hdr.loc.lat == test_lat
@test Ev.hdr.loc.lon == test_lon
@test Ev.hdr.mag.val == test_mag
@test isapprox(d2u(Ev.hdr.ot) - Ev.data.t[1][1,2]*μs, test_ot, atol=0.001)
printstyled("    big-endian\n", color=:light_green)
sac_be_file = path*"/SampleFiles/SAC/test_be.sac"
io = open(sac_be_file, "r")
sac_raw = read(io)
close(io)
sac_raw[308] = 0x07  # nvhdr=7; byte offset differs from LE due to byte order
reset_sacbuf()
dv .= bswap.(dv2)    # footer doubles must be byte-swapped for big-endian
sac_dbl_buf = reinterpret(UInt8, dv)
io = open(sacv7_out, "w")
write(io, sac_raw)
write(io, sac_dbl_buf)
close(io)
C = read_data("sac", sacv7_out, full=true)[1]
Ev = SeisEvent()
Ev.data = convert(EventTraceData, SeisData(C))
fill_sac_evh!(Ev, sacv7_out, k=1)
@test Ev.hdr.loc.lat == test_lat
@test Ev.hdr.loc.lon == test_lon
@test isapprox(d2u(Ev.hdr.ot) - Ev.data.t[1][1,2]*μs, test_ot, atol=0.001)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 564 | printstyled(" gcdist\n", color=:light_green)
# gcdist must give identical results for all equivalent argument forms
src = [46.8523, -121.7603]   # Mt. Rainier
src2 = [48.7767, -121.8144]  # Mt. Baker
rec = [45.5135 -122.6801; 44.0442 -123.0925; 42.3265 -122.8756]
G0 = gcdist(src, rec) # vec, arr
G1 = gcdist(src[1], src[2], rec) # lat, lon, arr
G2 = gcdist(src[1], src[2], rec[1,1], rec[1,2]) # s_lat, s_lon, r_lat, r_lon
G3 = gcdist(vcat(src',src2'), rec[1,:]) # arr, arr
G4 = gcdist(vcat(src',src2'), rec[1,:]) # arr, rec[1,:]
# NOTE(review): G3 and G4 are byte-identical calls despite the differing
# comments, so the G3 == G4 test is trivially true — confirm intent.
@test G0 == G1
@test G0[1,:] == G1[1,:] == G2[1,:]
@test G3 == G4
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2309 | # Merge with EventTraceData
printstyled(stdout," merge! on EventTraceData\n", color=:light_green)
# Test case where we have to merge phase catalogs
(S,T) = mktestseis()
S = convert(EventTraceData, S)
T = convert(EventTraceData, T)
for i = 1:S.n
S.az[i] = (rand()-0.5)*360.0
S.baz[i] = (rand()-0.5)*360.0
S.dist[i] = (rand()-0.5)*360.0
S.pha[i] = randPhaseCat()
end
for i = 1:T.n
T.az[i] = (rand()-0.5)*360.0
T.baz[i] = (rand()-0.5)*360.0
T.dist[i] = (rand()-0.5)*360.0
end
# Force a phase pick mismatch
P_true = SeisPha(rand(Float64,8)..., 'U', '0')
P_false = SeisPha(rand(Float64,8)..., '+', '1')
i = findid("CC.LON..BHZ", S)
j = findid("CC.LON..BHZ", T)
# T always begins later and should thus preserve P_true in the merge
S.pha[i]["P"] = deepcopy(P_false)
T.pha[j]["P"] = deepcopy(P_true)
merge!(S,T)
i = findid("CC.LON..BHZ", S)
@test S.pha[i]["P"] == P_true
# Check that purge works
V = purge(S)
purge!(S)
@test S == V
printstyled(stdout,"  merge! extensions to EventTraceData, EventChannel\n", color=:light_green)
S = convert(EventTraceData, randSeisData())
T = deepcopy(S)
merge!(S)
@test merge(T) == S  # out-of-place merge must match in-place
merge!(S,T)
sort!(T)
@test S == T  # merging a duplicate is a no-op modulo sort order
C = convert(EventChannel, randSeisChannel())
T = merge(S, C)
merge!(S, C)
@test S == T
C = convert(EventChannel, randSeisChannel())
S = convert(EventTraceData, randSeisData())
@test merge(C, S) == merge(S, C)  # merge is commutative
A = EventTraceData[convert(EventTraceData, randSeisData()),
convert(EventTraceData, randSeisData()),
convert(EventTraceData, randSeisData())]
merge(A)  # vector-of-EventTraceData method
S = convert(EventTraceData, randSeisData())
T = convert(EventTraceData, randSeisData())
@test S*T == T*S  # * is merge, also commutative
@test S*C == merge(S, EventTraceData(C))
C = convert(EventChannel, randSeisChannel())
D = convert(EventChannel, randSeisChannel())
S = merge(C,D)
@test typeof(S) == EventTraceData  # merging two channels yields trace data
@test C*D == S
# Check that dist, az, baz merge correctly: zeroed fields in T must be
# overwritten by the nonzero values from S
(S,T) = mktestseis()
S = convert(EventTraceData, S)
T = convert(EventTraceData, T)
T.az[3:4] .= 0.0
T.baz[3:4] .= 0.0
T.dist[3:4] .= 0.0
U = deepcopy(T)
for i = 1:S.n
S.az[i] = (rand()-0.5)*360.0
S.baz[i] = (rand()-0.5)*360.0
S.dist[i] = (rand()-0.5)*360.0
end
merge!(T,S)
for i in 1:S.n
n = findid(S.id[i], T)
if n > 0
@test S.az[i] == T.az[n]
@test S.baz[i] == T.baz[n]
@test S.dist[i] == T.dist[n]
end
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 505 | # save to disk/read from disk
savfile1 = "test.evt"
printstyled(" read/write of EventTraceData with compression\n", color=:light_green)
SeisIO.KW.comp = 0x02
S = convert(EventTraceData, randSeisData())
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test R == S
SeisIO.KW.comp = 0x01
S = convert(EventTraceData, randSeisData())
C = convert(EventChannel, SeisChannel())
nx = SeisIO.KW.n_zip*2
C.t = [1 0; nx 0]
C.x = randn(nx)
push!(S, C)
wseis(savfile1, S)
R = rseis(savfile1)[1]
@test R == S
rm(savfile1)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3168 | src = [46.8523, -121.7603]
# (repeat of the gcdist argument-form tests; src is defined just above)
src2 = [48.7767, -121.8144]
rec = [45.5135 -122.6801; 44.0442 -123.0925; 42.3265 -122.8756]
G0 = gcdist(src, rec) # vec, arr
G1 = gcdist(src[1], src[2], rec) # lat, lon, arr
G2 = gcdist(src[1], src[2], rec[1,1], rec[1,2]) # s_lat, s_lon, r_lat, r_lon
G3 = gcdist(vcat(src',src2'), rec[1,:]) # arr, arr
G4 = gcdist(vcat(src',src2'), rec[1,:]) # arr, rec[1,:]
@test G0 == G1
@test G0[1,:] == G1[1,:] == G2[1,:]
@test G3 == G4
# get_pha! tests: fetch travel-time phases for an event (network access needed)
printstyled("  phase_utils\n", color=:light_green)
GC.gc()
spad = 10.0
epad = 10.0
to = 30  # web-request timeout, seconds
src = "IRIS"
sta = "PB.B004..EH?,PB.B004..BS?,PB.B001..BS?,PB.B001..EH?"
# First, a well-formatted string
(H,R) = FDSNevq("2018-11-30T17:29:29.00", nev=1, src="IRIS")
H = H[1]
# Now, not so hot
(H,R) = FDSNevq("201103110547", mag=[3.0, 9.9], nev=1, src="IRIS")
H = H[1]
# Create channel data
s = H.ot # Start time for FDSNsta is event origin time
t = u2d(d2u(s) + 3600.0) # End time is 60 minutes later; we'll truncate
S = FDSNsta(sta, s=s, t=t, to=to)
# Check that nothing is initially in the phase catalog
Ev = SeisEvent(hdr=H, data=S[1:1])
@test length(Ev.data.pha[1]) == 0
printstyled("    request with invalid parameter\n", color=:light_green)
redirect_stdout(out) do
get_pha!(Ev, pha="", model="do.my.little.dance.on.the.catwalk", to=to, v=2)
end
@test length(Ev.data.pha[1]) == 0  # invalid model must leave the catalog empty
printstyled("    request with user-specified phase list\n", color=:light_green)
Ev = SeisEvent(hdr=H, data=S[1:1])
get_pha!(Ev, pha="P,S", to=to)
@test length(Ev.data.pha[1]) == 2
println("")
show_phases(Ev.data.pha[1])
println("")
printstyled("    request styles that return all phases\n", color=:light_green)
# pha="", pha="all", and pha="ttall" must return identical catalogs
Ev = SeisEvent(hdr=H, data=S[1:1])
get_pha!(Ev, pha="", to=to)
pcat1 = Ev.data.pha[1]
Ev = SeisEvent(hdr=H, data=S[1:1])
get_pha!(Ev, pha="all", to=to)
pcat2 = Ev.data.pha[1]
Ev = SeisEvent(hdr=H, data=S[1:1])
get_pha!(Ev, pha="ttall", to=to)
pcat3 = Ev.data.pha[1]
@test pcat1 == pcat2 == pcat3
# This should work for all stations, yielding only the S time
printstyled("    multi-channel request\n", color=:light_green)
J = findall([endswith(id, "EHZ") for id in S.id])  # vertical channels only
printstyled("      default phase\n", color=:light_green)
Ev = SeisEvent(hdr=H, data=S[J])
get_pha!(Ev, pha="S", to=to)
for i = 1:Ev.data.n
@test length(Ev.data.pha[i]) == 1
@test haskey(Ev.data.pha[i], "S")
end
printstyled("      user-specified phase list\n", color=:light_green)
Ev = SeisEvent(hdr=H, data=S[J])
get_pha!(Ev, pha="pP,PP", to=to)
for i = 1:Ev.data.n
@test length(Ev.data.pha[i]) == 2
@test haskey(Ev.data.pha[i], "PP")
@test haskey(Ev.data.pha[i], "pP")
end
# This should work for all stations, yielding all times
printstyled("      all phases\n", color=:light_green)
Ev = SeisEvent(hdr=H, data=S[J])
get_pha!(Ev, pha="all", to=to)
for i = 1:Ev.data.n
@test length(Ev.data.pha[i]) > 1
@test haskey(Ev.data.pha[i], "P")
@test haskey(Ev.data.pha[i], "S")
@test haskey(Ev.data.pha[i], "pP")
@test haskey(Ev.data.pha[i], "PP")
end
println("")
show_phases(Ev.data.pha[1])
println("")
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4449 | xml_evfile1 = path*"/SampleFiles/XML/fdsnws-event_2017-01-12T03-18-55Z.xml"
# QuakeML read tests: expected values below come from the sample XML files
xml_evfile2 = path*"/SampleFiles/XML/ISC_2011-tohoku-oki.xml"
xml_stfile = path*"/SampleFiles/XML/fdsnws-station_2017-01-12T03-17-42Z.xml"
printstyled("  QuakeML\n", color=:light_green)
id_err = "error in Station ID creation!"
unit_err = "units don't match instrument code!"
# reference values for the seven events in xml_evfile1
true_id = String["3337497", "3279407", "2844986", "2559759", "2092067", "1916079", "2413"]
true_ot = DateTime("2011-03-11T05:46:23.200")
true_loc = Float64[2.2376 38.2963; 93.0144 142.498; 26.3 19.7]
true_mag = Float32[8.6, 9.1, 8.8, 8.5, 8.6, 9.0, 8.5]
true_msc = String["MW", "MW", "MW", "MW", "MW", "MW", ""]
# reference instrument responses (pole-zero form, reversed sign convention)
r1 = PZResp(Complex{Float32}.([ 0.0+0.0im -981.0+1009.0im
0.0+0.0im -981.0-1009.0im
0.0+0.0im -3290.0+1263.0im
0.0+0.0im -3290.0-1263.0im]), rev=true)
r2 = PZResp(Complex{Float32}.([ 0.0+0.0im -0.037-0.037im
0.0+0.0im -0.037+0.037im
-15.15+0.0im -15.64+0.0im
-176.6+0.0im -97.34-400.7im
-463.1-430.5im -97.34+400.7im
-463.1+430.5im -374.8+0.0im
0.0+0.0im -520.3+0.0im
0.0+0.0im -10530.0-10050.0im
0.0+0.0im -10530.0+10050.0im
0.0+0.0im -13300.0+0.0im
0.0+0.0im -255.097+0.0im ]),rev=true)
printstyled("    file read 1\n", color=:light_green)
(EC,RR) = read_qml(xml_evfile1)
Nev = length(EC)
@test Nev == length(true_id)
for i = 1:Nev
@test EC[i].id == true_id[i]
@test EC[i].mag.val == true_mag[i]
@test EC[i].mag.scale == true_msc[i]
end
@test EC[2].ot==true_ot
for i = 1:2
@test ≈(EC[i].loc.lat, true_loc[1,i])
@test ≈(EC[i].loc.lon, true_loc[2,i])
@test ≈(EC[i].loc.dep, true_loc[3,i])
end
# file read 2: Tohoku-oki event; check header, location, and source parameters
printstyled("    file read 2\n", color=:light_green)
H, R = read_qml(xml_evfile2)
H = H[1]
R = R[1]
# Check basic headers
@test H.typ == "earthquake"
@test H.id == "16461282"
# Check that the correct magnitude is retained
@test H.mag.val ≥ 9.0f0
@test H.mag.scale == "MW"
# Check H.loc
@test H.loc.lat ≈ 38.2963
@test H.loc.lon ≈ 142.498
@test H.loc.dep ≈ 19.7152
@test H.loc.nst == 2643
@test H.loc.gap ≈ 6.314
@test H.loc.dt ≈ 0.31
@test H.loc.se ≈ 2.1567
@test H.loc.dmin ≈ 0.917
@test H.loc.dmax ≈ 163.253
@test H.loc.src == "smi:ISC/origid=602227159,ISC"
# Check source params (moment tensor, principal axes, nodal planes)
@test R.id == "600002952"
@test R.m0 == 5.312e22
@test R.mt == [1.73e22, -2.81e21, -1.45e22, 2.12e22, 4.55e22, -6.57e21]
@test R.dm == [6.0e19, 5.0e19, 5.0e19, 6.8e20, 6.5e20, 4.0e19]
@test R.pax == [295.0 115.0 205.0; 55.0 35.0 0.0; 5.305e22 -5.319e22 1.4e20]
@test R.planes == [25.0 203.0; 80.0 10.0; 90.0 88.0]
@test R.st.dur == 70.0
@test R.misc["methodID"] == "smi:ISC/methodID=Best_double_couple"
@test R.misc["pax_desc"] == "azimuth, plunge, length"
@test R.misc["planes_desc"] == "strike, dip, rake"
@test R.misc["derivedOriginID"] == "600126955"
# write_qml tests: round-trip a header/source pair and exercise append modes
printstyled("    file write\n", color=:light_green)
xf = "test.xml"
if isfile(xf)
safe_rm(xf)
end
write_qml(xf, H, R)
H1, R1 = read_qml(xf)
H1 = H1[1]
R1 = R1[1]
@test H1.src == abspath(xf)  # src is rewritten to point at the new file
@test R1.src == abspath(xf)
compare_SeisHdr(H1, H)
compare_SeisSrc(R1, R)
# appending more events: vector forms and header-only forms
write_qml(xf, [H], [R])
write_qml(xf, H)
write_qml(xf, [H])
H1, R1 = read_qml(xf)
H1 = H1[4]
R1 = R1[2]
compare_SeisHdr(H1, H)
compare_SeisSrc(R1, R)
# appending to an existing multi-event QuakeML file
xstr = read(xml_evfile1)
io = open(xf, "w")
write(io, xstr)
close(io)
write_qml(xf, [H], [R])
H1, R1 = read_qml(xf)
H1 = H1[2:end]
R1 = R1[2:end]
H, R = read_qml(xml_evfile1)
for i in 1:length(R)
@test H1[i].src == abspath(xf)
@test R1[i].src == abspath(xf)
compare_SeisHdr(H1[i], H[i])
compare_SeisSrc(R1[i], R[i])
end
# Write with no location set: should warn
H1[1].loc = EQLoc()
write_qml(xf, H1, R1)
# Write with no magnitude
Ev = randSeisEvent()
Ev.hdr.mag = EQMag()
write_qml(xf, Ev, v=3)
H1, R1 = read_qml(xf)
H1 = H1[end]
R1 = R1[end]
compare_SeisHdr(H1, Ev.hdr)
compare_SeisSrc(R1, Ev.source)
printstyled("    does trying to append a non-XML file error?\n", color=:light_green)
io = open(xf, "w")
write(io, rand(UInt8, 64))
close(io)
@test_throws ErrorException write_qml(xf, H, R)
# Clean up
safe_rm(xf)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 791 | qml_file = path*"/SampleFiles/XML/ISC_2011-tohoku-oki.xml"
# read_quake wrapper must match the format-specific readers for each format
uw_file = path*"/SampleFiles/UW/99011116541"
suds_file = path*"/SampleFiles/SUDS/eq_wvm1.sud"
printstyled("  read_quake wrapper\n", color=:light_green)
printstyled("    QML\n", color=:light_green)
H, R = read_qml(qml_file)
Ev1 = SeisEvent(hdr = H[1], source = R[1])
Ev2 = read_quake("qml", qml_file)
@test Ev2.hdr.src == abspath(qml_file)
@test Ev2.source.src == abspath(qml_file)
# normalize src fields before comparing the two read paths
Ev2.hdr.src = Ev1.hdr.src
Ev2.source.src = Ev1.source.src
@test Ev1 == Ev2
printstyled("    SUDS\n", color=:light_green)
Ev1 = readsudsevt(suds_file)
Ev2 = read_quake("suds", suds_file)
@test Ev1 == Ev2
printstyled("    UW\n", color=:light_green)
Ev1 = readuwevt(uw_file, full=true)
Ev2 = read_quake("uw", uw_file, full=true)
@test Ev1 == Ev2
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1286 | printstyled(" convert\n", color=:light_green)
# convert/show tests for Quake submodule types
C = randSeisChannel()
C1 = convert(EventChannel, C)
TD = convert(EventTraceData, C1)
@test sizeof(TD) > 136
C2 = convert(SeisChannel, C)
@test C == C2  # SeisChannel round-trip is lossless
EC = convert(EventChannel, randSeisChannel())
TD = unsafe_convert(EventTraceData, randSeisData(10))
for f in datafields
@test length(getfield(TD, f)) == 10  # every field vector keeps its length
end
S = unsafe_convert(SeisData, randSeisEvent(10).data)
for f in datafields
@test length(getfield(S, f)) == 10
end
printstyled("  show\n", color=:light_green)
# smoke tests: show/summary on empty and random objects must not throw
redirect_stdout(out) do
for i = 1:10
for T in (SeisHdr, SeisSrc, SeisEvent, EventTraceData, EventChannel, EQMag, EQLoc, SourceTime)
repr(T(), context=:compact=>true)
repr(T(), context=:compact=>false)
show(T())
end
summary(randSeisEvent())
summary(randSeisHdr())
summary(randSeisSrc())
show(randSeisEvent())
show(randSeisHdr())
show(randSeisSrc())
end
end
# EQMag: equal values must hash equal
Δ = 75.3
m1 = EQMag(3.2f0, "Ml", 23, Δ, "localmag")
m2 = EQMag(3.2f0, "Ml", 23, Δ, "localmag")
@test hash(m1) == hash(m2)
# SourceTime
@test isempty(SourceTime())
ST1 = SourceTime()
@test hash(ST1) == hash(SourceTime())
# SeisSrc: setting any field makes the object non-empty
@test isempty(SeisSrc())
@test isempty(SeisSrc(m0=1.0e22)) == false
@test isempty(SeisSrc(id = "123")) == false
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2969 | printstyled("RandSeis\n", color=:light_green)
# RandSeis internals: band codes, misc dicts, channel codes, and time matrices
fs_range = exp10.(range(-6, stop=4, length=50))
fc_range = exp10.(range(-4, stop=2, length=20))
printstyled("  getbandcode\n", color=:light_green)
# getbandcode must not throw across a wide fs/fc grid
for fs in fs_range
for fc in fc_range
getbandcode(fs, fc=fc)
end
end
printstyled("  rand_misc\n", color=:light_green)
for i = 1:100
D = rand_misc(1000)
end
# Similarly for the yp2 codes
printstyled("  iccodes_and_units\n", color=:light_green)
for i = 1:1000
cha, u = RandSeis.iccodes_and_units('s', true)
@test isa(cha, String)
@test isa(u, String)
cha, u = RandSeis.iccodes_and_units('s', false)
@test isa(cha, String)
@test isa(u, String)
end
# check that rand_t produces sane gaps
printstyled("  rand_t\n", color=:light_green)
fs = 100.0
nx = 100
printstyled("    controlled gaps\n", color=:light_green)
t = RandSeis.rand_t(fs, nx, 10, 1)
@test size(t, 1) == 12  # 10 gaps + start and end rows
t = RandSeis.rand_t(fs, nx, 0, 1)
@test size(t, 1) == 2   # no gaps: only start and end rows
t = RandSeis.rand_t(100.0, 1000, 4, 200)
@test size(t, 1) == 6
@test t[:,1] == [1, 200, 400, 600, 800, 1000]
printstyled("    gap < Δ/2 + 1\n", color=:light_green)
for i in 1:1000
fs = rand(RandSeis.fs_vals)
nx = round(Int64, rand(1200:7200)*fs)
t = RandSeis.rand_t(fs, nx, 0, 1)
δt = div(round(Int64, sμ/fs), 2) + 1  # minimum legal gap size
gaps = t[2:end-1, 2]
if length(gaps) > 0
@test minimum(gaps) ≥ δt
end
@test minimum(diff(t[:,1])) > 0  # sample indices strictly increasing
end
# rand_resp, namestrip, repop_id!, and randSeis* constructors
printstyled("  rand_resp\n", color=:light_green)
R = RandSeis.rand_resp(1.0, 8)
@test length(R.z) == length(R.p) == 8  # requested number of zeros and poles
printstyled("  namestrip\n", color=:light_green)
str = String(0x00:0xff)  # all 256 byte values
S = randSeisData(3)
S.name[2] = str
for key in keys(bad_chars)
test_str = namestrip(str, key)
# 32 ASCII control characters are always removed, plus the list for this key
@test length(test_str) == 256 - (32 + length(bad_chars[key]))
end
redirect_stdout(out) do
test_str = namestrip(str, "Nonexistent List")  # unknown list should warn, not throw
end
namestrip!(S)
@test length(S.name[2]) == 210
printstyled("  repop_id!\n", color=:light_green)
S = randSeisData()
S.id[end] = deepcopy(S.id[1])  # force a duplicate ID
id = deepcopy(S.id)
RandSeis.repop_id!(S)
@test id != S.id  # duplicates must be regenerated
printstyled("  randSeis*\n", color=:light_green)
# smoke tests: random constructors must not throw
for i = 1:10
randSeisChannel()
randSeisData()
randSeisHdr()
randSeisSrc()
randSeisEvent()
end
# keyword behavior of randSeisData/randSeisChannel
printstyled("  keywords\n", color=:light_green)
printstyled("    a0\n", color=:light_green)
try
randSeisData(a0=true)
catch err
println("a0 = true threw error ", err)
end
printstyled("    c\n", color=:light_green)
# c=1.0 forces all channels to be campaign-style (fs == 0)
for i = 1:10
S = randSeisData(10, c=1.0)
@test maximum(S.fs) == 0.0
end
printstyled("    fc\n", color=:light_green)
C = randSeisChannel(fc = 2.0)
resp_a0!(C.resp)
# fc=2.0 should yield a response matching one of these two damping values
r1 = fctoresp(2.0f0, 1.0f0)
r2 = fctoresp(2.0f0, Float32(1.0/sqrt(2)))
resp_a0!(r1)
resp_a0!(r2)
r = C.resp
for f in (:a0, :f0, :z, :p)
@test isapprox(getfield(r, f), getfield(r1, f), rtol=eps(Float32)) || isapprox(getfield(r, f), getfield(r2, f), rtol=eps(Float32))
end
printstyled("    s\n", color=:light_green)
# s=1.0 forces at least one regularly sampled channel
for i = 1:10
S = randSeisData(10, s=1.0)
@test maximum(S.fs) > 0.0
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1470 | # The file test.mseed comes from an older IRIS libmseed, found by anowacki
# It has a more complicated structure than the test.mseed file in more recent
# versions of libmseed, which reads with no issues
printstyled("SEED submodule\n", color=:light_green)
using SeisIO.SEED
printstyled(" info dump\n", color=:light_green)
redirect_stdout(out) do
dataless_support()
mseed_support()
seed_support()
resp_wont_read()
end
printstyled(" internals\n", color=:light_green)
printstyled(" seed_time\n", color=:light_green)
u16 = ones(UInt16, 3)
u16[3] = 0x0000
@test u2d(1.0e-6*SEED.seed_time(u16, 0x00, 0x00, 0x00, 0)) == DateTime("0001-01-01T00:00:00")
u16[1] = 0x0640 # 1600
u16[2] = 0x003c # 60
@test u2d(1.0e-6*SEED.seed_time(u16, 0x00, 0x00, 0x00, 0)) == DateTime("1600-02-29T00:00:00")
u16[1] = 0x076c # 1900
@test u2d(1.0e-6*SEED.seed_time(u16, 0x00, 0x00, 0x00, 0)) == DateTime("1900-03-01T00:00:00")
u16[1] = 0x07d0 # 2000
@test u2d(1.0e-6*SEED.seed_time(u16, 0x00, 0x00, 0x00, 0)) == DateTime("2000-02-29T00:00:00")
# 23, 59, 59
@test u2d(1.0e-6*SEED.seed_time(u16, 0x17, 0x3b, 0x3b, 0)) == DateTime("2000-02-29T23:59:59")
@test u2d(1.0e-6*SEED.seed_time(u16, 0x17, 0x3b, 0x3b, -110000000)) == DateTime("2000-02-29T23:58:09")
@test u2d(1.0e-6*SEED.seed_time(u16, 0x17, 0x3b, 0x3b, -115900000)) == DateTime("2000-02-29T23:58:03.1")
@test u2d(1.0e-6*SEED.seed_time(u16, 0x17, 0x3b, 0x3b, Int64(typemax(Int32)))) == DateTime("2000-03-01T00:35:46.484")
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 874 | printstyled(" dataless SEED\n", color=:light_green)
redirect_stdout(out) do
metafile = path*"/SampleFiles/SEED/jones.hood.dataless"
S = read_meta("dataless", metafile, v=3,
s="2008-01-01T00:00:00",
t="2008-02-01T00:00:00",
units=true)
S2 = read_dataless( metafile, v=3,
s=DateTime("2008-01-01T00:00:00"),
t=DateTime("2008-02-01T00:00:00"),
units=true)
@test S == S2
files = ls(path*"/SampleFiles/SEED/*.dataless")
for i in files
println("Reading file ", i)
S = read_meta("dataless", i, v=0, units=false)
S = read_meta("dataless", i, v=1, units=false)
S = read_meta("dataless", i, v=2, units=false)
S = read_meta("dataless", i, v=3, units=false)
S = read_meta("dataless", i, v=3, units=true)
end
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1561 | printstyled(" scan_seed\n", color=:light_green)
fname = string(path, "/SampleFiles/SEED/test.mseed")
test_sac_file = string(path, "/SampleFiles/SAC/test_le.sac")
@test_throws ErrorException scan_seed(test_sac_file)
redirect_stdout(out) do
soh = scan_seed(fname, v=3)
S = read_data("mseed", fname)
h = String.(strip.(split(soh[1], ",")))
nx = parse(Int64, split(h[2], "=")[2])
@test nx == length(S.x[1])
ng = parse(Int64, split(h[3], "=")[2])
@test ng == 0
nfs = parse(Int64, split(h[4], "=")[2])
@test nfs == 1
end
if has_restricted
redirect_stdout(out) do
fname = path * "/SampleFiles/Restricted/Steim2-AllDifferences-BE.mseed"
soh = scan_seed(fname, quiet=true)
h = String.(strip.(split(soh[1], ",")))
nx = parse(Int64, split(h[2], "=")[2])
@test nx == 3096
ng = parse(Int64, split(h[3], "=")[2])
@test ng == 0
nfs = parse(Int64, split(h[4], "=")[2])
@test nfs == 1
fname = path * "/SampleFiles/Restricted/SHW.UW.mseed"
scan_seed(fname, fs_times=true)
scan_seed(fname, seg_times=true)
soh = scan_seed(fname, quiet=true)
S = read_data("mseed", fname)
nfs_expect = [143, 7]
for i in 1:S.n
h = String.(strip.(split(soh[i], ",")))
nx = parse(Int64, split(h[2], "=")[2])
@test nx == length(S.x[i])
# This is occasionally off-by-one from the true total
ng = parse(Int64, split(h[3], "=")[2])
@test abs(ng - (size(S.t[i], 1) - 2)) ≤ 1
nfs = parse(Int64, split(h[4], "=")[2])
@test nfs == nfs_expect[i]
end
end
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 8974 | resp_file_0 = path*"/SampleFiles/SEED/YUK7.RESP"
resp_file_1 = path*"/SampleFiles/SEED/RESP.cat"
resp_file_2 = path*"/SampleFiles/SEED/RESP.*"
rtol = eps(Float32)
printstyled(" SEED RESP\n", color=:light_green)
printstyled(" single-record file\n", color=:light_green)
S = SeisData()
read_seed_resp!(S, [resp_file_0], false, true)
printstyled(" multi-record file\n", color=:light_green)
S = SeisData()
read_seed_resp!(S, [resp_file_1], true, true)
# Channel 1 =================================================================
# Station info
@test S.id[1] == "CO.HAW.00.HHZ"
@test S.units[1] == "m/s"
R = S.resp[1]
for f in fieldnames(MultiStageResp)
@test length(getfield(R, f)) == 11
end
# First stage
@test typeof(R.stage[1]) == PZResp64
@test isapprox(R.stage[1].a0, 5.71404E+08, rtol=rtol)
@test isapprox(R.stage[1].f0, 1.0)
@test length(R.stage[1].z) == 2
@test length(R.stage[1].p) == 5
@test isapprox(R.stage[1].z[1], 0.0+0.0im)
@test isapprox(R.stage[1].z[2], 0.0+0.0im)
@test isapprox(R.stage[1].p[1], -3.70080E-02 + 3.70080E-02im)
@test isapprox(R.stage[1].p[2], -3.70080E-02 - 3.70080E-02im)
@test isapprox(R.stage[1].p[3], -5.02650E+02 + 0.0im)
@test isapprox(R.stage[1].p[4], -1.00500E+03 + 0.0im)
@test isapprox(R.stage[1].p[5], -1.13100E+03 + 0.0im)
# Second-to-last stage
@test typeof(R.stage[10]) == CoeffResp
@test length(R.stage[10].b) == 251
@test length(R.stage[10].a) == 0
@test isapprox(R.stage[10].b[1:5], [+2.18133E-08, +1.07949E-07, +2.97668E-07, +6.73280E-07, +1.29904E-06])
@test R.fac[10] == 5
@test R.os[10] == 0
@test R.delay[10] ≈ 2.5000E-01
@test R.corr[10] ≈ 2.5000E-01
@test R.gain[10] ≈ 1.0
@test R.fg[10] ≈ 1.0
# Last stage
@test typeof(R.stage[11]) == CoeffResp
@test length(R.stage[11].b) == 251
@test length(R.stage[11].a) == 0
@test isapprox(R.stage[11].b[end-4:end], [-2.22747E-02, -1.03605E-01, +2.25295E-02, +3.17473E-01, +4.77384E-01])
@test R.fac[11] == 2
@test R.os[11] == 0
@test R.delay[11] ≈ 1.25
@test R.corr[11] ≈ 1.25
@test R.gain[11] ≈ 1.0
@test R.fg[11] ≈ 1.0
@test S.fs[1] == R.fs[end]/R.fac[end]
# Channel 5 =================================================================
@test S.id[5] == "PD.NS04..HHZ"
@test S.units[5] == "m/s"
@test S.gain[5] ≈ 1.7e8
# Total stages
R = S.resp[5]
for f in fieldnames(MultiStageResp)
@test length(getfield(R, f)) == 4
end
# First stage
@test typeof(R.stage[1]) == PZResp64
@test isapprox(R.stage[1].a0, 1.4142, rtol=rtol)
@test isapprox(R.stage[1].f0, 1.0)
@test length(R.stage[1].z) == 2
@test length(R.stage[1].p) == 2
@test isapprox(R.stage[1].z[1], 0.0+0.0im)
@test isapprox(R.stage[1].z[2], 0.0+0.0im)
@test isapprox(R.stage[1].p[1], -4.442900E+00 + 4.442900E+00im)
@test isapprox(R.stage[1].p[2], -4.442900E+00 - 4.442900E+00im)
# Second stage
@test typeof(R.stage[2]) == CoeffResp
@test isapprox(R.fs[2], 3.000000E+03)
@test R.fac[2] == 1
@test R.os[2] == 0
@test R.delay[2] ≈ 0.0
@test R.corr[2] ≈ 0.0
@test R.gain[2] ≈ 1.0e6
@test R.fg[2] ≈ 0.0
# Third stage
@test typeof(R.stage[3]) == CoeffResp
@test length(R.stage[3].b) == 180
@test length(R.stage[3].a) == 0
@test isapprox(R.stage[3].b[1:3], [1.327638E-08, 4.137208E-08, 9.662694E-08])
@test R.fac[3] == 6
@test R.os[3] == 0
@test R.delay[3] ≈ 0.0
@test R.corr[3] ≈ 0.0
@test R.gain[3] ≈ 1.0
@test R.fs[3] ≈ 3000.0
@test R.fg[3] ≈ 0.0
# Fourth stage
@test typeof(R.stage[4]) == CoeffResp
@test length(R.stage[4].b) == 160
@test length(R.stage[4].a) == 0
@test isapprox(R.stage[4].b[1:3], [3.863808E-09, 2.261888E-09, -2.660399E-08])
@test R.fs[4] ≈ 500.0
@test R.fac[4] == 5
@test R.os[4] == 0
@test R.delay[4] ≈ 0.0
@test R.corr[4] ≈ 0.0
@test R.gain[4] ≈ 1.0
@test R.fg[4] ≈ 0.0
# Last Channel ===============================================================
n = lastindex(S.id)
@test S.id[n] == "XX.NS236..SHZ"
@test S.units[n] == "m/s"
@test S.gain[n] ≈ 3.450000e+02
# Total stages
R = S.resp[n]
for f in fieldnames(MultiStageResp)
@test length(getfield(R, f)) == 2
end
# First stage
@test typeof(R.stage[1]) == PZResp64
@test isapprox(R.stage[1].a0, 1.0, rtol=rtol)
@test isapprox(R.stage[1].f0, 5.0)
@test length(R.stage[1].z) == 2
@test length(R.stage[1].p) == 2
@test isapprox(R.stage[1].z[1], 0.0 + 0.0im)
@test isapprox(R.stage[1].z[2], 0.0 + 0.0im)
@test isapprox(R.stage[1].p[1], -4.44 + 4.44im)
@test isapprox(R.stage[1].p[2], -4.44 - 4.44im)
@test R.fac[1] == 0
@test R.os[1] == 0
@test R.delay[1] ≈ 0.0
@test R.corr[1] ≈ 0.0
@test R.gain[1] ≈ 345.0
@test R.fg[1] ≈ 5.0
# Second stage
@test typeof(R.stage[2]) == CoeffResp
@test length(R.stage[2].b) == 1
@test length(R.stage[2].a) == 0
@test isapprox(R.stage[2].b[1], 1.0)
@test R.fac[2] == 1
@test R.os[2] == 0
@test R.delay[2] ≈ 0.0
@test R.corr[2] ≈ 0.0
@test R.gain[2] ≈ 1.0
@test R.fs[2] ≈ 40.0
@test R.fg[2] ≈ 5.0
# ===========================================================================
# Test that response start and end times are used correctly to determine
# which responses are kept
# These control values were extracted manually from RESP.cat
gain = [1.007E+09, 8.647300E+08]
a0 = [62695.2, 86083]
f0 = [0.02, 0.02]
p1 = [-8.985E+01 + 0.0im, -5.943130E+01 + 0.0im]
#=
Target window 1:
B050F03 Station: ANMO
B050F16 Network: IU
B052F03 Location: ??
B052F04 Channel: BHZ
B052F22 Start date: 1989,241
B052F23 End date: 1991,023,22:25
Target window 2:
B050F03 Station: ANMO
B050F16 Network: IU
B052F03 Location: ??
B052F04 Channel: BHZ
B052F22 Start date: 1995,080,17:16
B052F23 End date: 1995,195
=#
tt = [632188800000000, 801792000000000]
for (i,t0) in enumerate(tt)
nx = 20000
C = SeisChannel(id = "IU.ANMO..BHZ", fs = 20.0, t = [1 t0; nx 0], x = randn(nx))
S = SeisData(C)
read_meta!(S, "resp", resp_file_1)
j = findid(S, "IU.ANMO..BHZ")
@test S.gain[j] ≈ gain[i]
@test S.resp[j].stage[1].a0 ≈ a0[i]
@test S.resp[j].stage[1].f0 ≈ f0[i]
@test S.resp[j].stage[1].p[1] ≈ p1[i]
end
# ===========================================================================
# Test that mutli-file read commands work
n += 1
printstyled(" multi-file read\n", color=:light_green)
S = read_meta("resp", resp_file_2, units=true)
# Channel 1, file 2 =========================================================
# Station info
@test S.id[n] == "AZ.DHL..BS1"
@test S.units[n] == "m/m"
R = S.resp[n]
for f in fieldnames(MultiStageResp)
@test length(getfield(R, f)) == 9
end
# First stage
@test typeof(R.stage[1]) == PZResp64
@test R.fac[1] == 0
@test R.os[1] == 0
@test R.delay[1] ≈ 0.0
@test R.corr[1] ≈ 0.0
@test R.gain[1] ≈ 3.23
@test R.fg[1] ≈ 1.0
@test R.fs[1] ≈ 0.0
# Second stage
@test typeof(R.stage[2]) == CoeffResp
@test length(R.stage[2].b) == 0
@test length(R.stage[2].a) == 0
@test R.fac[2] == 1
@test R.os[2] == 0
@test R.delay[2] ≈ 0.0
@test R.corr[2] ≈ 0.0
@test R.gain[2] ≈ 5.263200E+05
@test R.fg[2] ≈ 1.0
@test R.fs[2] ≈ 1.28e5
# Third stage
@test typeof(R.stage[3]) == CoeffResp
@test length(R.stage[3].b) == 29
@test length(R.stage[3].a) == 0
@test R.stage[3].b[1:3] ≈ [2.441410E-04, 9.765620E-04, 2.441410E-03]
@test R.fac[3] == 8
@test R.os[3] == 0
@test R.delay[3] ≈ 8.750000E-04
@test R.corr[3] ≈ 8.750000E-04
@test R.gain[3] ≈ 1.0
@test R.fg[3] ≈ 1.0
@test R.fs[3] ≈ 1.28e5
# Strain channel
n = findid(S, "PB.CHL1.LM.LS1")
@test S.units[n] == "m/m"
R = S.resp[n]
for f in fieldnames(MultiStageResp)
@test length(getfield(R, f)) == 4
end
# First stage
@test typeof(R.stage[1]) == PZResp64
@test R.stage[1].a0 ≈ 1.0
@test R.stage[1].f0 ≈ 0.0
@test isempty(R.stage[1].z)
@test isempty(R.stage[1].p)
@test R.fac[1] == 0
@test R.os[1] == 0
@test R.delay[1] ≈ 0.0
@test R.corr[1] ≈ 0.0
@test R.gain[1] ≈ 5.901050E-07
@test R.fg[1] ≈ 0.0
@test R.fs[1] ≈ 0.0
# Second stage
@test typeof(R.stage[2]) == PZResp64
@test R.stage[2].a0 ≈ 1.0
@test R.stage[2].f0 ≈ 0.0
@test length(R.stage[2].z) == 0
@test length(R.stage[2].p) == 4
@test R.stage[2].p[1:2] ≈ [-8.3E-02+0.0im, -8.4E-02+0.0im]
@test R.fac[2] == 0
@test R.os[2] == 0
@test R.delay[2] ≈ 0.0
@test R.corr[2] ≈ 0.0
@test R.gain[2] ≈ 1.0
@test R.fg[2] ≈ 0.0
@test R.fs[2] ≈ 0.0
# Third stage
@test typeof(R.stage[3]) == CoeffResp
@test length(R.stage[3].b) == 0
@test length(R.stage[3].a) == 0
@test R.fac[3] == 1
@test R.os[3] == 0
@test R.delay[3] ≈ 0.0
@test R.corr[3] ≈ 0.0
@test R.gain[3] ≈ 3.052E-04
@test R.fg[3] ≈ 0.0
@test R.fs[3] ≈ 10.0
# Fourth stage
@test typeof(R.stage[4]) == CoeffResp
@test length(R.stage[4].b) == 10
@test length(R.stage[4].a) == 1
@test R.stage[4].a[1] ≈ 1.0
for i = 1:10
@test R.stage[4].b[i] ≈ 0.1
end
@test R.fac[4] == 10
@test R.os[4] == 4
@test R.delay[4] ≈ 0.5
@test R.corr[4] ≈ 0.0
@test R.gain[4] ≈ 1.0
@test R.fg[4] ≈ 0.0
@test R.fs[4] ≈ 10.0
printstyled(" logging to :notes\n", color=:light_green)
for i in 1:S.n
@test any([occursin("RESP.obspy.cat", n) for n in S.notes[i]])
if length(S.notes[i]) > 1
@test any([occursin("RESP.cat", n) for n in S.notes[i]])
end
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 708 | # Packages that throw "not installed" error on upgrade: Compat, DSP, HDF5
using Pkg
# Ensure each named package is a direct dependency of the active project,
# installing any that are missing.
# `pkgs`: package names to check. Returns nothing.
# Pkg.installed() was deprecated in favor of Pkg.dependencies() in Julia 1.4.
function pkg_check(pkgs::Array{String,1})
  # why would you deprecate something this useful?
  if VERSION >= v"1.4"
    # Build a name => version map of direct, versioned dependencies
    installs = Dict{String, VersionNumber}()
    for (_, dep) in Pkg.dependencies()
      dep.is_direct_dep || continue
      dep.version === nothing && continue
      installs[dep.name] = dep.version
    end
  else
    installs = Pkg.installed()
  end
  for p in pkgs
    if isnothing(get(installs, p, nothing))
      @warn(p * " not found! Installing.")
      Pkg.add(p)
    else
      println(p * " found. Not installing.")
    end
  end
  return nothing
end
pkg_check(["DSP", "HDF5"])
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1427 | using Blosc, Dates, DSP, HDF5, Logging, Printf, SeisIO, Statistics, Test
using SeisIO.FastIO, SeisIO.Nodal, SeisIO.Quake, SeisIO.RandSeis, SeisIO.SEED, SeisIO.SeisHDF, SeisIO.SUDS, SeisIO.UW
import Dates: DateTime, Hour, now
import DelimitedFiles: readdlm
import Random: rand, randperm, randstring
import SeisIO: BUF,
KW,
FDSN_sta_xml,
TimeSpec,
auto_coords,
bad_chars,
buf_to_double,
channel_match,
checkbuf!,
checkbuf_8!,
cmatch_p!,
code2resptyp,
code2typ,
datafields,
datareq_summ,
diff_x!,
endtime,
fillx_i16_le!,
fillx_i32_be!,
fillx_i32_le!,
findhex,
formats,
get_del_ranges,
get_http_post,
get_http_req,
get_views,
ibmfloat,
int2tstr,
int_x!,
mean,
minreq,
minreq!,
mk_t!,
mk_t,
mktaper!,
mktime,
nx_max,
parse_charr,
parse_chstr,
parse_sl,
poly,
polyfit,
polyval,
read_sacpz!,
read_sacpz,
read_seed_resp!,
read_station_xml!,
read_station_xml,
read_sxml,
reset_sacbuf,
resptyp2code,
safe_isdir,
safe_isfile,
sort_segs!,
sort_segs,
starttime,
sync_t,
sμ,
t_arr!,
t_bounds,
t_collapse,
t_expand,
t_extend,
t_win,
taper_seg!,
tnote,
trid,
tstr2int,
typ2code,
w_time,
webhdr,
x_inds,
xtmerge!,
μs
import SeisIO.RandSeis: iccodes_and_units, rand_misc
import SeisIO.Quake: unsafe_convert
import SeisIO.SeisHDF:read_asdf, read_asdf!, id_match, id_to_regex
import Statistics: mean
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2740 | # ===========================================================================
# All constants needed by tests are here
const unicode_chars = String.(readdlm(path*"/SampleFiles/julia-unicode.csv", '\n')[:,1])
const n_unicode = length(unicode_chars)
const breaking_dict = Dict{String, Any}(
"0" => rand(Char),
"1" => randstring(2^rand(2:10)),
"16" => rand(UInt8),
"17" => rand(UInt16),
"18" => rand(UInt32),
"19" => rand(UInt64),
"20" => rand(UInt128),
"32" => rand(Int8),
"33" => rand(Int16),
"34" => rand(Int32),
"35" => rand(Int64),
"36" => rand(Int128),
"48" => rand(Float16),
"49" => rand(Float32),
"50" => rand(Float64),
"80" => rand(Complex{UInt8}),
"81" => rand(Complex{UInt16}),
"82" => rand(Complex{UInt32}),
"83" => rand(Complex{UInt64}),
"84" => rand(Complex{UInt128}),
"96" => rand(Complex{Int8}),
"97" => rand(Complex{Int16}),
"98" => rand(Complex{Int32}),
"99" => rand(Complex{Int64}),
"100" => rand(Complex{Int128}),
"112" => rand(Complex{Float16}),
"113" => rand(Complex{Float32}),
"114" => rand(Complex{Float64}),
"128" => rand(Char, 2^rand(2:6)),
"129" => [randstring(2^rand(3:8)) for i = 1:rand(4:24)],
"144" => rand(UInt8, rand(4:24)),
"145" => rand(UInt16, rand(4:24)),
"146" => rand(UInt32, rand(4:24)),
"147" => rand(UInt64, rand(4:24)),
"148" => rand(UInt128, rand(4:24)),
"160" => rand(Int8, rand(4:24)),
"161" => rand(Int16, rand(4:24)),
"162" => rand(Int32, rand(4:24)),
"163" => rand(Int64, rand(4:24)),
"164" => rand(Int128, rand(4:24)),
"176" => rand(Float16, rand(4:24)),
"177" => rand(Float32, rand(4:24)),
"178" => rand(Float64, rand(4:24)),
"208" => rand(Complex{UInt8}, rand(4:24)),
"209" => rand(Complex{UInt16}, rand(4:24)),
"210" => rand(Complex{UInt32}, rand(4:24)),
"211" => rand(Complex{UInt64}, rand(4:24)),
"212" => rand(Complex{UInt128}, rand(4:24)),
"224" => rand(Complex{Int8}, rand(4:24)),
"225" => rand(Complex{Int16}, rand(4:24)),
"226" => rand(Complex{Int32}, rand(4:24)),
"227" => rand(Complex{Int64}, rand(4:24)),
"228" => rand(Complex{Int128}, rand(4:24)),
"240" => rand(Complex{Float16}, rand(4:24)),
"242" => rand(Complex{Float64}, rand(4:24)),
"241" => rand(Complex{Float32}, rand(4:24))
)
const NOOF = "
HI ITS CLOVER LOL
,-'-, `---..
/ \\
=, .
______<3. ` ,+, ,\\`
( \\ + `-”.` .; ` `.\\
(_/ \\ | (( ) \\
|_ ; \" \\ ( ,’ |\\
\\ ,- '💦 (,\\_____,’ / “\\
\\__---+ }._) |\\
/ _\\__💧”)/ +
( / 💧” \\ ++_
\\) ,“ |) ++ ++
💧 “💧 ( * +***
"
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 399 | # ===========================================================================
# Redirect info, warnings, and errors to the logger
out = open("runtests.log", "a")
logger = SimpleLogger(out)
global_logger(logger)
@info("stdout redirect and logging")
# Set some keyword defaults
SeisIO.KW.comp = 0x00
has_restricted = safe_isdir(path * "/SampleFiles/Restricted/")
keep_log = false
keep_samples = true
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3642 | # Aliases and short functions go here
# ===========================================================================
# Number of samples held in the data vector of each channel of S
Lx(S::GphysData) = map(i -> length(S.x[i]), 1:S.n)
# Replace "/" in each string with the platform-native path separator
function change_sep(paths::Array{String,1})
  sep = Base.Filesystem.pathsep()
  return map(p -> replace(p, "/" => sep), paths)
end
# Check that fields are preserved from S1 to S2
# Passes only if every data field of channel x in S1 equals channel y in S2;
# minimum() over the Bool comparison vector acts as an "all fields match" check.
test_fields_preserved(S1::GphysData, S2::GphysData, x::Int, y::Int) =
  @test(minimum([getfield(S1,f)[x]==getfield(S2,f)[y] for f in datafields]))
# Single-channel variant: compare a SeisChannel against channel y of S2
test_fields_preserved(S1::SeisChannel, S2::GphysData, y::Int) =
  @test(minimum([getfield(S1,f)==getfield(S2,f)[y] for f in datafields]))
# Map a fraction r to a 256-color terminal code by threshold bands:
# r ≥ 1.0 → 1, ≥ 0.75 → 202, ≥ 0.5 → 190, ≥ 0.25 → 148, else 10
function printcol(r::Float64)
  if r >= 1.00
    return 1
  elseif r >= 0.75
    return 202
  elseif r >= 0.50
    return 190
  elseif r >= 0.25
    return 148
  else
    return 10
  end
end
# Best-effort file removal. This exists because some CI hosts (e.g. Appveyor)
# have unreliable write permissions; a failed rm() logs a warning rather than
# aborting the test run. Always returns nothing.
function safe_rm(fname::String)
  try
    rm(fname)
  catch err
    msg = string("Can't remove ", fname, ": throws error ", err)
    @warn(msg)
  end
  return nothing
end
# test that each field has the right number of entries
# S.n must equal nt, and every per-channel field array in `datafields` must
# have exactly nt entries (checked via the max and min lengths).
function sizetest(S::GphysData, nt::Integer)
  @test ≈(S.n, nt)
  @test ≈(maximum([length(getfield(S,i)) for i in datafields]), nt)
  @test ≈(minimum([length(getfield(S,i)) for i in datafields]), nt)
  return nothing
end
# Basic sanity checks
# Verify structural invariants of S: nonempty structure, consistent field
# lengths, nonempty data vectors (unless allow_empty=true), and agreement
# between the :t index matrix and :x for irregularly-sampled channels.
function basic_checks(S::GphysData; allow_empty::Bool=false)
  # are we in scoping hell again?
  @test isempty(S) == false
  # does each field have the right number of entries?
  sizetest(S, S.n)
  for i = 1:S.n
    if allow_empty == false
      # were data read in?
      @test isempty(S.x[i]) == false
      # does :t indexing match :x?
      # NOTE(review): when fs > 0 this compares length(S.x[i]) to itself and
      # always passes; possibly meant to compare against S.t[i][end,1]. Confirm.
      @test length(S.x[i]) == (S.fs[i] == 0.0 ? size(S.t[i],1) : length(S.x[i]))
    elseif isempty(S.x[i]) == false
      # does :t indexing match :x?
      @test length(S.x[i]) == (S.fs[i] == 0.0 ? size(S.t[i],1) : length(S.x[i]))
    else
      # an empty :x should never occur without an empty :t
      @test isempty(S.t[i])
    end
  end
  return nothing
end
# Get the start and end times of each channel in S
# Returns (ts, te): start times from row 1 of each :t matrix, and end times
# from the last :t row (irregular data, fs == 0) or endtime() (regular data).
function get_edge_times(S::GphysData)
  ts = [S.t[i][1,2] for i=1:S.n]
  te = copy(ts)
  for i=1:S.n
    if S.fs[i] == 0.0
      # irregularly sampled: last time entry is the final sample time
      te[i] = S.t[i][end,2]
    else
      te[i] = endtime(S.t[i], S.fs[i])
    end
  end
  return ts, te
end
# Convert geographic (lat, lon) in degrees to approximate local (x, y)
# coordinates, returned as a two-element Int32 vector.
function latlon2xy(xlat::Float64, xlon::Float64)
  hemi = sign(xlon)                 # keep the sign of longitude for x
  c = 111194.6976                   # meters per degree of latitude
  y = c * xlat
  # great-circle angular distance from the origin to (xlat, |xlon|)
  d = acosd(cosd(xlon * hemi) * cosd(xlat))
  x = sqrt(c^2 * d^2 - y^2)
  return [round(Int32, hemi * x), round(Int32, y)]
end
# A simple time counting loop
# Count the number of windows of length `ti` (μs; default one day) needed to
# span [ts, te]; also exercises int2tstr() on each window's edges (the string
# results are unused, but the conversion must not throw).
function loop_time(ts::Int64, te::Int64; ti::Int64=86400000000)
  t1 = ts               # Int64 is immutable; no copy needed
  j = 0
  while t1 < te
    j += 1
    t1 = min(ts + ti, te)
    int2tstr(ts + 1)
    int2tstr(t1)
    ts += ti
  end
  return j
end
# Remove low-gain seismic data channels
# Deletes every channel whose ID matches r".EL?" and logs a warning for each.
# NOTE(review): the "?" makes the "L" optional, so this regex matches any "E"
# preceded by one character anywhere in the ID — confirm this looseness is
# intentional (it appears to target *.E?/*.EL? channel codes).
function remove_low_gain!(S::GphysData)
  i_low = findall([occursin(r".EL?", S.id[i]) for i=1:S.n])
  if !isempty(i_low)
    # delete from the end so earlier matched indices stay valid
    for k = length(i_low):-1:1
      @warn(join(["Low-gain, low-fs channel removed: ", S.id[i_low[k]]]))
      S -= S.id[i_low[k]]
    end
  end
  return nothing
end
# Test that data are time synched correctly within a SeisData structure
# Checks that channel durations and start times agree to within two samples
# at the lowest sampling rate in S.
# NOTE(review): start times t are in μs while the tolerance 2.0./S.fs is in
# seconds — confirm the intended units of the second assertion.
function sync_test!(S::GphysData)
  local L = [length(S.x[i])/S.fs[i] for i = 1:S.n]   # channel durations (s)
  local t = [S.t[i][1,2] for i = 1:S.n]              # channel start times
  @test maximum(L) - minimum(L) ≤ maximum(2.0./S.fs)
  @test maximum(t) - minimum(t) ≤ maximum(2.0./S.fs)
  return nothing
end
# Check that the ranges in x_del, flattened in order, reproduce exactly the
# index list ii
function test_del_ranges(x_del::Array{UnitRange, 1}, ii::Array{Int64, 1})
  flat = collect(Iterators.flatten(x_del))
  @test flat == ii
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2730 | # "Hall of Shame" time matrices that broke previous tests
# Build a randomly-chosen pathological time matrix for a channel starting at
# ts (μs) with nx samples at rate fs. The candidate matrices reproduce cases
# that broke earlier releases; gap values below Δ/2 + 1 μs are clamped up,
# since (per the note below) shorter gaps are merged into adjacent windows.
function breaking_tstruct(ts::Int64, nx::Int64, fs::Float64)
  Δ = round(Int64, sμ/fs)     # sample interval in μs
  t = rand([
    [1 ts+Δ; nx-1 7000; nx 57425593],
    [1 ts+Δ; nx-2 7000; nx-1 12345; nx 57425593],
    [1 ts+Δ; nx-1 5001; nx 57425593],
    [1 ts+Δ; 10 13412300; 11 123123123; nx-3 3030303; nx-2 -30000000; nx-1 12300045; nx 5700],
    [1 ts+Δ; 10 13412300; 11 123123123; nx-3 303030300000; nx-1 12300045; nx 57425593],
    [1 ts+Δ; 10 13412300; 11 123123123; nx-3 303030300000; nx-1 12300045; nx 57425593],
    [1 ts+Δ; 10 13412300; 11 123123123; nx-3 3030303; nx-2 -30000000; nx-1 12300045; nx 57425593]])
  # clamp sub-threshold gaps to the minimum representable gap length
  δ = div(Δ, 2) + 1
  i = findall(t[:,2] .< δ)
  t[i, 2] .= δ
  return t
end
#=
Time matrices beyond program limitations:
(1) Scenario gap of length |δ| ≤ Δ/2 for any Δ
Example δ = div(Δ,2); t = [1 ts+Δ; nx-1 δ; nx 57425593]
Test write and reread
Outcome last two windows combine
Cause gap length in penultimate window too short
=#
# Build a deliberately pathological SeisData structure for stress-testing:
# every :misc value type, empty notes, long and non-ASCII names, full Unicode
# notes, all :loc and :resp subtypes, searchable IDs, and time matrices that
# broke previous releases.
function breaking_seis()
  S = SeisData(randSeisData(), randSeisEvent(), randSeisData(2, c=1.0, s=0.0)[2])
  # Test a channel with every possible dict type
  S.misc[1] = breaking_dict
  # Test a channel with no notes
  S.notes[1] = []
  # Need a channel with a very long name to test in show.jl
  S.name[1] = "The quick brown fox jumped over the lazy dog"
  # Need a channel with a non-ASCII filename
  S.name[2] = "Moominpaskanäköinen"
  S.misc[2]["whoo"] = String[]          # ...and an empty String array in :misc
  S.misc[2]["♃♄♅♆♇"] = rand(3,4,5,6)    # ...and a 4d array in :misc
  #= Here we test true, full Unicode support;
    only 0xff can be a separator in S.notes[2] =#
  S.notes[2] = Array{String,1}(undef,6)
  S.notes[2][1] = String(Char.(0x00:0xfe))
  for i = 2:1:6
    uj = randperm(rand(1:n_unicode))
    S.notes[2][i] = join(unicode_chars[uj])
  end
  # Test short data, loc arrays — one channel of each GeoLoc subtype
  S.loc[1] = GenLoc()
  S.loc[2] = GeoLoc()
  S.loc[3] = UTMLoc()
  S.loc[4] = XYLoc()
  # Responses: one of each InstrumentResponse subtype, including a
  # multi-stage response with mixed stage types
  S.resp[1] = GenResp()
  S.resp[2] = PZResp()
  S.resp[3] = MultiStageResp(6)
  S.resp[3].stage[1] = CoeffResp()
  S.resp[3].stage[2] = PZResp()
  S.resp[3].gain[1] = 3.5e15
  S.resp[3].fs[1] = 15.0
  S.resp[3].stage[1].a = randn(Float64, 120)
  S.resp[3].stage[1].b = randn(Float64, 120)
  S.resp[3].i[1] = "{counts}"
  S.resp[3].o[1] = "m/s"
  S.x[4] = rand(Float64,4)
  S.t[4] = vcat(S.t[4][1:1,:], [4 0])
  # Some IDs that I can search for
  S.id[1] = "UW.VLL..EHZ"
  S.id[2] = "UW.VLM..EHZ"
  S.id[3] = "UW.TDH..EHZ"
  # Breaking time structures in previous tests
  for i in 1:3
    if S.fs[i] > 0
      S.t[i] = breaking_tstruct(S.t[i][1,2], length(S.x[i]), S.fs[i])
    end
  end
  return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 6348 | # Uses "fallback" start and end times to deal with outages
# Wrapper around get_data! that tolerates server outages: if the request
# returns no data (or throws), the whole request window is stepped back in
# time by `incr` and retried, up to `max_retries` times (floored at 3).
# All other keywords are forwarded unchanged to get_data!. Returns nothing;
# results accumulate in S.
function check_get_data!(S::SeisData, protocol::String, channels::Union{String, Array{String,1}};
  incr::DatePeriod = Day(1) , # Increment for failed reqs
  max_retries::Integer = 7 , # Maximum retries on fail
  autoname::Bool = false , # Auto-generate file names?
  demean::Bool = false , # Demean after download?
  detrend::Bool = false , # Detrend after download?
  fmt::String = KW.fmt , # File format
  msr::Bool = false , # Get multi-stage response?
  nd::Real = KW.nd , # Number of days per request
  opts::String = KW.opts , # Options string
  rad::Array{Float64, 1} = KW.rad , # Query radius
  reg::Array{Float64, 1} = KW.reg , # Query region
  prune::Bool = KW.prune , # Prune empty channels?
  rr::Bool = false , # Remove instrument response?
  s::TimeSpec = 0 , # Start
  si::Bool = KW.si , # Fill station info?
  src::String = KW.src , # Data source
  taper::Bool = false , # Taper after download?
  t::TimeSpec = (-600) , # End or Length (s)
  to::Int64 = KW.to , # Timeout (s)
  ungap::Bool = false , # Remove time gaps?
  unscale::Bool = false , # Unscale (divide by gain)?
  v::Integer = KW.v , # Verbosity
  w::Bool = KW.w , # Write to disc?
  xf::String = "FDSNsta.xml" , # XML save file
  y::Bool = KW.y , # Sync
  )
  # true if any channel in S holds at least one sample
  has_data(S::SeisData) = (isempty(S) ? 0 : maximum([length(x) for x in S.x])) > 0
  # build warning text; α is the original request start, s the current one
  warn_str(s::DateTime, α::DateTime, i::Integer, incr::DatePeriod; a::Bool=false) =
    string("S had no data ",
    a ? string("after ", i, " retries") : string("until retry #", i),
    " (", typeof(incr)(α-s), " before original request begin time)")
  s,t = parsetimewin(s,t)
  max_retries = max(max_retries, 3)   # always allow at least 3 attempts
  s = DateTime(s)
  t = DateTime(t)
  α = deepcopy(s)                     # remember the original start time
  i = 0
  while i ≤ max_retries
    i += 1
    try
      get_data!(S, protocol, channels,
        autoname=autoname,
        demean=demean,
        detrend=detrend,
        fmt=fmt,
        msr=msr,
        nd=nd,
        opts=opts,
        prune=prune,
        rad=rad,
        reg=reg,
        rr=rr,
        s=s,
        si=si,
        src=src,
        t=t,
        taper=taper,
        to=to,
        ungap=ungap,
        unscale=unscale,
        v=v,
        w=w,
        xf=xf,
        y=y)
    catch err
      # log and retry; a transient server error should not kill the tests
      @warn(string("Calling get_data threw error ", err))
    end
    if has_data(S)
      # success; warn if it took more than one attempt
      if i > 1
        str = warn_str(s, α, i-1, incr)
        printstyled("WARNING: ", str, ".\nCheck server; contact admins if problem persists.\n", color=:light_yellow)
        @warn(str)
      end
      return nothing
    end
    # no data yet: shift the whole request window back by `incr` and retry
    s -= incr
    t -= incr
    println("Retrying; start time decremented by ", incr)
  end
  # all retries exhausted
  str = warn_str(s, α, max_retries, incr, a=true)
  printstyled("WARNING: ", str, ".\nCheck ", protocol, " scripts and dependencies for new bugs!\n", color=:magenta, bold=true)
  @warn(str)
  return nothing
end
# Non-mutating variant: construct an empty SeisData and delegate to
# check_get_data! with all keywords forwarded; returns the populated SeisData.
function check_get_data(protocol::String, channels::Union{String, Array{String,1}};
  incr::DatePeriod = Day(1) , # Increment for failed reqs
  max_retries::Integer = 7 , # Maximum retries on fail
  autoname::Bool = false , # Auto-generate file names?
  demean::Bool = false , # Demean after download?
  detrend::Bool = false , # Detrend after download?
  fmt::String = KW.fmt , # File format
  msr::Bool = false , # Get multi-stage response?
  nd::Real = KW.nd , # Number of days per request
  opts::String = KW.opts , # Options string
  rad::Array{Float64, 1} = KW.rad , # Query radius
  reg::Array{Float64, 1} = KW.reg , # Query region
  prune::Bool = KW.prune , # Prune empty channels?
  rr::Bool = false , # Remove instrument response?
  s::TimeSpec = 0 , # Start
  si::Bool = KW.si , # Fill station info?
  src::String = KW.src , # Data source
  taper::Bool = false , # Taper after download?
  t::TimeSpec = (-600) , # End or Length (s)
  to::Int64 = KW.to , # Timeout (s)
  ungap::Bool = false , # Remove time gaps?
  unscale::Bool = false , # Unscale (divide by gain)?
  v::Integer = KW.v , # Verbosity
  w::Bool = KW.w , # Write to disc?
  xf::String = "FDSNsta.xml" , # XML save file
  y::Bool = KW.y , # Sync
  )
  S = SeisData()
  check_get_data!(S, protocol, channels,
    autoname=autoname,
    demean=demean,
    detrend=detrend,
    fmt=fmt,
    msr=msr,
    nd=nd,
    opts=opts,
    prune=prune,
    rad=rad,
    reg=reg,
    rr=rr,
    s=s,
    si=si,
    src=src,
    t=t,
    taper=taper,
    to=to,
    ungap=ungap,
    unscale=unscale,
    v=v,
    w=w,
    xf=xf,
    y=y)
  return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2516 | # :src isn't tested; changes depending on file/web origin
function compare_SeisHdr(H1::SeisHdr, H2::SeisHdr)
for f in fieldnames(EQLoc)
if typeof(getfield(H1.loc, f)) <: Union{AbstractFloat, Int64}
(f == :rms) && continue
@test isapprox(getfield(H1.loc, f), getfield(H2.loc,f))
end
end
@test getfield(H1.loc, :typ) == getfield(H2.loc, :typ)
@test getfield(H1.loc, :src) == getfield(H2.loc, :src)
@test H1.mag.val ≈ H2.mag.val
@test H1.mag.gap ≈ H2.mag.gap
@test H1.mag.src == H2.mag.src
@test H1.mag.scale == H2.mag.scale
@test H1.mag.nst == H2.mag.nst
@test H1.id == H2.id
@test H1.ot == H2.ot
@test H1.typ == H2.typ
return nothing
end
# Compare two SeisSrc (source description) objects via @test: IDs with exact
# equality, moment tensor and source-time function fields with isapprox.
function compare_SeisSrc(R1::SeisSrc, R2::SeisSrc)
  @test R1.id == R2.id
  @test R1.eid == R2.eid
  @test R1.m0 ≈ R2.m0
  @test R1.mt ≈ R2.mt
  @test R1.dm ≈ R2.dm
  @test R1.gap ≈ R2.gap
  @test R1.pax ≈ R2.pax
  @test R1.planes ≈ R2.planes
  # source-time function fields
  @test R1.st.desc == R2.st.desc
  @test R1.st.dur ≈ R2.st.dur
  @test R1.st.rise ≈ R2.st.rise
  @test R1.st.decay ≈ R2.st.decay
  return nothing
end
# Compare two SeisData objects channel-by-channel after sorting both in place.
# IDs, names, and units must match exactly; fs, gain, location, response,
# times, and data are compared with isapprox.
function compare_SeisData(S1::SeisData, S2::SeisData)
  sort!(S1)
  sort!(S2)
  @test S1.id == S2.id
  @test S1.name == S2.name
  @test S1.units == S2.units
  @test isapprox(S1.fs, S2.fs)
  @test isapprox(S1.gain, S2.gain)
  for i in 1:S1.n
    L1 = S1.loc[i]
    L2 = S2.loc[i]
    @test isapprox(L1.lat, L2.lat)
    @test isapprox(L1.lon, L2.lon)
    @test isapprox(L1.el, L2.el)
    @test isapprox(L1.dep, L2.dep)
    @test isapprox(L1.az, L2.az)
    @test isapprox(L1.inc, L2.inc)
    R1 = S1.resp[i]
    R2 = S2.resp[i]
    for f in fieldnames(PZResp)
      @test isapprox(getfield(R1, f), getfield(R2,f))
    end
    # Changed 2020-03-05
    # Compare sample times in expanded, sorted form: gapped :t matrices are
    # not a unique representation, so equal data can have unequal :t
    # (see the explanatory comment following this function)
    t1 = t_expand(S1.t[i], S1.fs[i])
    t2 = t_expand(S2.t[i], S2.fs[i])
    ii = sortperm(t1)
    jj = sortperm(t2)
    @test isapprox(t1[ii], t2[jj])
    @test isapprox(S1.x[i][ii], S2.x[i][jj])
  end
  return nothing
end
#=
old :t, :x tests:
@test S1.t[i] == S2.t[i]
@test isapprox(S1.x[i],S2.x[i])
reason for change:
with gaps, representations of sample times aren't unique.
in rare cases, writing and rereading :t, :x to/from ASDF yields a different
time matrix :t but the data and sample times haven't changed.
=#
# Assert that two SeisEvent structures are equivalent by checking each
# component in turn: the header, the source, then the trace data (converted
# to SeisData so that the shared comparison helper can be reused).
function compare_events(Ev1::SeisEvent, Ev2::SeisEvent)
  compare_SeisHdr(Ev1.hdr, Ev2.hdr)
  compare_SeisSrc(Ev1.source, Ev2.source)
  compare_SeisData(convert(SeisData, Ev1.data), convert(SeisData, Ev2.data))
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 890 | function mktestseis()
  # Build two small synthetic SeisData sets (S, T) for merge/append/sync
  # tests. S has 5 channels of L0 = 30 s starting now; T has 4 channels of
  # L1 = 10 s starting os = 5 s after S ends. IDs "CC.LON..BHZ" and
  # "UW.SEP..EHZ" appear in both, so S and T partially overlap by channel.
  L0 = 30   # record length of S, seconds
  L1 = 10   # record length of T, seconds
  os = 5    # offset of T's start time after S's end, seconds
  tt = time()
  t1 = round(Int64, tt/μs)              # start time of S, μs
  t2 = round(Int64, (L0+os)/μs) + t1    # start time of T, μs
  S = SeisData(5)
  S.name = ["Channel 1", "Channel 2", "Channel 3", "Longmire", "September Lobe"]
  S.id = ["XX.TMP01.00.BHZ","XX.TMP01.00.BHN","XX.TMP01.00.BHE","CC.LON..BHZ","UW.SEP..EHZ"]
  S.fs = collect(Main.Base.Iterators.repeated(100.0, S.n))
  S.fs[4] = 20.0
  for i = 1:S.n
    os1 = round(Int64, 1/(S.fs[i]*μs))  # one-sample offset, μs
    S.x[i] = randn(Int(L0*S.fs[i]))
    S.t[i] = [1 t1+os1; length(S.x[i]) 0]
  end
  T = SeisData(4)
  T.name = ["Channel 6", "Channel 7", "Longmire", "September Lobe"]
  T.id = ["XX.TMP02.00.EHZ","XX.TMP03.00.EHN","CC.LON..BHZ","UW.SEP..EHZ"]
  T.fs = collect(Main.Base.Iterators.repeated(100.0, T.n))
  T.fs[3] = 20.0
  for i = 1:T.n
    T.x[i] = randn(Int(L1*T.fs[i]))
    T.t[i] = [1 t2; length(T.x[i]) 0]
  end
  return (S,T)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
"""
    naive_filt!(C::SeisChannel; fl=1.0, fh=15.0, np=4, rp=10, rs=30,
                rt="Bandpass", dm="Butterworth")

Reference ("naive") zero-phase IIR filter used for test comparisons.
Filters `C.x` in place with DSP.jl primitives.

Keywords: `fl`, `fh` = low/high corner frequencies (Hz); `np` = number of
poles; `rp`, `rs` = passband/stopband ripple (dB) for Chebyshev/Elliptic
designs; `rt` = response type ("Bandpass", "Bandstop", "Lowpass",
"Highpass"); `dm` = design method ("Butterworth", "Chebyshev1",
"Chebyshev2", "Elliptic").
"""
function naive_filt!(C::SeisChannel;
  fl::Float64=1.0,
  fh::Float64=15.0,
  np::Int=4,
  rp::Int=10,
  rs::Int=30,
  rt::String="Bandpass",
  dm::String="Butterworth"
  )

  # Sampling frequency of the channel. Previously `fs` was referenced below
  # without being defined (UndefVarError when called); `fe = 0.5*C.fs`
  # (Nyquist) in the old code confirms C.fs is the sampling rate. The old
  # locals T, fe, low, high were computed but never used and are removed.
  fs = C.fs

  # response type: frequencies are passed in Hz with the fs keyword
  if rt == "Highpass"
    ff = Highpass(fh, fs=fs)
  elseif rt == "Lowpass"
    ff = Lowpass(fl, fs=fs)
  else
    # "Bandpass" or "Bandstop": both corners required
    ff = getfield(DSP.Filters, Symbol(rt))(fl, fh, fs=fs)
  end

  # design method (filter prototype)
  if dm == "Elliptic"
    zp = Elliptic(np, rp, rs)
  elseif dm == "Chebyshev1"
    zp = Chebyshev1(np, rp)
  elseif dm == "Chebyshev2"
    zp = Chebyshev2(np, rs)
  else
    zp = Butterworth(np)
  end

  # polynomial ratio form of the designed digital filter
  pr = convert(PolynomialRatio, digitalfilter(ff, zp))

  # zero-phase (forward-backward) filter, in place
  C.x[:] = filtfilt(pr, C.x)
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1550 | # randSeisEvent_well-behaved
function rse_wb(n::Int64)
  # Generate a random SeisEvent with n channels whose randomized fields
  # are constrained to values known to survive file I/O round-trip tests
  # ("well-behaved"); problematic fields are zeroed or normalized below.
  Ev = randSeisEvent(n, s=1.0)

  # Prevent problematic :source fields
  Ev.source.eid = Ev.hdr.id
  Ev.source.npol = 0
  Ev.source.notes = String[]
  Ev.source.misc = Dict{String,Any}(
    "pax_desc" => "azimuth, plunge, length",
    "mt_id" => "smi:SeisIO/moment_tensor;fmid="*Ev.source.id,
    "planes_desc" => "strike, dip, rake")
  note!(Ev.source, "+origin ¦ " * Ev.source.src)

  # Prevent read/write of problematic :hdr fields
  Ev.hdr.int = (0x00, "")
  Ev.hdr.loc.datum = ""
  Ev.hdr.loc.typ = ""
  Ev.hdr.loc.rms = 0.0
  Ev.hdr.mag.src = Ev.hdr.loc.src * ","
  Ev.hdr.notes = String[]
  Ev.hdr.misc = Dict{String,Any}()
  note!(Ev.hdr, "+origin ¦ " * Ev.hdr.src)

  # Ensure flags will re-read accurately
  flags = bitstring(Ev.hdr.loc.flags)
  if flags[1] == '1' || flags[2] == '1'
    # if either of the two high bits is set, force both so the byte round-trips
    flags = "11" * flags[3:8]
    Ev.hdr.loc.flags = parse(UInt8, flags, base=2)
  end

  # Prevent very low fs, bad locations, rubbish in :misc, :notes
  for j in 1:Ev.data.n
    Ev.data.fs[j] = max(Ev.data.fs[j], 1.0)
    Ev.data.misc[j] = Dict{String,Any}()
    Ev.data.notes[j] = String[]
    Ev.data.loc[j] = RandSeis.rand_loc(false)
    # Use time structures that formerly broke check_for_gap! / t_extend
    if j < 3
      Ev.data.t[j] = breaking_tstruct(Ev.data.t[j][1,2],
                                      length(Ev.data.x[j]),
                                      Ev.data.fs[j])
    end
  end
  return Ev
end
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1710 | function SL_wait(ta::Array{Union{Task,Nothing}, 1}, t_interval::Int64)
  # Poll the query tasks in `ta` every `t_interval` seconds until all are
  # done, then fetch the four results and @test that they agree. If all
  # tasks finish but only after t ≥ 60 s, the tasks are discarded and the
  # comparison is skipped; the loop gives up entirely once t reaches 300 s.
  # NOTE(review): assumes length(ta) == 4 — TODO confirm with callers.
  t = 0
  while t < 300
    # NOTE: the comprehension variable `t` below shadows the outer counter
    if any([!istaskdone(t) for t in ta])
      sleep(t_interval)
      t += t_interval
    elseif t ≥ 60
      # all tasks finished, but too slowly: drop them and skip the test
      println(" one or more queries incomplete after 60 s; skipping test.")
      for i = 1:4
        ta[i] = nothing
        GC.gc()
      end
      break
    else
      # all tasks finished in time: the four results must be identical
      tf1 = fetch(ta[1])
      tf2 = fetch(ta[2])
      tf3 = fetch(ta[3])
      tf4 = fetch(ta[4])
      @test tf1 == tf2 == tf3 == tf4
      break
    end
  end
  return nothing
end
# Wait up to `tmax` seconds for streaming data to arrive in S, then close
# any open connections in S.c, prune empty channels, and (if data arrived)
# synchronize channels to the earliest start time. Used by streaming-client
# test sessions; all display output is redirected to the `out` stream.
function wait_on_data!(S::GphysData, tmax::Real)
  τ = 0.0   # elapsed wait time, s (mutated inside the closure below)
  t = 5.0   # polling interval, s
  printstyled(string(" (sleep up to ", tmax, " s)\n"), color=:green)
  redirect_stdout(out) do
    show(S)
    # Here we actually wait for data to arrive
    while isempty(S)
      if any(isopen.(S.c)) == false
        break   # every connection already closed; no data is coming
      end
      if τ > tmax-t
        show(S)
        break   # time budget exhausted
      end
      sleep(t)
      τ += t
    end
    # Close the connection cleanly (write & close are redundant, but
    # write should close it instantly)
    for q = 1:length(S.c)
      if isopen(S.c[q])
        if q == 3
          show(S)   # NOTE(review): q == 3 special-cased — presumably
                    # exercises show() on a live connection; confirm intent
        end
        close(S.c[q])
      end
    end
    sleep(t)
    τ += t
  end
  # Synchronize (the reason we used d0,d1 in our test sessions)
  prune!(S)
  if !isempty(S)
    sync!(S, s="first")
    str = string("time elapsed = ", τ, " s")
    @info(str)
    printstyled(" "*str*"\n", color=:green)
  else
    # no data: distinguish early closure from a server timeout
    str = (if τ < tmax
      string("connection closed after ", τ, " s")
    else
      string("no data after ", τ, " s...is server up?")
    end)
    @warn(str)
    printstyled(" "*str*"\n", color=:green)
  end
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1182 | """
    test_chan_ext(file::String, fmt::String, id::String, fs::Float64, i::Int64, t0::Int64)

Test for Issue 34: reading `file` (format `fmt`) into a SeisData structure
that already contains channel `id` with data must append the new samples to
channel `i` without corrupting the pre-existing samples or times. `t0` is
the expected start time (μs) of the newly-read segment.
"""
function test_chan_ext(file::String, fmt::String, id::String, fs::Float64, i::Int64, t0::Int64)
  # Comparison struct
  S = verified_read_data(fmt, file)
  j = findid(id, S)
  if j == 0
    # diagnostic dump before aborting: the file does not contain `id`
    println("Test failing; cannot find ID!")
    println(rpad("File: ", 12), file)
    println(rpad("Format: ", 12), fmt)
    println("Channel ID: ", id)
    println("Output IDs: ", S.id)
    throw("Test failed; ID not found!")
  end
  # Create SeisData struct with data present using correct headers
  ny = max(100, round(Int64, 10*fs))  # at least 100 pre-existing samples
  y = rand(Float32, ny)
  S1 = SeisData(
        SeisChannel(
          id = id,
          fs = fs,
          gain = S.gain[j],
          units = S.units[j],
          loc = deepcopy(S.loc[j]),
          resp = deepcopy(S.resp[j]),
          t = [1 rand(0:10000000); ny 0],
          x = copy(y)
          )
        )
  # Read into S1
  verified_read_data!(S1, fmt, file)
  # NOTE(review): mixes S1.t[i] with S.fs[i]; works when the fs values
  # agree, but S1.fs[i] looks intended — confirm.
  t1 = t_expand(S1.t[i], S.fs[i])[ny+1]
  t2 = S.t[j][1,2]
  # Check that read preserves time, data
  @test S1.x[i][1:ny] == y
  @test S1.x[i][ny+1:end] == S.x[j]
  @test t0 == t1 == t2
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1894 | function verified_read_data!(S::GphysData, fmt::String, fpat::Union{String, Array{String,1}};
  full ::Bool = false, # full SAC/SEGY hdr
  cf ::String = "", # win32 channel info file
  jst ::Bool = true, # are sample times JST (UTC+9)?
  nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
  nx_new ::Int64 = KW.nx_new, # new channel samples
  strict ::Bool = true,
  swap ::Bool = false, # do byte swap?
  v ::Int64 = KW.v, # verbosity level
  vl ::Bool = false, # verbose logging
  allow_empty::Bool = false
  )
  # In-place read_data! wrapper for the test suite: forward all keywords,
  # then run basic_checks on S to verify its structural invariants.
  # allow_empty only affects basic_checks (permits a zero-channel result);
  # it is not passed to read_data!.
  read_data!(S, fmt, fpat,
    full = full,
    cf = cf,
    jst = jst,
    nx_add = nx_add,
    nx_new = nx_new,
    strict = strict,
    swap = swap,
    v = v,
    vl = vl
    )
  basic_checks(S, allow_empty = allow_empty)
  return nothing
end
# Out-of-place counterpart of verified_read_data!: wraps read_data with the
# same keywords, verifies the result with basic_checks, and returns it.
# allow_empty only affects basic_checks (permits a zero-channel result).
function verified_read_data(fmt::String, fpat::Union{String, Array{String,1}};
  full ::Bool = false, # full SAC/SEGY hdr
  cf ::String = "", # win32 channel info file
  jst ::Bool = true, # are sample times JST (UTC+9)?
  nx_add ::Int64 = KW.nx_add, # append nx_add to overfull channels
  nx_new ::Int64 = KW.nx_new, # new channel samples
  strict ::Bool = true,
  swap ::Bool = false, # do byte swap?
  v ::Int64 = KW.v, # verbosity level
  vl ::Bool = false, # verbose logging
  allow_empty::Bool = false
  )
  S = read_data(fmt, fpat,
    full = full,
    cf = cf,
    jst = jst,
    nx_add = nx_add,
    nx_new = nx_new,
    strict = strict,
    swap = swap,
    v = v,
    vl = vl
    )
  basic_checks(S, allow_empty = allow_empty)
  return S
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2666 | printstyled(stdout, " mathematical properties\n", color=:light_green)
H = randSeisHdr()
V = randSeisEvent()
S = randSeisData()
T = convert(EventTraceData, S)
C = randSeisChannel()
D = convert(EventChannel, C)
printstyled(stdout, " reflexivity\n", color=:light_green)
@test C==C
@test D==D
@test H==H
@test S==S
@test T==T
@test V==V
printstyled(stdout, " commutativity\n", color=:light_green)
printstyled(stdout, " S1 + S2 == S2 + S1\n", color=:light_green)
# SeisData + SeisData
S1 = randSeisData()
S2 = randSeisData()
@test S1 + S2 == S2 + S1
# EventTraceData + EventTraceData
S1 = convert(EventTraceData, randSeisData())
S2 = convert(EventTraceData, randSeisData())
@test S1 + S2 == S2 + S1
printstyled(stdout, " S + C == C + S\n", color=:light_green)
# SeisData + SeisChannel
S = randSeisData()
C = randSeisChannel()
U = deepcopy(S)
push!(U, C)
U = sort(U)
@test C + S == S + C == U
@test findid(C, U) == findid(C, S + C) == findid(C, C + S)
# EventTraceData + EventChannel
S = convert(EventTraceData, randSeisData())
C = convert(EventChannel, randSeisChannel())
U = deepcopy(S)
push!(U, C)
U = sort(U)
@test C + S == S + C == U
@test findid(C, U) == findid(C, S + C) == findid(C, C + S)
printstyled(stdout, " C1 + C2 == C2 + C1\n", color=:light_green)
# SeisChannel + SeisChannel
C1 = randSeisChannel()
C2 = randSeisChannel()
@test C1 + C2 == C2 + C1
# EventChannel + EventChannel
C1 = convert(EventChannel,randSeisChannel())
C2 = convert(EventChannel,randSeisChannel())
@test C1 + C2 == C2 + C1
printstyled(stdout, " S + U - U == S (for sorted S)\n", color=:light_green)
# SeisData + SeisData - SeisData
(S, T) = mktestseis()
S = sort(S)
@test (S + T - T) == S
# EventTraceData + EventTraceData - EventTraceData
(S, T) = mktestseis()
S = convert(EventTraceData, S)
T = convert(EventTraceData, T)
@test (S + T - T) == sort(S)
printstyled(stdout, " associativity\n", color=:light_green)
printstyled(stdout, " (S1 + S2) + S3 == S1 + (S2 + S3)\n", color=:light_green)
# SeisData + SeisData + SeisData
S1 = randSeisData()
S2 = randSeisData()
S3 = randSeisData()
@test S1 + (S2 + S3) == (S1 + S2) + S3
printstyled(stdout, " (S1 + S2) + C == S1 + (S2 + C)\n", color=:light_green)
# SeisData + SeisChannel
C1 = randSeisChannel()
@test S1 + (S2 + C1) == (S1 + S2) + C1
# EventTraceData + EventTraceData + EventTraceData
S1 = convert(EventTraceData, randSeisData())
S2 = convert(EventTraceData, randSeisData())
S3 = convert(EventTraceData, randSeisData())
@test S1 + (S2 + S3) == (S1 + S2) + S3
# EventTraceData + EventChannel
C1 = convert(EventChannel, randSeisChannel())
@test S1 + (S2 + C1) == (S1 + S2) + C1
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1793 | # Responses
printstyled(" InstrumentResponse subtypes\n", color=:light_green)
@test resptyp2code(CoeffResp()) == 0x03
redirect_stdout(out) do
v = 1.0 + 1.0*im
X = rand(12,3)
Y = rand(12,3)
R = GenResp("", X, Y)
@test R == GenResp(complex.(X,Y))
@test hash(R) == hash(GenResp(complex.(X,Y)))
@test sizeof(R) > sizeof(X)+sizeof(Y)
@test R[10] == getindex(R.resp, 10) == getindex(R, 10)
@test R[11,2] == getindex(R.resp, 11, 2) == getindex(R, 11, 2)
R[3] = v
R[4,2] = 1.0
@test getindex(R.resp, 3) == v == getindex(R, 3, 1)
@test real(getindex(R.resp, 4, 2)) == 1.0
show(stdout, R)
repr(R, context=:compact=>true)
for T in (CoeffResp, GenResp, MultiStageResp, PZResp, PZResp64)
R = T()
@test isempty(R) == true
show(stdout, R)
repr(R, context=:compact=>true)
repr(R, context=:compact=>false)
R2 = T()
@test R == R2
@test hash(R) == hash(R2)
@test code2resptyp(resptyp2code(R)) == T
@test sizeof(R) == sizeof(R2)
end
nr = 255
R = MultiStageResp(nr)
for f in (:fs, :gain, :fg, :delay, :corr, :fac, :os)
setfield!(R, f, rand(eltype(getfield(R, f)), nr))
end
i = Array{String,1}(undef,nr)
o = Array{String,1}(undef,nr)
o[1] = "m/s"
i[1] = "{counts}"
for j = 2:nr
i[j] = randstring(2^rand(2:6))
o[j] = i[j-1]
end
R.i = i
R.o = o
R.stage[1] = RandSeis.rand_resp(20.0, 0)
for i = 2:nr
R.stage[i] = CoeffResp(b = rand(rand(1:1200)))
end
show(R)
end
printstyled(" resp codes\n", color=:light_green)
codes = (0x00, 0x01, 0x02, 0x03, 0x04, 0xff)
types = (GenResp, PZResp, PZResp64, CoeffResp, MultiStageResp, Nothing)
for c in codes
T = code2resptyp(c)()
@test c == resptyp2code(T)
end
for (i, T) in enumerate(types)
resp = T()
@test resptyp2code(resp) == codes[i]
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3792 | printstyled(stdout," extended methods\n", color=:light_green)
printstyled(stdout," getindex\n", color=:light_green)
(S,T) = mktestseis()
# @test findid(T, S) == [0, 0, 4, 5]
printstyled(stdout," getindex + Int on SeisData ==> SeisChannel\n", color=:light_green)
C = S[1]
@test S[firstindex(S)] == C
i_targ = 3
C = S[i_targ]
test_fields_preserved(C,S,i_targ)
printstyled(stdout," getindex + Range on SeisData ==> SeisData\n", color=:light_green)
D = S[i_targ:i_targ+1]
test_fields_preserved(D,S,2,i_targ+1)
printstyled(stdout," setindex!\n", color=:light_green)
A = SeisData(3)
setindex!(A, C, 3)
test_fields_preserved(C, A, 3)
A[1:2]=D
test_fields_preserved(A, S, 2, i_targ+1)
test_fields_preserved(C, S, 3)
printstyled(stdout," in\n", color=:light_green)
@test ("XX.TMP01.00.BHZ" in S.id)
printstyled(stdout," findid\n", color=:light_green)
@test ≈(findid("CC.LON..BHZ",S), findid(S,"CC.LON..BHZ"))
@test ≈(findid(S,"CC.LON..BHZ"), 4)
@test ≈(findid(S,C), 3)
printstyled(stdout," isempty\n", color=:light_green)
D = SeisData()
@test isempty(D)
printstyled(stdout," append!\n", color=:light_green)
(S,T) = mktestseis()
append!(S, T)
sizetest(S, 9)
C = deepcopy(S[4])
deleteat!(S, 4)
sizetest(S, 8)
@test ≈(length(findall(S.name.==C.name)),1)
C = deepcopy(S[3:4])
deleteat!(S,3:4)
nt = 6
@test ≈(S.n, nt)
@test ≈(maximum([length(getfield(S,i)) for i in datafields]), nt)
@test ≈(minimum([length(getfield(C,i)) for i in datafields]), 2)
@test length(findall(S.name.==C.name[1])).*length(findall(S.id.==C.id[1])) == 0
@test length(findall(S.name.==C.name[2])).*length(findall(S.id.==C.id[2])) == 1
s = "CC.LON..BHZ"
delete!(S, s)
sizetest(S, 5)
s = r"EH"
# @test_throws BoundsError S - s
delete!(S, s, exact=false)
sizetest(S, 2)
# untested methods in SeisData
for i = 1:5
S = randSeisData()
@test sizeof(S) > 0
r = S.id[1]
U = S - r
@test sizeof(U) < sizeof(S)
@test S.n == U.n + 1
T = pull(S,r)
@test isa(T, SeisChannel)
@test(U == S)
S = randSeisData(5)
C = S[1]
@test firstindex(S) == 1
@test S[firstindex(S)] == S[1] == C
@test length(S) == S.n == lastindex(S)
push!(S, SeisChannel())
T = prune(S)
prune!(S)
@test S == T
@test S.n == T.n == 5
end
H = randSeisHdr()
@test sizeof(H) > 0
clear_notes!(H)
@test length(H.notes) == 1
(S,T) = mktestseis()
U = S-T
sizetest(S,5)
sizetest(T,4)
sizetest(U,3)
(S,T) = mktestseis()
U = deepcopy(S)
deleteat!(U, 1:3)
@test (S - [1,2,3]) == U
sizetest(S,5)
(S,T) = mktestseis()
@test in("UW.SEP..EHZ",S)
U = S[3:S.n]
V = deepcopy(S)
deleteat!(S, 1:2)
deleteat!(V, [1,2])
@test S == U == V
delete!(S, "CC.", exact=false)
delete!(U,V)
(S,T) = mktestseis()
X = deepcopy(T)
U = pull(S, 5)
@test U.id == "UW.SEP..EHZ"
sizetest(S, 4)
@test findid("UW.SEP..EHZ", S.id) == 0
U = pull(S, 3:4)
sizetest(S, 2)
@test findid(U.id[1], S.id) == 0
@test findid(U.id[2], S.id) == 0
V = pull(T, [2,3,4])
sizetest(T, 1)
@test findid(V.id[1], T.id) == 0
@test findid(V.id[2], T.id) == 0
@test findid(V.id[3], T.id) == 0
V = sort(V)
deleteat!(X, 1)
# @test findid(V,X) == [2,3,1]
Y = sort(X)
@test V == Y
# added 2019-02-23
S = SeisData(randSeisData(5), SeisChannel(), SeisChannel(),
SeisChannel(id="UW.SEP..EHZ", name="Darth Exploded",
loc=GeoLoc(lat=46.1967, lon=-122.1875, el=1440.0), t=[1 0; 1024 0], x=rand(1024)))
prune!(S)
@test (S.n == 6)
J = findchan("EHZ",S)
@test (6 in J)
printstyled(" SeisChannel methods\n", color=:light_green)
id = "UW.SEP..EHZ"
name = "Darth Exploded"
Ch = randSeisChannel()
Ch.id = id
Ch.name = name
S = SeisData(Ch)
@test in(id, Ch) == true
@test isempty(Ch) == false
@test convert(SeisData, Ch) == SeisData(Ch)
@test findid(Ch, S) == 1
@test sizeof(Ch) > 0
@test lastindex(S) == 1
Ch.gain = 1.0
Ch.fs = 0.0
@test isempty(Ch) == false
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2096 | printstyled(" uniqueness of memory allocation\n", color=:light_green)
nx = 1024
S = SeisData(2)
for i=1:S.n
# Check arrays of same type
@test pointer_from_objref(S.loc[i]) != pointer_from_objref(S.x[i])
@test pointer_from_objref(S.gain) != pointer_from_objref(S.fs)
end
for i in datafields
v = getfield(S,i)
for j in datafields
if i != j
@test pointer_from_objref(getfield(S,i)) != pointer_from_objref(getfield(S,j))
end
end
end
# Manually set all fields in the stupidest way possible
S.id[1] *= "...YYY"
S.name[1] *= "New Station"
S.loc[1] = UTMLoc()
S.fs[1] += 100.0
S.gain[1] *= 2.0
S.resp[1] = GenResp(resp=[0.0+0.0*im 1.0+1.0*im; 0.0+0.0*im 1.0-1.0*im])
S.units[1] *= "Unknown"
S.src[1] *= "www"
append!(S.notes[1], ["sadfasgasfg","kn4ntl42ntlk4n"])
S.misc[1]["P"] = 2.0
S.t[1] = [S.t[1]; [1 round(Int, time()*sμ); nx 0]]
append!(S.x[1], rand(nx))
for f in datafields
v = getfield(S, f)
@test v[1] != v[2]
end
# Now set all fields like a smart person
S = SeisData(2)
S.id[1] = "...YYY"
S.name[1] = "New Station"
S.loc[1] = GeoLoc()
S.fs[1] = 100.0
S.gain[1] = 2.0
S.resp[1] = PZResp([0.0+0.0*im 1.0+0.767*im; 0.0+0.0*im 1.0-0.767*im])
S.units[1] = "Unknown"
S.src[1] = "www"
S.notes[1] = ["sadfasgasfg","kn4ntl42ntlk4n"]
S.misc[1] = Dict{String,Any}("P" => 2.0)
S.t[1] = [1 round(Int, time()*sμ); nx 0]
S.x[1] = rand(nx)
S.id[2] = "...zzz"
S.name[2] = "Old Station"
S.loc[2] = GeoLoc()
S.fs[2] = 50.0
S.gain[2] = 22.0
S.resp[2] = PZResp64(z = [0.0+0.0*im, 0.0+0.0*im], p = [1.0+1.0*im, 1.0-1.0*im])
S.units[2] = "ms/2"
S.src[2] = "file"
S.notes[2] = ["0913840183","klnelgng"]
S.misc[2] = Dict{String,Any}("S" => 6.5)
S.t[2] = [1 round(Int, time()*sμ); nx 0]
S.x[2] = rand(nx)
for i in datafields
v = getfield(S,i)
if !isimmutable(v[1])
@test pointer_from_objref(v[1]) != pointer_from_objref(v[2])
end
end
# Unambiguous numbered initialization
S1 = SeisData(SeisChannel(), SeisChannel())
S2 = SeisData(2)
S3 = SeisData(S2)
S4 = SeisData(SeisData(1), SeisChannel())
S5 = SeisData(SeisChannel(), SeisData(1))
@test (S1 == S2 == S3 == S4 == S5)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1687 | Ch = randSeisChannel()
clear_notes!(Ch)
@test length(Ch.notes) == 1
printstyled(" annotation and logging\n", color=:light_green)
S = randSeisData(2)
id_str = "XX.STA.00.EHZ"
S.id[1] = id_str
printstyled(" note!\n", color=:light_green)
note!(S, 1, "hi")
@test occursin("hi", S.notes[1][end])
note!(S, "poor SNR")
@test occursin("poor SNR", S.notes[2][end])
note!(S, string(id_str, " SNR OK"))
@test occursin(" SNR OK", S.notes[1][end])
note!(S, id_str, "why is it clipping again")
@test occursin("clipping", S.notes[1][end])
printstyled(" clear_notes!\n", color=:light_green)
clear_notes!(S, 1)
@test length(S.notes[1]) == 1
@test occursin("notes cleared.", S.notes[1][1])
clear_notes!(S)
for i = 1:2
@test length(S.notes[i]) == 1
@test occursin("notes cleared.", S.notes[i][1])
end
note!(S, 2, "whee")
clear_notes!(S, id_str)
@test S.notes[1] != S.notes[2]
@test_throws ErrorException clear_notes!(S, "YY.STA.11.BHE")
clear_notes!(S)
Ev = randSeisEvent()
clear_notes!(Ev)
for i = 1:Ev.data.n
@test length(Ev.data.notes[i]) == 1
@test occursin("notes cleared.", Ev.data.notes[i][1])
end
@test length(Ev.hdr.notes) == 1
@test occursin("notes cleared.", Ev.hdr.notes[1])
Ngaps = [size(S.t[i],1)-2 for i =1:2]
ungap!(S)
for i = 1:2
@test ==(size(S.t[i],1), 2)
end
S.gain = rand(Float64,2)
unscale!(S)
for i = 1:2
@test ==(S.gain[i], 1.0)
end
demean!(S)
printstyled(" accuracy of automatic logging\n", color=:light_green)
for i = 1:2
c = (Ngaps[i]>0) ? 1 : 0
@test length(S.notes[i]) == (3+c)
if c > 0
@test occursin("ungap!", S.notes[i][2])
end
@test occursin("unscale!", S.notes[i][2+c])
@test occursin("demean!", S.notes[i][3+c])
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 749 | printstyled(stdout," show\n", color=:light_green)
# Exercise show/summary on channels and data sets of various sizes; these
# tests pass if display code runs without error (output is sent to `out`).
# SeisChannel show
S = SeisData()
C = randSeisChannel()
C.fs = 100.0
nx = (1, 2, 3, 4, 5, 10, 100, 10000)  # data lengths to display
C.t = Array{Int64,2}(undef, 0, 2)     # empty time matrix
C.x = Float32[]                       # empty data vector
push!(S, C)
redirect_stdout(out) do
  # show a channel at each data length, accumulating into S
  for i in nx
    C.t = [1 0; i 0]
    C.x = randn(Float32, i)
    show(C)
    push!(S, C)
  end
  show(S)
end
redirect_stdout(out) do
  # show
  show(breaking_seis())
  show(randSeisData(1))
  show(SeisChannel())
  show(SeisData())
  show(randSeisChannel())
  show(randSeisData(10, c=1.0))
  # summary
  summary(randSeisChannel())
  summary(randSeisData())
  # invoke help-only functions
  seed_support()
  mseed_support()
  dataless_support()
  resp_wont_read()
  @test web_chanspec() == nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 355 | printstyled(" \"splat\" structure creation\n", color=:light_green)
# Build SeisData and EventTraceData from the same mixed "splat" of
# channel/data/event objects; both constructors must yield identical
# values in every shared data field.
U = randSeisData()
C = randSeisChannel()
W = randSeisEvent()
TD = convert(EventTraceData, randSeisData())
EC = convert(EventChannel, randSeisChannel())
S = SeisData( U, C, EC, TD, W)
T = EventTraceData( U, C, EC, TD, W)
for f in datafields
  @test getfield(S, f) == getfield(T, f)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 4379 | # Locs
printstyled(" InstrumentPosition\n", color=:light_green)
redirect_stdout(out) do
L = GenLoc(); show(stdout, L)
@test isempty(L) == true
@test hash(L) == hash(GenLoc())
L = GenLoc(rand(Float64,12))
@test getindex(L, 10) == getindex(L.loc, 10)
setindex!(L, 1.0, 10)
@test getindex(L.loc, 10) == 1.0
@test isempty(L) == false
@test sizeof(L) > sizeof(L.loc)
L1 = GeoLoc(datum="WGS84")
L2 = GeoLoc(datum="Unknown")
show(stdout, L1)
@test !(L1 == L2)
@test sizeof(L1) > 104
repr(L1, context=:compact=>true)
repr(L1, context=:compact=>false)
L = UTMLoc()
show(stdout, L)
@test isempty(L)
@test hash(L) == hash(UTMLoc())
@test L == UTMLoc()
@test sizeof(L) == 114
L2 = UTMLoc(datum="NAD83")
@test isequal(L, L2) == false
repr(L, context=:compact=>true)
repr(L, context=:compact=>false)
L = XYLoc()
show(stdout, L)
@test isempty(L)
@test hash(L) == hash(XYLoc())
L.x = 10.0
L.datum = "Ye olde map of 1833"
@test !isempty(L)
@test !(L == UTMLoc())
@test sizeof(L) > 136
L2 = XYLoc()
@test isequal(L, L2) == false
repr(L, context=:compact=>true)
repr(L, context=:compact=>false)
L = EQLoc()
show(stdout, L)
@test isempty(L)
@test hash(L) == hash(EQLoc())
@test L == EQLoc()
@test sizeof(L) > 114
repr(L, context=:compact=>true)
repr(L, context=:compact=>false)
L = NodalLoc()
show(stdout, L)
@test isempty(L)
@test hash(L) == hash(NodalLoc())
@test L == NodalLoc()
@test sizeof(L) == 48
repr(L, context=:compact=>true)
repr(L, context=:compact=>false)
end
# Seismic phases
printstyled(" SeisPha\n", color=:light_green)
@test isempty(SeisPha())
printstyled(" PhaseCat\n", color=:light_green)
@test isempty(PhaseCat())
# Seismic phase catalogs
@test isempty(PhaseCat())
P = PhaseCat()
@test isequal(PhaseCat(), P)
# EventChannel, EventTraceData
printstyled(" EventChannel, EventTraceData\n", color=:light_green)
EC1 = EventChannel()
@test isempty(EC1)
TD = EventTraceData()
@test isempty(TD)
@test EventTraceData(EC1) == EventTraceData(EventChannel())
TD1 = convert(EventTraceData, randSeisData())
TD2 = convert(EventTraceData, randSeisData())
EC1 = TD1[1]
EC1.id = "CC.VALT..BHZ"
TD1.id[2] = "CC.VALT..BHZ"
@test !isempty(EC1)
EC2 = EventChannel( az = 180*rand(),
baz = 180*rand(),
dist = 360*rand(),
fs = 10.0*rand(1:10),
gain = 10.0^rand(1:10),
id = "YY.MONGO..FOO",
loc = UTMLoc(),
misc = Dict{String,Any}("Dont" => "Science While Drink"),
name = "<I Made This>",
notes = Array{String,1}([tnote("It clipped"), tnote("It clipped again")]),
pha = PhaseCat("P" => SeisPha(),
"S" => SeisPha(rand()*100.0,
rand()*100.0,
rand()*100.0,
rand()*100.0,
rand()*100.0,
rand()*100.0,
rand()*100.0,
rand()*100.0,
'D', 'F')),
resp = GenResp(),
src = "foo",
t = Array{Int64,2}([1 1000; 2*length(EC1.x) 0]),
units = "m/s",
x = randn(2*length(EC1.x))
)
@test findid(EC1, TD1) == 2 == findid(TD1, EC1)
@test findid(TD2, EC1) == 0 == findid(EC1, TD2)
@test sizeof(TD1) > sizeof(EC1) > 136
# Cross-Type Tests
C = randSeisChannel()
C.id = identity(EC1.id)
@test findid(C, TD1) == 2 == findid(TD1, C)
@test findid(TD2, C) == 0 == findid(C, TD2)
S1 = randSeisData(12)
S2 = randSeisData(12)
S1.id[11] = "CC.VALT..BHZ"
@test findid(EC1, S1) == 11 == findid(S1, EC1)
@test findid(S2, EC1) == findid(EC1, S2)
TD = EventTraceData(EC2, convert(EventTraceData, randSeisData()))
EC3 = pull(TD, 1)
@test findid(EC3, TD) == 0
setindex!(TD, EC3, 2)
@test findid(EC3, TD) == 2
namestrip!(EC3)
@test EC3.name == "I Made This"
Ev = SeisEvent(hdr=randSeisHdr(), data=TD)
@test sizeof(Ev) > 16
# SeisHdr
@test isempty(SeisHdr())
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2425 | S = randSeisData(3, s=1.0, nx=1024)
@test mkchans(1, S) == [1]
@test mkchans(1:2, S) == [1, 2]
@test mkchans([2,3], S) == [2, 3]
S.x[2] = Float64[]
@test mkchans([2,3], S) == [3]
S.t[3] = Array{Int64, 2}(undef, 0, 2)
@test mkchans(1:3, S, f=:t) == [1,2]
@test mkchans(1:3, S) == [1,3]
S.t[3] = [1 S.t[1][1,2]; length(S.x[3]) 0]
push!(S, randSeisChannel(c=true))
@test mkchans(1:4, S, keepirr=false) == [1, 3]
@test mkchans([2,3,4], S, keepirr=false) == [3]
S = randSeisData(3, s=1.0, nx=1024)
@test get_seis_channels(S, chans=3) == [3]
@test get_seis_channels(S, chans=1:2) == [1,2]
@test get_seis_channels(S, chans=[1,3]) == [1,3]
cr = [1,2,3]
c0 = deepcopy(cr)
filt_seis_chans!(cr, S)
@test cr == c0
S.id[1] = "...0"
filt_seis_chans!(cr, S)
@test cr != c0
@test cr == [2,3]
printstyled("channel_match\n", color=:light_green)
C = SeisChannel()
D = SeisChannel()
@test channel_match(C, D)
C = randSeisChannel(s=true)
D = deepcopy(C)
C.gain = D.gain*0.5
@test channel_match(C, D) == false
@test channel_match(C, D, use_gain = false) == true
C.gain = D.gain
C.fs = D.fs*0.5
@test channel_match(C, D) == false
printstyled("cmatch_p!\n", color=:light_green)
C = randSeisChannel(s=true)
D = deepcopy(C)
C0 = deepcopy(C)
# Scenarios that must work:
# C set, D unset
D.fs = 0.0
D.gain = 1.0
D.loc = GeoLoc()
D.resp = PZResp()
D.units = ""
m = cmatch_p!(C,D)
@test m == true
@test channel_match(C, D) == true
# D set, C unset
C.fs = 0.0
C.gain = 1.0
C.loc = GeoLoc()
C.resp = PZResp()
C.units = ""
m = cmatch_p!(C,D)
@test m == true
@test channel_match(C, D) == true
# Values must preserve those in C0
@test channel_match(C, C0) == true
# Scenarios that must fail
C = randSeisChannel(s=true)
C.loc = GeoLoc(lat = 48.79911, lon=-122.54064, el=45.1104)
C.resp = PZResp(a0 = 223.43015f0, f0 = 2.0f0, p = ComplexF32.([-8.89+8.89im, 8.89-8.89im]))
C0 = deepcopy(C)
D = deepcopy(C0)
while D.units == C.units
D.units = randstring()
end
@test cmatch_p!(C,D) == false
@test C == C0
D = deepcopy(C0)
D.fs = 2.0*C.fs
D0 = deepcopy(D)
@test cmatch_p!(C,D) == false
@test C == C0
@test D == D0
D = deepcopy(C0)
D.gain = 2.0*C.gain
D0 = deepcopy(D)
@test cmatch_p!(C,D) == false
@test C == C0
@test D == D0
D = deepcopy(C0)
D.loc.lat = 89.1
D0 = deepcopy(D)
@test cmatch_p!(C,D) == false
@test C == C0
@test D == D0
D = deepcopy(C0)
D.resp.f0 = 1.0f0
D0 = deepcopy(D)
@test cmatch_p!(C,D) == false
@test C == C0
@test D == D0
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 251 | printstyled(" findid\n", color=:light_green)
# findid: locating a channel ID in an ID list must agree between the
# String and codeunits forms and with findfirst; a missing ID returns 0.
id = "UW.TDH..EHZ"
IDs = ["UW.WWVB..TIM","UW.TCG..TIM","UW.TDH..EHZ","UW.VLM..EHZ"]
@test findid(id, IDs) == findid(codeunits(id), IDs) == findfirst(IDs.==id) == 3
@test findid("aslkasnglknsgf", IDs) == 0
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 475 | printstyled(" get_seis_channels\n", color=:light_green)
# get_seis_channels: Integer, UnitRange, and Array channel specifiers must
# agree; filt_seis_chans! must drop channels whose IDs are not seismic.
S = randSeisData(20, s=1.0)
S = S[get_seis_channels(S)]
@test get_seis_channels(S, chans=1) == get_seis_channels(S, chans=1:1) == get_seis_channels(S, chans=[1]) == [1]
c1 = collect(1:S.n)
filt_seis_chans!(c1, S)
@test c1 == 1:S.n
C1 = randSeisChannel(s=true)
C2 = randSeisChannel(s=false, c=true)
C2.id = "...YYY"  # non-seismic channel code
S = SeisData(C1, C2, S)
chans = collect(1:S.n)
filt_seis_chans!(chans, S)
@test (2 in chans) == false  # channel 2 (C2) must have been filtered out
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3998 | printstyled(" guess\n", color=:light_green)
# This should error
@test_throws ErrorException guess("www.google.com")
# These should work
printstyled(" ability to determine file types unambiguously\n", color=:light_green)
ah1f = path*"/SampleFiles/AH/ah1.f"
ah2f = path*"/SampleFiles/AH/ah2.f"
pasf = path*"/SampleFiles/SEGY/03.334.12.09.00.0362.1"
sac = path .* [ "/SampleFiles/SAC/test_be.sac",
"/SampleFiles/SAC/test_le.sac" ]
segyf = path*"/SampleFiles/SEGY/03.334.12.09.00.0362.1"
segpat = path*"/SampleFiles/SEGY/03*"
segyf2 = path*"/SampleFiles/SEGY/FORGE_78-32_iDASv3-P11_UTC190428135038.sgy"
sudsf = path*"/SampleFiles/Restricted/10081701.WVP"
uw = path*"/SampleFiles/UW/" .* ["00012502123W"]
geocsv1 = path*"/SampleFiles/ASCII/geo-tspair.csv"
geocsv2 = path*"/SampleFiles/ASCII/geo-slist.csv"
lennf = path*"/SampleFiles/ASCII/0215162000.c00"
seisf = path*"/SampleFiles/SEIS/2019_M7.1_Ridgecrest.seis"
# xml_stfile = path*"/SampleFiles/fdsnws-station_2017-01-12T03-17-42Z.xml"
# resp_file = path*"/SampleFiles/RESP.cat"
self = path*"/Utils/test_guess.jl"
printstyled(" known file formats\n", color=:light_green)
redirect_stdout(out) do
@test guess(ah1f, v=3) == ("ah1", true)
end
@test guess(ah2f) == ("ah2", true)
[@test guess(i) == ("bottle", false) for i in ls(path*"/SampleFiles/Bottle/*")]
@test guess(pasf) == ("passcal", false)
[@test guess(i) == ("mseed", false) for i in ls(path*"/SampleFiles/SEED/*seed")]
# Tests for SeisIO's file-format autodetection, `guess`, which returns a Tuple
# (format_name::String, swap_bytes::Bool), then a round-trip of read_data using
# that autodetection on a SEG Y file.
# NOTE(review): `sac`, `segyf`, `segyf2`, `sudsf`, `uw`, `geocsv1`, `geocsv2`,
# `lennf`, `seisf`, `self`, `segpat`, and `path` are defined earlier in the
# test suite, outside this file chunk — confirm against the test runner.
[@test guess(i) == ("sac", false) for i in sac]
[@test guess(i) == ("sac", false) for i in ls(path*"/SampleFiles/SUDS/*sac")]
@test guess(segyf2) == ("segy", true)
if safe_isfile(sudsf)
  @test guess(sudsf) == ("suds", false)
end
[@test guess(i) == ("uw", true) for i in uw]
@test guess(geocsv1) == ("geocsv", false)
@test guess(geocsv2) == ("geocsv.slist", false)
@test guess(lennf) == ("lennartz", false)
# @test guess(xml_stfile) == ("sxml", false)
# @test guess(resp_file) == ("resp", false)
@test guess(seisf) == ("seisio", false)
@test guess(self) == ("unknown", false)
# Restricted files
# These samples are not redistributable; the block is skipped when the
# Restricted directory is absent.
if safe_isdir(path*"/SampleFiles/Restricted")
  path2 = (path*"/SampleFiles/Restricted/")
  [@test guess(i) == ("mseed", false) for i in ls(path2*"*seed")]
  [@test guess(i) == ("win32", true) for i in ls(path2*"*cnt")]
  @test guess(path2*"test_rev_1.segy") == ("segy", true)
end
# Does the method for read_data with guess actually work?
printstyled("  read_data with guess()\n", color=:light_green)
SEG = read_data(segyf, full=true)
# With full=true, the SEG Y trace header fields land in :misc; check each one.
@test SEG.misc[1]["gain_const"] == 32
@test SEG.gain[1] == SEG.misc[1]["scale_fac"]
@test isapprox(1.0/SEG.gain[1], 4.47021e-07/SEG.misc[1]["gain_const"], atol=eps(Float32))
@test SEG.fs[1] == 100.0 == 1.0e6 / SEG.misc[1]["delta"]
@test lastindex(SEG.x[1]) == 247698
@test SEG.misc[1]["trace_seq_line"] == 3
@test SEG.misc[1]["trace_seq_file"] == 3
@test SEG.misc[1]["rec_no"] == 1
@test SEG.misc[1]["channel_no"] == 2
@test SEG.misc[1]["trace_id_code"] == 3
@test SEG.misc[1]["h_units_code"] == 2
@test SEG.misc[1]["nx"] == 32767
@test SEG.misc[1]["samp_rate"] == 10000
@test SEG.misc[1]["gain_type"] == 1
@test SEG.misc[1]["year"] == 2003
@test SEG.misc[1]["day"] == 334
@test SEG.misc[1]["hour"] == 12
@test SEG.misc[1]["minute"] == 9
@test SEG.misc[1]["second"] == 0
@test SEG.misc[1]["ms"] == 5
@test SEG.misc[1]["time_code"] == 2
@test SEG.misc[1]["trigyear"] == 2003
@test SEG.misc[1]["trigday"] == 334
@test SEG.misc[1]["trighour"] == 12
@test SEG.misc[1]["trigminute"] == 9
@test SEG.misc[1]["trigsecond"] == 0
@test SEG.misc[1]["trigms"] == 5
@test SEG.misc[1]["data_form"] == 1
@test SEG.misc[1]["inst_no"] == 0x016a # 0362
@test strip(SEG.misc[1]["sensor_serial"]) == "UNKNOWN"
@test strip(SEG.misc[1]["station_name"]) == "362"
# In-place read into an existing structure should reproduce the same result
St = SeisData()
read_data!(St, segyf, full=true)
if Sys.iswindows() == false
  Su = SeisData()
  read_data!(Su, segpat, full=true)
  # @test SEG == St == Su
  # BUG: path inconsistency with symlinks leads to different :src strings
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 231 | S = SeisData(3)
# Populate a 3-channel structure with unequal data lengths (1024, 768, 1900
# samples) and verify that nx_max reports the largest channel length.
# NOTE: `S = SeisData(3)` is created immediately above this block.
S.fs = [1.0, 2.0, 3.0]
S.t[1] = [1 0; 2 1092831; 1000 12; 1023 12312421; 1024 0]
S.t[2] = [1 0; 768 0]
S.t[3] = [1 0; 1900 0]
for (k, nxk) in enumerate((1024, 768, 1900))
  S.x[k] = randn(nxk)
end
@test nx_max(S) == 1900
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2630 | printstyled(" parsing\n", color=:light_green)
# Tests for SeisIO's low-level string/stream parsers:
#   string_time / stream_time : ISO-8601-ish date strings -> epoch microseconds
#   stream_float              : ASCII float parsing from an IOBuffer (Float32)
#   buf_to_double             : byte-buffer exponent-notation parsing (Float64)
import SeisIO: string_time, stream_float, stream_time
date_buf = BUF.date_buf
# Each string in s must parse to the matching epoch time (in μs) in t;
# precision cases cover 0.1 s down to 1 μs.
s = [ "2010",
      "2010-01-03",
      "2012-03-01",
      "2010-11-17",
      "2010-11-17T20:20:10",
      "2010-11-17T20:20:01",
      "2010-11-17T20:20:00.100000Z",
      "2010-11-17T20:20:00.010000Z",
      "2010-11-17T20:20:00.001000Z",
      "2010-11-17T20:20:00.000100Z",
      "2010-11-17T20:20:00.000010Z",
      "2010-11-17T20:20:00.000001Z" ]
t = [ 1262304000000000,
      1262476800000000,
      1330560000000000,
      1289952000000000,
      1290025210000000,
      1290025201000000,
      1290025200100000,
      1290025200010000,
      1290025200001000,
      1290025200000100,
      1290025200000010,
      1290025200000001 ]
for (i,j) in enumerate(s)
  @test string_time(j, date_buf) == t[i]
  @test string_time(j) == t[i]
  # stream_time reads until a delimiter; '~' terminates the field here
  @test stream_time(IOBuffer(j*"~"), date_buf) == t[i]
end
# Parseable float strings; note Fortran-style 'f' exponents and trailing
# garbage ("1232.34a", "313,") that the parser is expected to tolerate.
float_strings = ["49.981",
                 ".9183948913749817",
                 "1232.0",
                 "1232.34a",
                 "0",
                 "313,",
                 "-12345",
                 "-12345.0",
                 "3.1E19",
                 "1.23E-08",
                 "+1.23E+8",
                 "1.234E-8",
                 "1.234E+8",
                 "3.4028235f38", # largest finite Float32
                 "-3.4028235f38", # most negative finite Float32 (typemin of finite range)
                 "1.0f-45"]
float_vals = Float32[49.981,
                     0.91839486f0,
                     1232.0,
                     1232.34,
                     0.0,
                     313.0,
                     -12345.0,
                     -12345.0,
                     3.1f19,
                     1.23f-8,
                     1.23f8,
                     1.234f-8,
                     1.234f8,
                     3.4028235f38,
                     -3.4028235f38,
                     1.0f-45]
for (i,j) in enumerate(float_strings)
  # println("test ", i, ": ", j)
  b = IOBuffer(j*"~")
  @test stream_float(b, 0x00) ≈ float_vals[i]
  close(b)
end
# Malformed float strings: behavior here is informational only (printed, not
# asserted), since stream_float may either parse a prefix or throw.
bad_floats = ["49.9.81",
              ".9f",
              ".9fff183948913749817",
              "+-+-+-1232.0",
              "12a32.34a",
              "threeve",
              "Texa\$",
              "13.13.13.",
              "-.123efe"]
printstyled("    how bad float strings parse:\n", color=:light_green)
for (i,j) in enumerate(bad_floats)
  # println("test ", i, ": ", j, " => ", string(bad_vals[i]))
  b = IOBuffer(j*"~")
  try
    f = stream_float(b, 0x00)
    println("      ", j, " => ", string(f))
  catch err
    println("      ", j, " => error (", err, ")")
  end
  close(b)
end
printstyled("    buf_to_double\n", color=:light_green)
# Strings are paired: entries (2n-1, 2n) both map to expect[n], exercising
# upper/lower-case 'E' and Fortran 'F'/'f' exponent markers.
expect = [1.23e45, 1.23e21, 1.234e35, 1.23e23]
for (n, str) in enumerate(["1.23E45", "1.23e45", "1.23F21", "1.23f21", "1234E32", "1234e32", "123f21", "123F21"])
  buf = Array{UInt8,1}(str)
  L = length(str)
  x = expect[ceil(Int64, n/2)]
  @test x ≈ buf_to_double(buf, L)
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 2528 | printstyled(" units\n", color=:light_green)
# Cases from randSeisChannel
# Tests for SeisIO's unit handling: vucum (UCUM validity check),
# units2ucum (conversion of common unit spellings to UCUM), and
# validate_units on SeisData/SeisChannel structures.
printstyled("    check that RandSeis uses valid UCUM units\n", color=:light_green)
redirect_stdout(out) do
  u = join(SeisIO.RandSeis.irregular_units, '.')
  @test(vucum(u, v=2))
  # Every RandSeis unit string should already be valid UCUM (identity map)
  for u in SeisIO.RandSeis.irregular_units
    @test units2ucum(u) == u
  end
end
S = randSeisData()
@test validate_units(S) == trues(S.n)
# A deliberately invalid unit string ("FOO") must flag only its channel
S = randSeisData(2)
S.units = [S.units[1], "FOO"]
@test validate_units(S) == [true, false]
C = S[1]
@test validate_units(C)
# Free-text units are rejected with a logged warning
@test_logs (:warn, "Error thrown for unit string: meters per second") SeisIO.vucum("meters per second")
# "^" exponent notation is not valid UCUM (UCUM writes m/s2)
@test vucum("m/s^2") == false
# Now some real units that show up constantly in web requests
printstyled("    test conversions to UCUM units\n", color=:light_green)
# Velocity: caret, double-star, and product-with-negative-exponent spellings
@test units2ucum("m/s^1") == "m/s"
@test units2ucum("m/s**1") == "m/s"
@test units2ucum("m s^-1") == "m/s"
@test units2ucum("m*s^-1") == "m/s"
@test units2ucum("m.s^-1") == "m/s"
@test units2ucum("m⋅s^-1") == "m/s"
@test units2ucum("m s**-1") == "m/s"
@test units2ucum("m*s**-1") == "m/s"
@test units2ucum("m.s**-1") == "m/s"
@test units2ucum("m⋅s**-1") == "m/s"
@test units2ucum("m × s^-1") == "m/s"
@test units2ucum("m ⋅ s^-1") == "m/s"
@test units2ucum("nm/s") == "nm/s"
@test units2ucum("nm/s^1") == "nm/s"
@test units2ucum("nm/s**1") == "nm/s"
@test units2ucum("nm s^-1") == "nm/s"
@test units2ucum("nm*s^-1") == "nm/s"
@test units2ucum("nm.s^-1") == "nm/s"
@test units2ucum("nm.s**-1") == "nm/s"
@test units2ucum("nm*s**-1") == "nm/s"
@test units2ucum("nm s**-1") == "nm/s"
# Acceleration
@test units2ucum("m/s^2") == "m/s2"
@test units2ucum("m/s**2") == "m/s2"
@test units2ucum("m s^-2") == "m/s2"
@test units2ucum("m*s^-2") == "m/s2"
@test units2ucum("m.s^-2") == "m/s2"
@test units2ucum("m⋅s^-2") == "m/s2"
@test units2ucum("m s**-2") == "m/s2"
@test units2ucum("m*s**-2") == "m/s2"
@test units2ucum("m.s**-2") == "m/s2"
@test units2ucum("m⋅s**-2") == "m/s2"
# Fictitious cases; we're unlikely to see anything like these
@test units2ucum("s^43/s**3") == "s43/s3"
@test units2ucum("s**10/m^4") == "s10/m4"
@test units2ucum("s^43 m**-3") == "s43/m3"
@test units2ucum("s^43 cm^-3") == "s43/cm3"
@test units2ucum("s^43 cm^-3 g") == "s43/cm3.g"
@test units2ucum("s^43 cm**+3 g") == "s43.cm3.g"
@test units2ucum("m s^-1 K") == "m/s.K"
@test units2ucum("m s^-1*K") == "m/s.K"
@test units2ucum("s^43 cm^-3 V^-4") == "s43/cm3.V4"
@test units2ucum("s^43.cm^-3*V^-4") == "s43/cm3.V4"
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 7009 | xml_evfile1 = path*"/SampleFiles/XML/fdsnws-event_2017-01-12T03-18-55Z.xml"
# Tests for HTTP request helpers (get_http_req / get_http_post), QuakeML event
# parsing (read_qml), StationXML handling, and SeisData change tracking.
# NOTE(review): `xml_evfile1`, `path`, `out`, and `NOOF` are defined earlier in
# the test suite, outside this file chunk.
xml_evfile2 = path*"/SampleFiles/XML/ISC_2011-tohoku-oki.xml"
xml_stfile = path*"/SampleFiles/XML/fdsnws-station_2017-01-12T03-17-42Z.xml"
d1 = "2019-03-14T02:18:00"
d2 = "2019-03-14T02:28:00"
to = 30
# fdsn_hdr
# @test fdsn_hdr("http://http://service.ncedc.org/fdsnws/station/1/") == ["Host" => "service.ncedc.org", "User-Agent" => "curl/7.60.0", "Accept" => "*/*"]
# @test fdsn_hdr("http://http://service.ncedc.org/fdsnws/dataselect/1/") == ["Host" => "service.ncedc.org", "User-Agent" => "curl/7.60.0", "Accept" => "*/*"]
# @test fdsn_hdr("http://http://service.ncedc.org/fdsnws/event/1/") == ["Host" => "service.ncedc.org", "User-Agent" => "curl/7.60.0", "Accept" => "*/*"]
# @test fdsn_hdr("http://service.scedc.caltech.edu/fdsnws/dataselect/1/") == ["User-Agent" => "Julia"]
# @test fdsn_hdr("http://service.scedc.caltech.edu/fdsnws/station/1/") == ["User-Agent" => "Julia"]
# @test fdsn_hdr("http://service.scedc.caltech.edu/fdsnws/event/1/") == ["User-Agent" => "Julia"]
# @test fdsn_hdr("http://www.cnn.com.") == webhdr
# From FDSN the response code is 200
# The fake station UW.XNXNX.99.QQQ yields an empty-but-OK response; in each
# case the helper should return raw response bytes and parsable == false.
url = "http://service.iris.edu/fdsnws/dataselect/1/query?format=miniseed&net=UW&sta=XNXNX&loc=99&cha=QQQ&start="*d1*"&end="*d2*"&szsrecs=true"
req_info_str = datareq_summ("FDSNWS data", "UW.XNXNX.99.QQQ", d1, d2)
(req, parsable) = get_http_req(url, req_info_str, to, status_exception=false)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
(req,parsable) = get_http_req(url, req_info_str, to, status_exception=true)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
# From IRIS the response code is 400 and we can get an error
url = "http://service.iris.edu/irisws/timeseries/1/query?net=DE&sta=NENA&loc=99&cha=LUFTBALLOONS&start="*d1*"&end="*d2*"&scale=AUTO&format=miniseed"
req_info_str = datareq_summ("IRISWS data", "DE.NENA.99.LUFTBALLOONS", d1, d2)
(req,parsable) = get_http_req(url, req_info_str, to, status_exception=false)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
(req, parsable) = get_http_req(url, req_info_str, to, status_exception=true)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
(req, parsable) = get_http_post(url, NOOF, to, status_exception=false)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
(req, parsable) = get_http_post(url, NOOF, to, status_exception=true)
@test typeof(req) == Array{UInt8,1}
@test startswith(String(req), "HTTP.Messages.Response")
@test parsable == false
printstyled("  FDSN XML\n", color=:light_green)
id_err = "error in Station ID creation!"
unit_err = "units don't match instrument code!"
# Expected values for the 2011 Tohoku-oki event file (7 catalog entries)
true_id = String["3337497", "3279407", "2844986", "2559759", "2092067", "1916079", "2413"]
true_ot = DateTime("2011-03-11T05:46:23.200")
true_loc = Float64[2.2376 38.2963; 93.0144 142.498; 26.3 19.7]
true_mag = Float32[8.6, 9.1, 8.8, 8.5, 8.6, 9.0, 8.5]
true_msc = String["MW", "MW", "MW", "MW", "MW", "MW", ""]
# Reference instrument responses; resp_a0! normalizes the sensitivity a0
r1 = PZResp(f0 = 0.02f0, p = ComplexF32[-981.0+1009.0im, -981.0-1009.0im, -3290.0+1263.0im, -3290.0-1263.0im])
resp_a0!(r1)
r2 = PZResp(Complex{Float32}.([ 0.0+0.0im          -0.037-0.037im
                                0.0+0.0im          -0.037+0.037im
                                -15.15+0.0im       -15.64+0.0im
                                -176.6+0.0im       -97.34-400.7im
                                -463.1-430.5im     -97.34+400.7im
                                -463.1+430.5im     -374.8+0.0im
                                0.0+0.0im          -520.3+0.0im
                                0.0+0.0im          -10530.0-10050.0im
                                0.0+0.0im          -10530.0+10050.0im
                                0.0+0.0im          -13300.0+0.0im
                                0.0+0.0im          -255.097+0.0im ]),rev=true)
r2.z = r2.z[1:6]
r2.f0 = 0.02f0
resp_a0!(r2)
printstyled("    QuakeML test 1\n", color=:light_green)
(EC,RR) = read_qml(xml_evfile1)
Nev = length(EC)
@test Nev == length(true_id)
for i = 1:Nev
  @test EC[i].id == true_id[i]
  @test EC[i].mag.val == true_mag[i]
  @test EC[i].mag.scale == true_msc[i]
end
@test EC[2].ot==true_ot
for i = 1:2
  @test ≈(EC[i].loc.lat, true_loc[1,i])
  @test ≈(EC[i].loc.lon, true_loc[2,i])
  @test ≈(EC[i].loc.dep, true_loc[3,i])
end
printstyled("    QuakeML test 2\n", color=:light_green)
H, R = read_qml(xml_evfile2)
H = H[1]
R = R[1]
# Check basic headers
@test H.typ == "earthquake"
@test H.id == "16461282"
# Check that the correct magnitude is retained
@test H.mag.val ≥ 9.0f0
@test H.mag.scale == "MW"
# Check H.loc
@test H.loc.lat == 38.2963
@test H.loc.lon == 142.498
@test H.loc.dep == 19.7152
@test H.loc.se == 2.1567
@test H.loc.nst == 2643
@test H.loc.src == "smi:ISC/origid=602227159,ISC"
# Check source params
@test R.id == "600002952"
@test R.m0 == 5.312e22
@test R.mt == [1.73e22, -2.81e21, -1.45e22, 2.12e22, 4.55e22, -6.57e21]
@test R.dm == [6.0e19, 5.0e19, 5.0e19, 6.8e20, 6.5e20, 4.0e19]
@test R.pax == [295.0 115.0 205.0; 55.0 35.0 0.0; 5.305e22 -5.319e22 1.4e20]
@test R.planes == [25.0 203.0; 80.0 10.0; 90.0 88.0]
@test R.st.dur == 70.0
@test R.misc["methodID"] == "smi:ISC/methodID=Best_double_couple"
@test R.misc["pax_desc"] == "azimuth, plunge, length"
@test R.misc["planes_desc"] == "strike, dip, rake"
@test R.misc["derivedOriginID"] == "600126955"
printstyled("    station XML\n", color=:light_green)
io = open(xml_stfile, "r")
xsta = read(io, String)
close(io)
# web_chanspec() is called repeatedly to exercise its no-argument path
web_chanspec()
web_chanspec()
web_chanspec()
# Test tracking of changes in SeisData structure S ==========================
# Does S change? Let's see.
printstyled("  Tracking with track_on!, track_off!\n", color=:light_green)
S = SeisData()
track_on!(S) # should do nothing but no error
@test track_off!(S) == nothing # should return nothing for empty struct
S = SeisData(3)
@test track_off!(S) == [true,true,true]
# @test_throws ErrorException("Tracking not enabled!") track_off!(S)
# Now replace S with randSeisData
S = randSeisData(3)
track_on!(S)
@test haskey(S.misc[1], "track")
# A channel pushed after track_on! should be flagged as changed
push!(S, randSeisChannel())
u = track_off!(S)
@test (u == [false, false, false, true])
@test haskey(S.misc[1], "track") == false
# Now turn tracking on again and move things around
track_on!(S)
@test haskey(S.misc[1], "track")
# Swap channels 1 and 3; the "track" key should move with channel 1's data
Ch1 = deepcopy(S[1])
Ch3 = deepcopy(S[3])
S[3] = deepcopy(Ch1)
S[1] = deepcopy(Ch3)
@test haskey(S.misc[3], "track")
@test haskey(S.misc[1], "track") == false
@test haskey(S.misc[2], "track") == false
append!(S.x[1], rand(Float64, 1024)) # Should flag channel 1 as updated
S.id[2] = reverse(S.id[2]) # Should flag channel 2 as updated
u = track_off!(S)
@test (u == [true, true, false, false])
@test haskey(S.misc[3], "track") == false
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 11284 | fname = path*"/SampleFiles/fdsn.conf"
# Setup for FDSN web-request tests: sample-file paths, two geographic search
# regions (Mt. Hood rectangle, Mt. Rainier radius), output file names, and
# three equivalent channel-spec inputs (c1: comma-delimited String,
# c2: Vector{String}, c3: 4-column String matrix).
# NOTE(review): `fname` (fdsn.conf path) is defined just above this chunk.
sac_pz_file = path*"/SampleFiles/SAC/test_sac.pz"
hood_reg = Float64[44.8, 46.0, -122.4, -121.0]
rainier_rad = Float64[46.852886, -121.760374, 0.0, 0.1]
h5file = "sacreq.h5"
seisfile = "sacreq.seis"
printstyled("  FDSN web requests\n", color=:light_green)
safe_isfile(h5file) && safe_rm(h5file)
printstyled("    fdsn_chp\n", color=:light_green)
# fdsn_chp
c1 = "CC.VALT..,PB.B001..BS?,PB.B001..E??,XX.YYY.00.BHZ"
c2 = ["CC.VALT..", "PB.B001..BS?", "PB.B001..E??", "XX.YYY.00.BHZ"]
c3 = ["CC" "VALT" "" ""; "PB" "B001" "" "BS?"; "PB" "B001" "" "E??"; "XX" "YYY" "00" "BHZ"]
# Verify that all three channel-spec input forms (String, Vector{String},
# String matrix) parse to the same request matrix. A warning, not a hard
# failure, is used because minreq may reorder rows between forms.
try
  @test (minreq(SeisIO.fdsn_chp(c1, 0)))[:,1:4] == SeisIO.fdsn_chp(c2, 0)[:,1:4] == minreq(SeisIO.fdsn_chp(c3, 0))
catch err
  # fixed typo in warning message: "reslts" -> "results"
  @warn("Inconsistent results from fdsn_chp!")
end
# FDSNsta
# Station-metadata requests; network-dependent, so missing channels only warn.
printstyled("    FDSNsta\n", color=:light_green)
S = FDSNsta("CC.VALT..,PB.B001..BS?,PB.B001..E??")
for i in ("PB.B001.T0.BS1", "PB.B001..EHZ", "CC.VALT..BHZ")
  j = findid(S, i)
  try
    @test j > 0
  catch
    @warn(string("No data from ", i, "; check connection!"))
  end
end
# FDSNsta with MultiStageResp
S = FDSNsta("CC.VALT..,PB.B001..BS?,PB.B001..E??", msr=true)
# With autoname
# autoname=true should write a file following the IRIS naming convention
# (YYYY.JJJ.HH.MM.SS.sss.NN.SSS.LL.CCC.Q.mseed); req_f is the expected name.
printstyled("    get_data(\"FDSN\", ..., autoname=true)\n", color=:light_green)
req_f = "2019.001.00.00.00.000.UW.VLL..EHZ.R.mseed"
req_ok = (try
  S = check_get_data("FDSN", "UW.VLL..EHZ", src="IRIS", s="2019-01-01", t=3600, autoname=true)
  true
catch
  @warn("Station VLL appears to be offline; test skipped.")
  false
end)
if req_ok
  @test safe_isfile(req_f)
  rm(req_f)
  printstyled("      test match to IRIS filename convention\n", color=:light_green)
  S = check_get_data("IRIS", "UW.VLL..EHZ", s="2019-01-01", t=3600, autoname=true)
  @test safe_isfile(req_f)
end
# Geographic station searches: radius (around Mt. Rainier) and rectangular
# region (around Mt. Hood). Both are network-dependent, so failures warn.
printstyled("    radius search (rad=)\n", color=:light_green)
# NOTE(review): `rad` is assigned but never used; the request below uses
# `rainier_rad` from the setup section. Confirm whether `rad` was intended.
rad = Float64[45.373514, -121.695919, 0.0, 0.1]
S = FDSNsta(rad=rainier_rad)
try
  @test S.n > 0 # Test will break if everything around Mt. Rainier is offline
catch
  @warn("Stations around Mt. Rainier appear to be offline.")
end
printstyled("    rectangular search (reg=)\n", color=:light_green)
S = FDSNsta(reg=hood_reg)
try
  @test S.n > 0 # Test will break if everything around Mt. Hood is offline
catch
  # fixed copy-paste error: this branch concerns Mt. Hood, not Mt. Rainier
  @warn("Stations around Mt. Hood appear to be offline.")
end
# get_data in GeoCSV formats, channel spec from a config file, then
# writesac with a channel range and bandpass filtering on the live request.
printstyled("    get_data\n", color=:light_green)
printstyled("      GeoCSV output\n", color=:light_green)
S = check_get_data("FDSN", "CC.JRO..BHZ,IU.COLA.00.*", src="IRIS", s=-600, t=0, fmt="geocsv", w=true)
S = check_get_data("FDSN", "CC.JRO..BHZ,CC.VALT.*", src="IRIS", s=-300, t=0, fmt="geocsv.slist")
printstyled("      config file for channel spec\n", color=:light_green)
S = SeisData()
check_get_data!(S, "FDSN", fname, src="IRIS", s=-600, t=0, w=true)
# Ensure we got data
if (isempty(S) ? 0 : maximum([length(x) for x in S.x])) == 0
  @warn("Request returned no data")
else
  printstyled("      writesac with channel range on this request\n", color=:light_green)
  if S.n > 3
    writesac(S, chans=1:2)
    # writesac should log itself to :notes of each written channel
    for i in 1:2
      @test any([(occursin("writesac", n) && occursin(S.id[i], n)) for n in S.notes[i]])
    end
  end
  printstyled("      bandpass filtering of this request\n", color=:light_green)
  # drop low-fs channels first; filtfilt! should log to :notes
  deleteat!(S, findall(S.fs.<25.0))
  filtfilt!(S, fl=0.01, fh=10.0)
  for i in 1:2
    @test any([occursin("processing", n) for n in S.notes[i]])
  end
end
# Ensure station headers are set
# For the first channel found in S, check sampling rate and instrument code.
# fixed malformed ID: "UW.VLL.EHZ" -> "UW.VLL..EHZ" (NET.STA.LOC.CHA needs the
# empty location field; cf. "UW.VLL..EHZ" used elsewhere in this file). The
# 3-dot-less form could never match findid.
ids = ["UW.HOOD..ENE", "CC.VALT..BHZ", "UW.TDH..EHZ", "UW.VLL..EHZ"]
fss = [100.0, 50.0, 100.0, 100.0]
codes = ['N', 'H', 'H', 'H']
for i = 1:4
  j = findid(S, ids[i])
  if j > 0
    @test ≈(S.fs[j], fss[i])
    @test inst_code(S, j) == codes[i]
    # NOTE(review): break stops after the first found channel; remaining IDs
    # are neither tested nor warned about. Confirm this "test at least one
    # reachable station" behavior is intended.
    break
  else
    @warn(string("No data from ", ids[i], "; check connection!"))
  end
end
# Check that headers get overwritten with SACPZ info when we use read_sacpz
# read_sacpz! should append channels not in S (so S.n grows) and populate
# :misc with SACPZ header fields for matching channels.
Nc = S.n
read_sacpz!(S, sac_pz_file)
@test S.n > Nc
i = findid("CC.VALT..BHZ", S)
if i > 0
  @test S.misc[i]["OUTPUT UNIT"] == "COUNTS"
end
i = findid("UW.HOOD..ENE", S)
if i > 0
  @test S.misc[i]["INSTTYPE"] == "ES-T-3339=Q330S+-6410"
end
# Check that msr=true works
# msr=true should populate every channel's :resp with a MultiStageResp.
S = SeisData()
check_get_data!(S, "FDSN", fname, src="IRIS", msr=true, s=-600, t=0)
if isempty(S)
  @warn("Empty request; check connectivity!")
else
  for i in 1:S.n
    @test typeof(S.resp[i]) == MultiStageResp
  end
end
# Try a string array for input
printstyled("    string array for channel spec\n", color=:light_green)
S = SeisData()
check_get_data!(S, "FDSN", ["UW.HOOD..E??", "CC.VALT..???", "UW.XNXNX.99.QQQ"], src="IRIS", s=-600, t=0, opts="szsrecs=true")
# Try a single string
# Also exercises the in-request processing keywords (demean, detrend, etc.).
printstyled("    string for channel spec\n", color=:light_green)
S = check_get_data("FDSN", "CC.JRO..BHZ,IU.COLA.00.*", src="IRIS", s=-600, t=0, v=1,
  demean=true,
  detrend=true,
  rr=true,
  taper=true,
  ungap=true,
  unscale=true)
# This should return exactly 2 days of data, which we know IRIS' FDSN server has
# (request window is 2018-01-31 to 2018-02-02; the gap check below asserts 2.0)
printstyled("    multi-day request\n", color=:light_green)
ts = "2018-01-31T00:00:00"
te = "2018-02-02T00:00:00"
S = get_data("FDSN", "CI.ADO..BH?", s=ts, t=te)
id = "CI.ADO..BHE"
i = findid(S, id)
if i == 0
  @warn(string("No data from ", id, "; check connection!"))
elseif isempty(S)
  @warn(string("Request empty; rest of test skipped!"))
else
  # Check that we have two complete days of data with no gaps
  if (length(S.x[i]) / (86400*S.fs[i])) != 2.0
    @warn(string("Partial outage; missing data from ", id, "; check connection!"))
  end
  printstyled("      are data written identically?\n", color=:green)
  # get rid of bad request channels
  k = Int64[]
  append!(k, findall([startswith(id, "XX.FAIL") for id in S.id]))
  append!(k, findall([startswith(id, "XX.FMT") for id in S.id]))
  isempty(k) || deleteat!(S, k)
  # write ASDF first, since this modifies the first sample start time in S
  write_hdf5(h5file, S, add=true, ovr=true, len=Day(2))
  S1 = read_hdf5(h5file, ts, te, msr=false)
  # Header fields must round-trip through ASDF exactly
  for f in (:id, :loc, :fs, :gain, :resp, :units, :misc)
    @test isequal(getfield(S1,f), getfield(S,f))
  end
  for i in 1:S.n
    (isempty(S.x[i]) || isempty(S1.x[i])) && continue
    x1 = S1.x[i]
    x2 = S.x[i]
    # NaN-containing traces are skipped since NaN != NaN breaks ≈
    if (any(isnan.(x1)) == false) && (any(isnan.(x2)) == false)
      @test x1 ≈ x2
    end
  end
  # Check that these data can be written and read faithfully in ASDF, SAC, and SeisIO
  writesac(S)
  wseis(seisfile, S)
  S2 = rseis(seisfile)[1]
  if !(isempty(S) || isempty(S2))
    for f in (:id, :loc, :fs, :gain, :resp, :units, :misc)
      @test isequal(getfield(S2,f), getfield(S,f))
    end
    for i in 1:S.n
      x1 = S.x[i]
      x2 = S2.x[i]
      if (any(isnan.(x1)) == false) && (any(isnan.(x2)) == false)
        @test x1 ≈ x2
      end
    end
  end
  # These are the only fields preserved; :loc is preserved to Float32 precision
  # Reconstruct the SAC output filename pattern (YYYY.JJJ*) from the window start
  d0 = DateTime(ts)
  y0 = Year(d0).value
  j0 = md2j(y0, Month(d0).value, Day(d0).value)
  sac_str = string(y0, ".", lpad(j0, 3, '0'), "*CI.ADO..BH*SAC")
  S1 = read_data("sac", sac_str)
  for f in (:id, :fs, :gain, :t)
    @test getfield(S, f) == getfield(S1, f)
    @test getfield(S, f) == getfield(S2, f)
  end
  for f in (:lat, :lon, :el, :dep, :az, :inc)
    @test isapprox(getfield(S.loc[i], f), getfield(S1.loc[i], f), atol=1.0e-3)
  end
  for i in 1:S.n
    x = S.x[i]
    x1 = S1.x[i]
    x2 = S2.x[i]
    if (any(isnan.(x)) == false) && (any(isnan.(x1)) == false) && (any(isnan.(x2)) == false)
      @test x ≈ x1
      @test x1 ≈ x2
    end
  end
  # clean up
  safe_rm(seisfile)
  safe_rm(h5file)
end
# A bad data format should produce warnings and "error" channels
# Failed requests create channel ID XX.FAIL..001 (with :misc["msg"]);
# unparseable payloads create XX.FMT..001 (with :misc["raw"]).
printstyled("    bad request logging\n", color=:light_green)
redirect_stdout(out) do
  try
    S = SeisData()
    bad_id = "DE.NENA.99.LUFT"
    # this should return a request error channel with ID = XX.FAIL..001
    get_data!(S, "FDSN", bad_id, v=3, si=false, fmt="sac.zip")
    # this should return a format error channel with ID = XX.FMT..001
    get_data!(S, "FDSN", "UW.LON.."; src="IRIS", s=-600, t=0, v=3, fmt="sac.zip")
    # Check that the info appears where it needs to
    @test S.id[1] == "XX.FAIL..001"
    @test any([occursin("request failed", n) for n in S.notes[1]])
    @test haskey(S.misc[1], "msg")
    @test S.id[S.n] == "XX.FMT..001"
    @test any([occursin("unparseable format", n) for n in S.notes[S.n]])
    @test haskey(S.misc[S.n], "raw")
  catch err
    @warn(string("Bad request logging test failed; caught error ", err))
  end
end
# Potsdam test
printstyled("    request from GFZ\n", color=:light_green)
S = check_get_data("FDSN", "GE.BKB..BH?", src="GFZ", s="2011-03-11T06:00:00", t="2011-03-11T06:05:00", v=0, y=false)
if isempty(S)
  @warn(string("No data from GFZ request; check connection!"))
end
# ❄❄❄❄❄❄❄❄❄❄❄❄❄❄❄❄❄❄ (oh, California...)
# NCEDC and SCEDC need server-specific HTTP headers; rubric rows are
# (server, test channel). Times are "yesterday, on the second" to guarantee
# archived data exists.
printstyled("    servers with special headers:\n", color=:light_green)
rubric = [
  "NCEDC" "BK.MOD..BHE"
  "SCEDC" "CI.SDD..BHZ"
]
❄ = size(rubric, 1)
for i = 1:❄
  printstyled("      ", rubric[i,1], ":\n", color=:light_green)
  ds = now()-Day(1)
  ds -= Millisecond(ds)
  s = string(ds)
  t = string(ds+Hour(1))
  try
    printstyled("        station info\n", color=:light_green)
    S = FDSNsta(rubric[i,2], s=s, t=t, msr=true, src=rubric[i,1])
    printstyled("        trace data\n", color=:light_green)
    get_data!(S, "FDSN", rubric[i,2], src=rubric[i,1], s=s, t=t, msr=true)
    if isempty(S)
      printstyled("      No data; check headers & connection!\n", color=:red)
    end
  catch err
    @warn(string("Request errored; error output below.\n\n", err))
  end
end
printstyled("    issues 42 & 43\n", color=:light_green)
redirect_stdout(out) do
  S = get_data("FDSN", "IU.ANMO.00.LHZ", s="2019-02-14", t="2019-02-15", v=3)
  # This should have two "fail" channels; one from each day with no data
  S = get_data("FDSN", "C0.HAYD.00.LHZ", s="2019-01-01", t="2019-01-05", v=3)
  @test S.n == 3
end
# test IRISPH5 mseed
printstyled("  IRISPH5\n", color=:light_green)
s = DateTime(2016,6,23)
t = s + Minute(1) # this is 250 Hz data
id = "YW.1002..DP?"
printstyled("    station info\n", color=:light_green)
S = FDSNsta(id, src="IRISPH5", s=s, t=t, msr=true)
printstyled("    trace data\n", color=:light_green)
printstyled("      mini-SEED\n", color=:light_green)
get_data!(S, "FDSN", id, src="IRISPH5", s=s, t=t, msr=true)
S = get_data("PH5", id, s=s, t=t, opts="reduction=10")
printstyled("      GeoCSV tspair\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, msr=true, fmt="geocsv.tspair")
printstyled("      GeoCSV slist\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, msr=true, fmt="geocsv.slist")
# these will fail
# need to implement multi-channel version of read_sac_stream
# segy in FDSN-like requests is NYI
printstyled("      SAC (should fail)\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, msr=true, fmt="sac")
printstyled("      segy1 (NYI)\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, msr=true, fmt="segy1")
printstyled("      segy2 (NYI)\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, msr=true, fmt="segy2")
printstyled("      unsupported format\n", color=:light_green)
S = get_data("FDSN", id, src="IRISPH5", s=s, t=t, fmt="nothing")
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3357 | ts = "2019-03-23T23:10:00"
# IRISWS timeseries tests: the same window is requested as SAC, mini-SEED,
# and GeoCSV from the first online channel; the three must agree sample-wise
# and in their common header fields.
# NOTE(review): `ts`, `out`, `basic_checks`, and `check_get_data` come from
# earlier in the test suite, outside this chunk.
te = "2019-03-23T23:17:00"
chans = [ "CC.JRO..BHZ",
          "CC.LON..BHZ",
          "CC.VALT..BHZ",
          "UW.HOOD..ENZ",
          "UW.HOOD..ENN",
          "UW.HOOD..ENE" ]
printstyled("  IRISWS continuous data requests\n", color=:light_green)
printstyled("    SAC and GeoCSV requests\n", color=:light_green)
for i = 1:length(chans)
  cha = chans[i]
  # println("cha = ", cha)
  S = get_data("IRIS", cha, src="IRIS", s=ts, t=te, fmt="sacbl", v=0, w=true)
  basic_checks(S)
  sleep(1)
  T = get_data("IRIS", cha, src="IRIS", s=ts, t=te, fmt="mseed", v=0, w=true)
  basic_checks(T)
  U = get_data("IRIS", cha, src="IRIS", s=ts, t=te, fmt="geocsv", v=0)
  basic_checks(U)
  if S.n == 0 || T.n == 0 || U.n == 0
    @warn(string(cha, " appears to be offline; trying next."))
    if i == lastindex(chans)
      error("No data for any station; failing test due to connection errors.")
    end
  else
    printstyled("    SAC == MSEED == GeoCSV\n", color=:light_green)
    # synchronize all three to the same window before comparing
    sync!(S, s=ts, t=te)
    sync!(T, s=ts, t=te)
    sync!(U, s=ts, t=te)
    @test S.x[1] ≈ T.x[1] ≈ U.x[1]
    # :notes and :src will be different; as of 2020-05-28, so will :loc
    # :units is only set in GeoCSV
    for f in Symbol[:id, :name, :fs, :gain, :resp, :t]
      @test getfield(S,f) == getfield(T,f) == getfield(U,f)
    end
    #= Change 2020-05-28
    after IRISWS timeseries issues (first reported to IRIS 2020-05-16),
    :loc and :gain are being set in SAC requests.
    =#
    # printstyled("    GeoCSV\n", color=:light_green)
    # one successful channel is enough; stop here
    break
  end
end
# Test bad data formats
printstyled("    bad request logging\n", color=:light_green)
sta_matrix = vcat(["IU" "ANMO" "00" "BHZ"],["IU" "ANMO" "00" "BHE"])
test_sta = deepcopy(sta_matrix)
ts = "2005-01-01T00:00:00"
te = "2005-01-02T00:00:00"
redirect_stdout(out) do
  try
    # this is an unparseable format
    S = get_data("IRIS", sta_matrix, s=ts, t=te, v=2, fmt="audio")
    # this is a bad request due to the wild card
    get_data!(S, "IRIS", "IU.ANMO.*.*", s=ts, t=te, v=2, fmt="ascii")
    @test S.n == 3
    @test S.id[2] == "XX.FAIL..001"
    @test any([occursin("request failed", n) for n in S.notes[2]])
    @test haskey(S.misc[2], "msg")
    @test S.id[1] == "XX.FMT..001"
    @test any([occursin("unparseable format", n) for n in S.notes[1]])
    @test haskey(S.misc[1], "raw")
  catch err
    @warn(string("Bad request logging test failed; caught error ", err))
  end
end
# check that these aren't modified in-place by the request (very old bug)
@test sta_matrix == test_sta
printstyled("    complicated IRISWS request\n", color=:light_green)
chans = ["UW.TDH..EHZ", "UW.VLL..EHZ", "CC.JRO..BHZ"] # HOOD is either offline or not on IRISws right now
st = -86400.0
en = -86100.0
(d0,d1) = parsetimewin(st,en)
S = get_data("IRIS", chans, s=d0, t=d1, y=true)
if isempty(S)
  @warn(string("No data for channels ", join(chans, ", "), "; test skipped."))
else
  # After y=true (sync), all channels should have nearly identical lengths and
  # start times: within 2 samples at the slowest sampling rate.
  L = [length(S.x[i])/S.fs[i] for i = 1:S.n]
  t = [S.t[i][1,2] for i = 1:S.n]
  L_min = minimum(L)
  L_max = maximum(L)
  t_min = minimum(t)
  t_max = maximum(t)
  try
    @test(L_max - L_min <= maximum(2 ./ S.fs))
    @test(t_max - t_min <= round(Int64, sμ * 2.0/maximum(S.fs)))
  catch
    @warn(string("Unexpected request length; check for partial outage at IRIS for ", join(chans, ", "), "!"))
  end
end
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 3180 | # Seedlink with command-line stations
# SeedLink client tests: stream/station availability checks (has_stream,
# has_sta), deliberate error cases, then live links in DATA, FETCH, and TIME
# modes using command-line specs, a config file, and a config string.
config_file = path*"/SampleFiles/seedlink.conf"
st = 0.0
en = 60.0
seq = "500000"
sta = ["CC.SEP", "UW.HDW"]
pat = ["?????.D", "?????.D"]
trl = ".??.???.D"
# reshape "UW.HDW" into a 1x2 matrix ["UW" "HDW"] for the matrix input form
sta_matrix = String.(reshape(split(sta[2],'.'), 1,2))
printstyled("  SeedLink\n", color=:light_green)
printstyled("    (SeedLink tests require up to 6 minutes)\n", color=:green)
# has_stream
# Availability checks run as async Tasks; SL_wait collects them with a timeout.
printstyled("    has_stream\n", color=:light_green)
ta = Array{Union{Task,Nothing}, 1}(undef, 4)
ta[1] = @async has_stream(sta)[2]
ta[2] = @async has_stream(sta, pat, d='.')[2]
ta[3] = @async has_stream(join(sta, ','))[2]
ta[4] = @async has_stream(sta_matrix)[1]
SL_wait(ta, 1)
# has_sta
printstyled("    has_sta\n", color=:light_green)
ta[1] = @async has_sta(sta[1])[1]
ta[2] = @async has_sta(sta[1]*trl)[1]
ta[3] = @async has_sta(sta)[1]
ta[4] = @async has_sta(parse_charr(sta, '.', false))[1]
SL_wait(ta, 1)
# Attempting to produce errors
# x_on_err=false downgrades link errors; each opened link is closed with BYE.
printstyled("    produce expected errors and warnings\n", color=:light_green)
redirect_stdout(out) do
  S1 = SeisData()
  @test_throws ErrorException seedlink!(S1, "DATA", [sta[1]], ["*****.X"])
  S2 = seedlink("DATA", [sta[1]], pat, x_on_err=false)
  write(S2.c[1], "BYE\r")
  close(S2.c[1])
  @test_throws ErrorException seedlink!(S2, "DATA", [replace(sta[1], "SEP" => "XOX")], ["?????.D"])
  S3 = seedlink("DATA", [replace(sta[1], "SEP" => "XOX")], ["*****.X"], x_on_err=false)
  write(S3.c[1], "BYE\r")
  close(S3.c[1])
  S4 = seedlink("DATA", hcat(sta_matrix, "***", "***", "X"), x_on_err=false)
  write(S4.c[1], "BYE\r")
  close(S4.c[1])
end
# DATA mode
printstyled("    DATA mode\n", color=:light_green)
printstyled("      link 1: command-line station list\n", color=:light_green)
T = SeisData()
redirect_stdout(out) do
  seedlink!(T, "DATA", sta, refresh=9.9, kai=7.0, v=1)
end
printstyled("      link 2: station file\n", color=:light_green)
redirect_stdout(out) do
  seedlink!(T, "DATA", config_file, refresh=13.3, v=3)
end
wait_on_data!(T, 60.0)
# To ensure precise timing, we'll pass d0 and d1 as strings
dt = en-st
(d0,d1) = parsetimewin(st,en)
# FETCH mode (indistinguishable from DATA mode for most users)
printstyled("    FETCH mode with seq (should fail)\n", color=:light_green)
redirect_stdout(out) do
  V = seedlink("FETCH", refresh=10.0, config_file, seq=seq, s=now()-Hour(1), v=2)
  wait_on_data!(V, 30.0)
end
printstyled("    FETCH mode\n", color=:light_green)
V = seedlink("FETCH", refresh=10.0, config_file)
printstyled("      link initialized\n", color=:light_green)
wait_on_data!(V, 30.0)
# SeedLink time mode (more complicated)
printstyled("    TIME mode\n", color=:light_green)
U = SeisData()
seedlink!(U, "TIME", sta, refresh=10.0, s=d0, t=d1, w=true)
printstyled("      first link initialized\n", color=:light_green)
# Seedlink with a config file
seedlink!(U, "TIME", config_file, refresh=10.0, s=d0, t=d1)
printstyled("      second link initialized\n", color=:light_green)
# Seedlink with a config string
redirect_stdout(out) do
  seedlink!(U, "TIME", "CC.VALT..???, UW.ELK..EHZ", refresh=10.0, s=d0, t=d1, v=3)
end
printstyled("      third link initialized\n", color=:light_green)
wait_on_data!(U, 60.0)
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 1940 | using SeisIO, SeisIO.RandSeis, Test
"""
    check_tutorial_data!(S::GphysData; prune::Bool=true)

Check tutorial data for possible errors. If `prune=true`, channels with data
errors are deleted.

Current checks:
* No data
* Data are all NaNs (common with bad filtering parameters)
* Data are all equal (very rare; theoretically possible but we've never seen it)
"""
function check_tutorial_data!(S::GphysData; prune::Bool=true)
  # pull[i] == true flags channel i for deletion
  pull = falses(S.n)
  for i in 1:S.n
    T = eltype(S.x[i])

    # check for no data
    if length(S.x[i]) == 0
      @warn(string("Channel ", i, " [id = ", S.id[i], "] has no data."))
      pull[i] = true
      continue
    end

    # check for NaNs
    nn = count(isnan, S.x[i])
    if nn > 0
      @warn(string("Channel ", i,
        " [id = ", S.id[i], ", data type = ", T,
        "]. Output contains ", nn, " NaNs."))
      pull[i] = true
    end

    # check for variation; guard length > 1, since diff of a one-sample
    # channel is empty and maximum would throw
    if length(S.x[i]) > 1
      mx = maximum(abs.(diff(S.x[i])))
      if mx == zero(T)
        @warn(string("Channel ", i,
          " [id = ", S.id[i], ", data type = ", T,
          "]. Output is a constant."))
        pull[i] = true
      end
    end
  end

  if prune
    inds = findall(pull)
    if isempty(inds)
      @info("No data issues found.")
    else
      # log which channels go before deleting, since deleteat! renumbers
      info_strings = join([string(i, ": ", S.id[i]) for i in inds], "\n")
      @info(string("Deleting channels: \n", info_strings))
      deleteat!(S, inds)
    end
  end
  return nothing
end
# run a test to ensure this is behaving correctly
function test_check_tutorial_data()
  @info(string("Testing check_tutorial_data(). Expect two Warnings and an Info string."))
  data = randSeisData(6, nx=32000, s=1.0)
  ref = deepcopy(data)
  # corrupt three channels: empty channel 6, constant channel 3, NaNs in 1
  data.x[6] = eltype(data.x[6])[]
  fill!(data.x[3], 3*one(eltype(data.x[3])))
  data.x[1][12:20] .= NaN
  # after pruning, only the untouched channels (2, 4, 5) should survive
  check_tutorial_data!(data)
  keep = [2, 4, 5]
  @test data.id == ref.id[keep]
  @test data.x == ref.x[keep]
  println("Test of check_tutorial_data! complete.")
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 339 | import SeisIO: get_svn
# Tutorial installer: verifies dependencies, fetches the tutorial sample data
# from the SeisIO-TestData repository into ./DATA, and launches JupyterLab in
# the package's tutorial directory.
# NOTE: `import SeisIO: get_svn` appears immediately above this block.
path = joinpath(Base.source_dir(),"../test/TestHelpers/0_check_deps.jl")
include(path)
include("./check_data.jl")
pkg_check(["DSP", "SeisIO", "IJulia"])
get_svn("https://github.com/jpjones76/SeisIO-TestData/trunk/Tutorial", "DATA")
using IJulia
jupyterlab(dir=joinpath(dirname(dirname(pathof(SeisIO))),"tutorial"))
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | code | 154 | function safe_rm(file::String)
  # Best-effort deletion: cleanup must not abort the caller when the file is
  # missing or locked, so any exception from rm is downgraded to a warning.
  try
    rm(file)
  catch err
    @warn(string("Can't remove ", file, ": throws error ", err))
  end
  return nothing
end
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 2590 | # SeisIO.jl
[](https://travis-ci.org/jpjones76/SeisIO.jl) [](https://ci.appveyor.com/project/jpjones76/seisio-jl/branch/master) [](https://codecov.io/gh/jpjones76/SeisIO.jl)[](https://coveralls.io/github/jpjones76/SeisIO.jl?branch=master) [](https://seisio.readthedocs.io/en/latest/?badge=latest)
[](https://www.repostatus.org/#active)
A minimalist, platform-agnostic package for univariate geophysical data.
## Installation | [Documentation](http://seisio.readthedocs.org)
From the Julia prompt, type: `] add SeisIO`; (Backspace); `using SeisIO`
## Summary | [Collaboration](docs/CONTRIBUTE.md)
Designed for speed, efficiency, and ease of use. Includes web clients, readers for common seismic data formats, and fast file writers. Utility functions allow time synchronization, data merging, padding time gaps, and other basic data processing.
* Web clients: SeedLink, FDSN (dataselect, event, station), IRIS (TauP, timeseries)
* File formats: ASDF (r/w), Bottles, GeoCSV (slist, tspair), QuakeML (r/w), SAC (r/w), SEED (dataless, mini-SEED, resp), SEG Y (rev 0, rev 1, PASSCAL), SLIST, SUDS, StationXML (r/w), Win32, UW
## Getting Started | [Formats](docs/FORMATS.md) | [Web Clients](docs/WEB.md)
Start the tutorials in your browser from the Julia prompt with
```julia
using SeisIO
cd(dirname(pathof(SeisIO)))
include("../tutorial/install.jl")
```
To run SeisIO package tests and download sample data, execute
```julia
using Pkg, SeisIO; Pkg.test("SeisIO")
```
Sample data downloaded for the tests can be found thereafter at
```julia
cd(dirname(pathof(SeisIO)))
sfdir = realpath("../test/SampleFiles/")
```
## Publications | [Changelog](docs/CHANGELOG.md) | [Issues](docs/ISSUES.md)
Jones, J.P., Okubo, K., Clements, T., \& Denolle, M. (2020). SeisIO: a fast, efficient geophysical data architecture for the Julia language. *Seismological Research Letters* doi: https://doi.org/10.1785/0220190295
This work has been partially supported by a grant from the Packard Foundation.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 13713 | # 2021-03-19
* `writesac` now allows the user to specify SAC file header version (SAC variable NVHDR) using keyword `nvhdr=`. The default is 6. Versions 6 and 7 are supported.
+ Reason for change: SAC won't read a file if NVHDR is greater than expected.
- Example: SAC v101.x (NVHDR = 6) *will not* read a SAC file created in/for SAC v102.x (NVHDR = 7), even though the two file header versions are effectively interchangeable.
* `filtfilt!`: breaking combinations of data and filter parameters should now be identical in `DSP.filtfilt` and `SeisIO.filtfilt!`, even on Float32 data.
+ Implicit feature request from issue #82.
+ This should never force data conversion to Float64. (We already test this.)
+ It's always possible to choose a filter that outputs NaNs, but the two filtering functions should now behave identically in this regard.
# SeisIO v1.2.0 Release: 2021-02-02
# 2021-01-31
* Calling `writesac` on a SeisEvent object now always writes event header values to the correct byte indices for SAC v101 and above.
* `SeisData(S::T) where T<:GphysData` should now be aliased to `convert(SeisData, S)` for all GphysData subtypes.
# 2021-01-29
* New developer (internal) function `cmatch_p!`:
+ Matches a pair of GphysChannel objects on (:fs, :id, :loc, :resp, :units)
* Declares a match even if some fields are unset in one object
* On a match, unset fields in each object are copied from the other object
* SAC read/write support extended to SAC v7 (files produced by/for SAC v102.0).
# 2021-01-27
* `merge!` has been extended to pairs of GphysChannel objects. Non-default values must be identical in :fs, :id, :loc, :resp, :units.
* `convert` expansion:
+ `convert(NodalData, S)` and `convert(EventTraceData, S)` should now work for all GphysData subtypes.
+ `convert(NodalChannel, C)` and `convert(EventChannel, C)` should now work for all GphysChannel subtypes.
+ `EventTraceData(S::T) where T<:GphysData` is now aliased to `convert(EventTraceData, C)`.
+ `EventChannel(C::T) where T<:GphysChannel` is now defined as an alias to `convert(EventChannel, C)`.
* Calling `writesac` on a SeisEvent object now writes event IDs that can be parsed to Int32.
* New function `fill_sac_evh!` in submodule Quake fills SeisEvent header info from a SAC file.
# 2020-12-03
* `ChanSpec` now includes `StepRange{Int, Int}` in its Type union
* Added processing function `rescale!` for fast conversion and matching of scalar gains
# 2020-12-02
* `writesac` should now write begin time (word 6) in a way that never shifts sample times, even by subsample values. (Fixes issue #60)
# 2020-11-23
* `ungap!` should now work correctly on a channel whose only time gap occurs before the last sample. (Fixes issue #74)
# 2020-10-31
* `scan_seed` now always parses Blockette [1000]. (Fixes issue #73)
# 2020-10-30
* Fixed a performance issue when reading a large file whose data contain many negative time gaps. (#72)
# 2020-10-29
* Added utility `scan_seed` to submodule `SeisIO.SEED` for SEED volumes. (#62)
+ `scan_seed` can report changes within a SEED file, including:
- Samples per channel (KW `npts`)
- Gaps (KW `ngaps`), or exact gap times (`seg_times=true`)
- Changes in sampling frequency (KW `nfs`), or exact times of fs changes (`fs_times=true`)
+ Reports to stdout (suppress with `quiet=true`)
+ Returns a String array of comma-delineated outputs, one entry per channel.
+ Please open feature request Issues if you need to scan for additional changes within SEED volumes.
+ This won't interact directly with online SEED requests. To use `scan_seed` with an online request for a seed volume, use `w=true` to dump the raw request to disk, and scan the file(s) created by the download.
# 2020-10-27
* NodalData no longer errors on `resample!`; fixes issue #65. (Merged PR #68 from tclements/Resample)
# 2020-10-26
* NodalLoc (:loc field of NodalData) now has x, y, z subfields. (Merged PR #64 from tclements/NodalLoc)
* NodalData now uses AbstractArray{Float32, 2} for the :data field, rather than Array{Float32, 2}. (Merged PR #66 from tclements/Nodal)
# SeisIO v1.1.0 Release: 2020-08-26
# 2020-08-26
* HDF5 compatibility has changed to "0.12, 0.13" as HDF5.jl v0.13.5 fixes the read slowdown issue. Versions of HDF5 in range 0.12.3 < VERSION < 0.13.5 might still have slow HDF5 read times. Resolves issue #49.
# 2020-08-22
* `read_nodal` has switched channel syntax to use `chans=` for numeric channel
values, lists, or ranges, as the data processing functions. The keywords `ch_s`
and `ch_e` have been removed.
* Channel names and IDs in `read_nodal` now use the channel number from the file.
* Changed the initialization method for NodalData to avoid using any keywords:
`NodalData(data, info, ts; ch_s, ch_e` is now `NodalData(data, info, chans, ts)`
# 2020-08-18
* `read_nodal` now requires a format string as the first argument
+ This change makes syntax identical to `read_data(fmt, file, ...)`
* Implemented `read_nodal` SEG Y format in SeisIO.Nodal; requested in Issue #55
+ Note that `read_nodal("segy", ... )` produces different `:id` values
* Fixed a bug where `read_data("segy", ..., full=true)` could copy some SEGY file header values to `:misc` keys in the wrong byte order.
## Dev/Backend:
* `do_trace` no longer uses `fname` as a positional, but needs a UInt8 for `ll`
# 2020-08-17
* Fixed Issue #56
* When calling `read_data("segy", ..., full=true)`, two key names have changed:
+ `:misc["cdp"]` => `:misc["ensemble_no"]`
+ `:misc["event_no"]` => `:misc["rec_no"]`
* Fixed Issue #57 : `read_data("segy", ...)` has a new keyword: `ll=` sets the
two-character location field in `:id` (NNN.SSS.**LL**.CC), using values in the
SEG Y trace header:
* 0x00 None (don't set location subfield) -- default
* 0x01 Trace sequence number within line
* 0x02 Trace sequence number within SEG Y file
* 0x03 Original field record number
* 0x04 Trace number within the original field record
* 0x05 Energy source point number
* 0x06 Ensemble number
* 0x07 Trace number within the ensemble
# 2020-08-11
* Automated testing for Julia v.1.4 has ended. Tested versions of the language include v1.0 (LTS) and v1.5 (stable).
* Changed internal function `SeisIO.dtr!` to accept `::AbstractArray{T,1}` in first positional argument; fixes Issue #54
* Added tests for processing functions on a NodalData object; tests Issue #54
* Added explicit warning that `translate_resp!` can be acausal; from discussion of Issue #47
# 2020-07-15
Added SeisIO.Nodal for reading data files from nodal arrays
* New types:
+ NodalData <: GphysData
+ NodalChannel <: GphysChannel
+ NodalLoc <: InstrumentPosition
* Wrapper: `read_nodal`
+ Current file format support: Silixa TDMS (default, or use `fmt="silixa"`)
* Utility functions: `info_dump`
### 2020-07-09
* The data field `:x` of GphysData and GphysChannel objects can now be
an AbstractArray{Float32, 1} or AbstractArray{Float64, 1}.
* Merged pull request #53 from @tclements: `get_data` now supports IRISPH5
for mseed and geocsv. (Implements request in issue #52)
+ Both `get_data("PH5")` and `get_data("FDSN", ..., src="IRISPH5")` work.
+ SAC and SEGY support is NYI.
+ PH5 GeoCSV doesn't parse correctly at present, and will error if a
decimation key is passed to `opts=`. At issue is the precision of GeoCSV
floats was documented only by oral tradition. This will be fixed in a future
patch.
### 2020-07-02
* minor bug fix: in Julia v1.5+, calling `sizeof(R)` on an empty `MultiStageResp`
object should no longer throw an error
* `resample!` has been rewritten, fixing issues #50 and #51. syntax and keywords
are unchanged.
+ The current version consumes slightly more memory than the previous one.
+ There may be one further rewrite in coming weeks, to switch to FFT-based filtering.
### 2020-06-18
* `get_data` should no longer error when a multiday request begins on a day when one channel has no data. (Issue #43)
* Fixed behavior of reading a SEED Blockette 100 to match the mini-SEED C library. (Issue #48)
* Parsing SEED Blockette 100 now logs `:fs` changes to `:notes`.
### 2020-05-30
* Automated testing for Julia v.1.3 has ended. Tested versions of the language include v1.0 (LTS), v1.4 (stable), and v1.5 (upcoming release).
### 2020-05-28
* `get_data("IRIS", ...)` now accepts `fmt="sac"` as an alias to `fmt="sacbl"`.
#### IRISWS changes
A server-side issue with IRISWS timeseries, affecting `get_data("IRIS", ... )`, has caused minor behavior changes:
* While `:gain` still appear to be 1.0 in SeisIO, the channel gain is now set (and hence, unscaled, but logged to `:notes`) in SAC and GeoCSV requests. Other data formats still don't do this.
* SAC and GeoCSV currently set lat, lon, and el in requests, but mini-SEED doesn't. Until requests return format-agnostic locations, `get_data("IRIS", ... )` will return an empty GeoLoc() object for the `:loc` field.
##### Potential Inconsistencies
However, as a result of the above changes:
1. With `get_data("IRIS", ... , w=true)`, `:misc` is now always format-dependent.
2. For formats "geocsv" and "sac", `S = get_data("IRIS", ... , w=true)` now differs slightly from calling `read_data` on the files created by the `get_data` command.
+ `:loc` will be set in objects read from SAC and GeoCSV files, but not mini-SEED.
+ Data in objects read from SAC or GeoCSV files will be scaled by the Stage 0 gain; fix this with `unscale!`.
### 2020-05-16
* Documentation improvements for issue #44 and #45.
* Fixed issue #43; reading Steim-compressed mini-SEED into an existing channel with a Float64 data vector.
### 2020-04-07
* Improved reading unencoded mini-SEED data with byte swap (part of issue #40)
* Bug fix for issue #42.
### 2020-03-14
* mini-SEED can now parse unencoded data to structures of any GphysData subtype
### 2020-03-13
* *sync!* has been rewritten based on @tclements suggestions (Issue #31). Notable changes:
* Much less memory use
* Much faster; ~6x speedup on tests with 3 channels of length ~10^7 samples
* More robust handling of unusual time matrices (e.g., segments out of order)
* The [tutorial page](https://seisio.readthedocs.io/en/latest/src/Help/tutorial.html) has been updated. Fixes issue #39.
### 2020-03-10
* Automated testing for Julia v.1.1-1.2 has ended. Tested versions of the language include v1.0 (LTS), v1.3 (stable), and v1.4 (upcoming release).
* The docstring `?chanspec` was renamed `?web_chanspec` to avoid confusion with SeisIO internals.
* The docstring `?timespec` was renamed to `?TimeSpec`.
* Quake Type *SeisEvent* now has a real docstring.
* Quake Type *EventChannel* has a docstring again.
### 2020-03-09
* Rewrote SeisIO.RandSeis for faster structure generation
+ randSeisChannel has two new keywords: fs_min and fc
+ randSeisData has two new keywords: fs_min and a0
* More documentation and docstring updates
* The data processing functions *ungap!*, *taper!*, *env!*, *filtfilt!*, and *resample!* can no longer be forced to work on irregularly-sampled data by doing clever things with keywords.
* *taper* now has a docstring
* ASDF file reads now close all groups and datasets after reading
### 2020-03-07
* Increased the robustness of *t_extend*; it no longer needs a mini-API.
* Tests now handle time and data comparison of re-read data more robustly.
* *show*
- now reports correct number of gaps with a gap before the last sample in *:x*
- now identifies times in irregular data as "vals", not "gaps".
* *write_asdf*
+ When *ovr=false*, a sample window with the same ID, start time, end time as a trace in the output volume now never overwrites the trace in the output volume.
* Fixed a very rare case in which two rows of a time matrix could correspond to the same sample index
* *read_data*: formats "slist" and "lennartz" now use verbosity
#### QuakeML
+ Reading QuakeML with no magnitude now returns an empty hdr.mag structure
+ *write_qml*
- now writes hdr.loc.typ to Origin/type
- now writes hdr.loc.npol to focalMechanism/stationPolarityCount
- added method for SeisEvent
#### SAC
Data files no longer track the LOC field of `:id` on read or write.
+ We learned only recently that LOC has no standard SAC header variable: some data sources store this as KHOLE, which we used in the past, but this is correctly an event property in the [format spec](http://ds.iris.edu/files/sac-manual/manual/file_format.html).
### 2020-03-05
* `show_writes` now prints filename in addition to write operation
* `merge!` is now logged in a way that `show_processing` catches
### 2020-03-04
* *writesacpz* now has a GphysChannel method
* *write_sxml* is extended to all GphysData subtypes
* The merge tests no longer allow total timespan *δt > typemax(Int64)* when testing *xtmerge!*; this rare case (p ~ 0.003) caused an error.
* The SeedLink test now accurately tracks the time spent for each trial SeedLink session.
* The SeedLink client now accepts keyword *seq="* for starting sequence number, consistent with [SeisComp3 SeedLink protocols](https://www.seiscomp3.org/doc/seattle/2012.279/apps/seedlink.html).
### 2020-03-03
* SEED support functions *seed_support()*, *mseed_support()*, *dataless_support()*, and *resp_wont_read()* now dump all info. to stdout.
* *Manifest.toml* is no longer tracked on GitHub, hopefully preventing dependency conflicts.
* using *get_data(..., w=true)* now logs the raw download write to *:notes*
* The FDSN tests now delete bad request channels before checking if data are written identically in SEED and SAC.
* The *writesac* extension in SeisIO.Quake no longer allows keyword *ts=*; it was not actually used in the function body.
# SeisIO v1.0.0 Release: 2020-03-02
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 3124 | # **How to Contribute**
0. **Please contact us first**. Describe the intended contribution(s). In addition to being polite, this ensures that you aren't doing the same thing as someone else.
1. Fork the code: in Julia, type `] dev SeisIO`.
2. Choose an appropriate branch:
- For **bug fixes**, please use `main`.
- For **new features** or **changes**, don't use `main`. Create a new branch or push to `dev`.
3. When ready to submit, push to your fork (please, not to `main`) and submit a Pull Request (please, not to `main`).
4. Please wait while we review the request.
# **General Rules**
## **Include tests for new code**
* We expect at least 95% code coverage on each file.
* Our target code coverage is 99% on both [CodeCov](https://codecov.io/gh/jpjones76/SeisIO.jl) and [Coveralls](https://coveralls.io/github/jpjones76/SeisIO.jl?branch=main). Code coverage has exceeded 97% consistently since at least June 2019. Please don't break that for us.
* Good tests include a mix of [unit testing](https://en.wikipedia.org/wiki/Unit_testing) and [use cases](https://en.wikipedia.org/wiki/Use_case).
Data formats with rare encodings can be exceptions to the 95% rule.
* Example 1: SEG Y is one of four extant file formats that still uses [IBM hexadecimal Float](https://en.wikipedia.org/wiki/IBM_hexadecimal_floating_point); we've never encountered it, so we can't test it, but it exists.
* Example 2: Int24 encoding of SEED data exists in theory, but we cannot find a single researcher who's encountered it; neither can the IRIS DMC staff that we've asked. We don't support this encoding.
We understand the need to fish for digital coelacanths, but please consider
their rarity before deciding that SeisIO needs another one.
## **Don't add dependencies to the SeisIO core module**
Please keep the footprint small.
## **Write comprehensible code**
Other contributors must be able to understand your work. People must be able to
use it. Scientific software should require a Ph.D to understand the science, not
to learn the syntax.
## Please limit calls to other languages
For reasons of transparency, portability, and reproducibility, external calls must meet three conditions:
1. Works correctly in (non-emulated) Windows, Linux, and Mac OS.
1. Source code free and publicly available. "Please contact the author for the source code" emphatically **does not** meet this standard.
1. Must free pointers after use. If we can make your code trigger a segmentation fault, we have no choice but to reject it. Beware that LightXML is the reason that this rule exists; it does *not* free pointers on its own.
We strongly recommend only calling external functions for tasks with no Julia equivalent (like plotting) or whose native Julia versions behave strangely, perform poorly, or do both. (most of `Dates` does both)
### Prohibited external calls
* No software with (re)distribution restrictions, such as Seismic Analysis Code (SAC)
* No commercial software, such as MATLAB™ or Mathematica™
* No software with many contributors and no clear control process, such as ObsPy, Gnu Octave, or the Arch User Repository (AUR)
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 3281 | # **Format Support**
| Format | Read | Write | Fmt | Notes |
|:----- |:----- | :----- |:----- |:----- |
| AH-1 | read_data | | ah1 | |
| AH-2 | read_data | | ah2 | |
| ASDF event | read_asdf_evt | write_hdf5 | asdf | k |
| ASDF timeseries | read_hdf5 | write_hdf5 | asdf | k |
| Bottle | read_data | | bottle | |
| Dataless SEED | read_meta | | dataless | |
| GeoCSV slist | read_data | | geocsv.slist | u |
| GeoCSV tspair | read_data | | geocsv | u |
| Lennartz ASCII | read_data | | lennartz | |
| Mini-SEED | read_data | | mseed | o |
| PASSCAL SEG Y | read_data | | passcal | |
| QuakeML | read_qml | write_qml | | |
| RESP | read_meta | | resp | u |
| SAC polezero | read_meta | writesacpz | sacpz | u |
| SAC timeseries | read_data | writesac | sac | |
| SEG Y | read_data | | segy | i |
| SeisIO | rseis | wseis | | |
| SLIST | read_data | | slist | |
| SUDS event | read_quake | | suds | o |
| SUDS timeseries | read_data | | suds | o |
| StationXML | read_meta | write_sxml | sxml | |
| UW event | read_quake | | uw | |
| UW timeseries | read_data | | uw | |
| Win32 | read_data | | win32 | |
## Column Guide
* **Format** is the most common abbreviated name for the data format
* **Read** is the read command
+ Most commands have two versions: in-place and out-of-place.
+ The out-of-place command is given above and creates a new structure.
+ The in-place command is the command in the column plus "!" (like "read_data!"); it modifies an existing data structure.
* **Write** is the write command for formats with write support.
* **Fmt** is the format string passed to the read command
+ This is always the first ASCII string in the command
+ Example: *read_meta!(S, "sacpz", "tmp.sac.pz")*
## Notes Guide
* **k**: write_hdf5 uses fmt as a keyword that defaults to "asdf"; this will become more relevant when support expands to other hdf5 (sub)formats
* **i**: incomplete
+ SEG Y rev 2 read support is NYI; send us test files if you need it!
* **o**: out-of-scope blockettes/structures are skipped
* **u**: UNIX-style text files only; uses a byte-wise parser and assumes lines end in "\n". DOS-style text files (whose lines end in "\r\n") must be converted with e.g. `dos2unix(fname)` or equivalent Windows Powershell command(s).
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 3293 | # **Document Purpose**
This document lists problems that are not our fault and can't be fixed by us. GitHub Issues opened about items listed here will be closed with a comment referring the opener to this document.
## Issues Key
* I = Incomplete file support. If support is needed, please send us test files with expected values and adequate documentation. Please note that official documentation alone does not satisfy any known definition of "adequate" in any issue listed here.
* O = Out of scope; won't fix.
* X = External; can't fix.
# **List of Non-SeisIO Issues**
* **32-bit support** (O): SeisIO is designed for 64-bit systems and won't work in 32-bit versions of Julia.
* **ASDF support** (I)
+ `Waveform` group attributes `event_id`, `magnitude_id`, `focal_mechanism_id` are not yet matched to `QuakeML` group attributes.
+ `Provenance` is not fully supported.
+ `write_hdf5(..., fmt="ASDF")` doesn't write source-receiver geometry to ASDF volumes when invoked on a SeisEvent structure.
* **Code coverage** (X): reported coverage sometimes appears to be 94%, rather than ~99%.
* **FDSNevq(..., src="all")** (X): no checks are possible for event uniqueness or server status.
* **Quanterra geophone responses** (X): a few permanent North American short-period stations have tremendous (two orders of magnitude) scaling problems with `translate_resp` and `remove_resp`.
* Details: all known cases have an XML `SensorDescription` value of "HS-1-LT/Quanterra 330 Linear Phase Composite". This description seems to mean "Geospace Technologies HS-1-LT geophone with Kinemetrics Quanterra Q330 digitizer", but no "HS-1-LT" exists on the [Geospace Technologies product website](https://www.geospace.com/sensors/).
* **NCEDC/SCEC connection issues** (X): [see gist](https://gist.github.com/jpjones76/0175e762bea8c37d99b97ef3cb056068)
* **SEED blockette support** (O): blockettes outside the scope of SeisIO aren't read into memory.
* **SEED with little-endian Steim compression** (X)
+ See issue #33. This isn't valid SEED.
+ mini-SEED in ObsPy writes these files by default in rare cases.
* **SEG Y files with nonstandard trace headers** (X)
+ If SEG Y files use nonstandard trace headers, they're unreadable by public software.
- Details: only six trace header quantities have mandatory positions and value types in SEG Y ≤ rev 1.0. All public software assumes "recommended" trace header positions, including ours.
* **SEG Y subformats** (I)
+ SEG Y rev 2 is unsupported.
+ Seismic Unix ("SU") is unsupported.
* **SUDS structures** (I,O): structures outside the scope of SeisIO aren't read into memory.
## Issues with Workarounds
* **HDF5 variations in I/O speed** (X): [HDF5.jl issue #609](https://github.com/JuliaIO/HDF5.jl/issues/609). Most combinations of library version and Julia language version have this issue.
+ **Workaround**: Rebuild Julia from source with HDF5 <=v0.12.3.
# **Reporting New Issues**
[Always report issues here](https://github.com/jpjones76/SeisIO.jl/issues). If possible, please include a minimum working example (MWE) and text dump of error(s) thrown (if applicable). GitHub Issues that are in-scope and internal to SeisIO remain open until fixed. Significant external (X) and out-of-scope (O) issues will be added to this document.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 2876 | # **Web Clients and Support**
| Service | Submodule | Command | Method |
|:----- |:----- |:----- |:----- |
| FDSNWS dataselect | | get_data | FDSN |
| FDSNWS event with data | Quake | FDSNevt | |
| FDSNWS event search | Quake | FDSNevq | |
| FDSNWS station | | FDSNsta | |
| IRISWS timeseries | | get_data | IRIS |
| IRISPH5WS dataselect | | get_data | FDSN |
| IRISWS traveltime (TauP) | Quake | get_pha! | |
| SeedLink DATA mode | | seedlink | DATA |
| SeedLink FETCH mode | | seedlink | FETCH |
| SeedLink TIME mode | | seedlink | TIME |
## Column Guide
* **Service** is the name of the service
* **Submodule** is the submodule, if not part of SeisIO core.
+ Access using e.g., `using SeisIO.Quake` for submodule Quake.
* **Command** is the command.
* **Method** is the method positional argument.
+ This is always the first ASCII string in the command
+ Example: *get_data!(S, "FDSN", "CI.ADO..BH?")*
+ Method is case-sensitive and should be all caps
# **List of FDSN Servers**
| src= | Base URL |
|:----- |:----- |
| BGR | http://eida.bgr.de |
| EMSC | http://www.seismicportal.eu |
| ETH | http://eida.ethz.ch |
| GEONET | http://service.geonet.org.nz |
| GFZ | http://geofon.gfz-potsdam.de |
| ICGC | http://ws.icgc.cat |
| INGV | http://webservices.ingv.it |
| IPGP | http://eida.ipgp.fr |
| IRIS | http://service.iris.edu |
| ISC | http://isc-mirror.iris.washington.edu |
| KOERI | http://eida.koeri.boun.edu.tr |
| LMU | http://erde.geophysik.uni-muenchen.de |
| NCEDC | http://service.ncedc.org |
| NIEP | http://eida-sc3.infp.ro |
| NOA | http://eida.gein.noa.gr |
| ORFEUS | http://www.orfeus-eu.org |
| RESIF | http://ws.resif.fr |
| SCEDC | http://service.scedc.caltech.edu |
| TEXNET | http://rtserve.beg.utexas.edu |
| USGS | http://earthquake.usgs.gov |
| USP | http://sismo.iag.usp.br |
# **List of PH5 Servers**
| src= | Base URL |
|:----- |:----- |
|IRISPH5 | https://service.iris.edu/ph5ws/ |
## Notes on Server List
The string in column **src=** is a case-sensitive keyword, all caps, and enclosed in double-quotes: for example, specify ETH with keyword `src="ETH"`, not src=eth or src=ETH.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 5084 | Adding new data structures
# 0. When is This Guide Mandatory?
Any pull request for SeisIO that adds Types
# 1. Supertypes
## GphysChannel
Any structure that contains univariate data and behaves similarly to SeisChannel should have this supertype.
## GphysData
Any structure that contains univariate data and behaves similarly to SeisData should have this supertype. A GphysData subtype should also have a corresponding single-channel version, equivalent to a SeisChannel.
## InstrumentPosition
Any Type describing an instrument's position should have this supertype.
## InstrumentResponse
Any Type describing an instrument response should have this supertype.
# 2. Mandatory Fields
All SeisData fields *except* `:c` (Connections) are assumed to exist in any subtype of GphysData; all SeisChannel fields are assumed to exist in any GphysChannel subtype.
| Name | Description | Recommendations |
|:--- |:--- | :---- |
| `:id` | String ID | Use format "NETWORK.STATION.LOCATION.CHANNEL" |
| `:name` | Channel name | |
| `:loc` | Instrument position | |
| `:fs` | Sampling frequency in Hz | |
| `:gain` | Scalar gain to convert `:x` to `:units`| |
| `:resp` | Instrument response | |
| `:units` | Units | See units API |
| `:src` | Data source | |
| `:misc` | Non-essential info | |
| `:notes` | Notes and logging | |
| `:t` | Time | (time API must be followed)
| `:x` | Univariate data | Allow floating-point data vectors |
Failure to include one or more of these fields will break how your new Type interacts with SeisIO core code.
# 3. Required Method Extensions
For general Types, the following methods must be imported and extended:
```
import Base: ==, isempty, isequal, show, sizeof, summary
```
## 3a. GphysData subtypes
Methods needed:
```
import Base: +, *, ==, append!, convert, deleteat!, getindex,
isempty, isequal, merge!, push!, setindex!, show, size, sizeof,
sort!, summary
import SeisIO: merge_ext!
```
### Required behavior
* Let `C` be a single-channel object of type `T <: GphysChannel`.
* Let `S` be a multichannel object of type `Y <: GphysData`, analogous to a multichannel version of `C`.
The following are required and should be demonstrated in your tests:
* `sort!` uses the `:id` field
* `append!` attaches an object of type Y <: GphysData to `S`
* `push!` attaches `C` to `S` and thereby extends each Array field in S by 1
* `convert`
- Converts `C` to and from Type SeisChannel.
- Converts `S` to and from Type SeisData.
- Include the mandatory fields above.
- If fields aren't stored in the target Type, set them to default values.
- Not all fields are preserved by conversion. In general, `convert(Y, convert(SeisData, S)) != S`.
- Changing this to `==` would require storing your new fields in `:misc`; but relying on keys in `:misc` is non-robust and potentially very slow.
* `isempty` should always return `true` for a newly initialized structure with no keywords passed during initialization.
- For numeric fields, I recommend initializing to 0 or 1 (as appropriate) and testing against the default value in your `isempty` method extension.
- Strings and Arrays are best initialized empty.
- This does imply that the "default" values of keyword init. must be the same values that `isempty` considers "empty".
* If `hash` is extended to a parametric type, it must return the same value for two empty structures of the same Type.
#### Note on `hash`
With apologies, `hash` is unpredictable in Julia. Check the `hash` function documentation and experiment until the above test succeeds. This may take far more time than this paragraph suggests; our apologies.
`hash` must be extended to new subtypes of InstrumentPosition and InstrumentResponse.
An earlier version of this guide erroneously states that `hash` must be extended to all new types.
# 4. Recommendations
## a. Suggested Workflows
### New Subtype
1. Define initialization methods
1. Extend `show`, `summary` (if different from supertype)
1. Extend `isempty`, `isequal`, `sizeof`
1. Extend `read`, `write`
### GphysData Subtype
Starting with the SeisData Type as a skeleton:
1. Define new fields
* Follow "New Subtype" workflow if adding new subtypes of InstrumentPosition and/or InstrumentResponse
1. Follow "New Subtype" workflow
1. Extend `convert!` to/from SeisData
1. Create a single-channel version, like SeisChannel
+ Follow "New Subtype" workflow
+ Extend `convert!` to/from SeisChannel
1. Indexing: extend each of `append!`, `deleteat!`, `getindex`, `push!`, `setindex!`, `sort!`
1. Define `merge_ext!` for the new subtype, to merge any fields not in a SeisData object
1. Add tests
## b. Native File IO
If you want your new Types to be readable/writable with `rseis/wseis`, then you must do the following:
1. Add to the relevant module:
+ `import SeisIO: TNames, TCodes, rseis, wseis`
+ `import Base: read, write`
1. Create low-level `read` and `write` functions for your new Types.
1. Add your Types to Tnames.
1. Generate type codes and add to TCodes.
1. Be aware of the potential for conflict in TCodes with other submodules.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 9677 | # **Data Formats Guide and API**
This guide includes API and expectations for data format readers and parsers.
## **Definitions of Terms**
* **Unix epoch** or **epoch time**: 1970-01-01T00:00:00 (UTC)
* **HDF5**: [Hierarchical Data Format](https://support.hdfgroup.org/HDF5/whatishdf5.html)
# **Requirements**
Any data reader must...
* return a SeisData structure for time-series data.
* return a SeisEvent structure for discrete earthquake data.
* work with one "read" wrapper without adding keywords.
* not modify existing GphysData channels unless reading to them
* not break existing readers by leaving buffers in BUF resized
* extend a channel whose ID matches the data being read into memory
* include tests in *../../test/DataFormats/test_FF.jl* for data format FF
+ Expected code coverage: >95% on both Coveralls and CodeCov
## **Suggestions**
* Import *SeisIO.Formats.formats* and add an appropriate description
* Once your reader works, change low-level file I/O to use *SeisIO.FastRead*
* Use *SeisIO.BUF* for data I/O; see API below
### Adding more formats
Seismology alone has ~10^2 extant file formats; I cannot guess how many more are used by geodesy, volcanology, etc. Before adding support for another one, ask yourself if it's in widespread use. If not, ask yourself if this is a judicious use of your research time.
# **File Reader API**
## **List of Variables**
| V | Meaning | Type in Julia |
|:--- |:---- | :--- |
| A | string array | Array{String, 1} |
| B | byte vector for reading | Array{UInt8, 1} |
| C | single-channel structure | typeof(C) <: SeisIO.GphysChannel |
| D | data read array | AbstractArray |
| L | channel :loc field | typeof(L) <: SeisIO.InstrumentPosition |
| Q | integer array | Array{Y,1} where Y<:Integer |
| R | channel :resp field | typeof(R) <: SeisIO.InstrumentResponse |
| S | multichannel structure | typeof(S) <: SeisIO.GphysData |
| V | output byte array | Array{UInt8, 1} |
| Y | a primitive Type | Type |
| c | ASCII character | Char |
| fc | lower corner frequency | Float64 |
| fs | channel :fs field | Float64 |
| g | channel :gain field | Float64 |
| n | an integer | Integer |
| s | short integer | Int16 |
| str | ASCII string | String |
| t | time [μs] from Unix epoch | Int64 |
| tf | boolean variable | Bool |
| u | 8-bit unsigned integer | UInt8 |
| uu | channel :units field | String |
| x | a Julia double float | Float64 |
| q | any integer | Integer |
| ul | unsigned 32-bit integer | UInt32 |
## **Function API**
`BUF`
SeisIO static structure containing arrays for buffered file reads. See **SeisIO.BUF API** below.
`ChanSpec`
Type alias to *Union{Integer, UnitRange, Array{Int64, 1}}*
`i = add_chan!(S::GphysData, C::GphysChannel, strict::Bool)`
Add channel *C* to *S*. If *C.id* matches a channel ID in *S*, data and times from *C* are added and the remaining information is discarded. Use *strict=true* to match channels on more than *:id*. Returns the index of the matching channel.
`i = channel_match(S::GphysData, i::Integer, fs::Float64)`
Check that *fs* matches *S.fs[i]*. Returns the index of the matching channel (if one exists) or 0 (if no match).
```
i = channel_match(S::GphysData, i::Integer, fs::Float64,
g::AbstractFloat, L::InstrumentPosition, R::InstrumentResponse
uu::String)
```
Test that *S[i]* matches *fs*, *g*, *L*, *R*, and *uu*. If successful, returns *i*; if not, returns 0.
`c = getbandcode(fs::Float64; fc::Float64 = 1.0)`
Get FDSN-compliant band code (second letter of channel designator) for sampling frequency *fs* Hz with nominal lower instrument corner frequency *fc*. Returns the one-character code.
`A = split_id(id::AbstractString; c::String=".")`
Split *id* on delimiter *c*, always returning a length-4 String array containing the pieces. Incomplete IDs have their remaining fields filled with empty strings.
`str = fix_units(str::AbstractString)`
Replace *str* with UCUM-compliant unit string via Dict lookup.
`str = units2ucum(str::String)`
Replace *str* with UCUM-compliant unit string via substitution.
`tf = is_u8_digit(u::UInt8)`
Returns *true* if 0x2f < *u* < 0x3a, i.e., if *u* represents an ASCII character in the range '0'-'9'.
`fill_id!(B::Array{UInt8,1}, cv::Array{UInt8,1}, i::T, i_max::T, j::T, j_max::T)`
Fill id vector *B* from char vector *cv*, starting at *cv[i]* and *B[j]* and ending at *cv[i_max]* or *B[j_max]*, whichever is reached first.
`n = checkbuf!(B::Array{UInt8,1}, q::Y1, Y::Type) where Y1<:Integer`
Check that *B* can read at least *q* values of data type *Y*. Returns the new buffer size in bytes.
`checkbuf!(D::AbstractArray, q::Y) where Y<:Integer`
Calls *resize!(D, q)* if *q* > *length(D)*; otherwise, leaves *D* untouched.
`checkbuf_strict!(D::AbstractArray, q::Y) where Y<:Integer`
Calls *resize!(D, q)* if *q* != *length(D)*
`checkbuf_8!(B::Array{UInt8,1}, q::Integer)`
Check that *B* can hold a number of bytes equal to the first value *q1* > *q* that satisfies the equation *mod(q1, 8)* = 0.
### **Low-Level Parsers**
These functions all fill *x[os]:x[os+q]* from bytes buffer *B*. The function names completely describe the read operation:
* the character after the first underscore is the data type expected: 'i' for integer, 'u' for unsigned
* the next number is the number of bits per value: 4, 8, 16, 24, or 32
* the string after the second underscore gives the endianness: "le" for little-endian, "be" for big-endian
```
function fillx_i4!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i8!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i16_le!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i16_be!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i24_be!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i32_le!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_i32_be!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_u32_be!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
function fillx_u32_le!(x::AbstractArray, B::Array{UInt8,1}, q::Integer, os::Int64)
```
The above data types include every integer encoding that we've encountered in real data.
### **Byte-wise ASCII Parsers**
Use these to parse ASCII data. They're generally faster than `parse()`.
`u = buf_to_uint(B::Array{UInt8,1}, q::Integer)`
Parse characters in *B* to an unsigned Int64, to a maximum position in *B* of *q*.
`s = buf_to_i16(B::Array{UInt8,1}, s₀::Int16, s₁::Int16)`
Parse characters in *B* to create a 16-bit signed integer, starting at position *s₀* in *B* and ending at position *s₁*.
`x = buf_to_double(B::Array{UInt8,1}, n::Int64)`
Parse characters in *B* to create a Float64, to a maximum position in *B* of *n*.
`ul1, ul2 = parse_digits(io::IO, u_in::UInt8, u_max::UInt8)`
Parse *io* one byte at a time until reaching a non-digit character, returning two unsigned 32-bit integers. A maximum of *u_max* characters will be parsed.
`n = stream_int(io::IO, nᵢ::Int64)`
Parse a maximum of *nᵢ* bytes from *io*, creating a 64-bit integer *n* from the character bytes.
`x = stream_float(io::IO, u_in::UInt8)`
Parse *io* to create a single-precision float. Can parse many degenerate float strings, like "3F12".
`t = stream_time(io::IO, Q::Array{Y,1}) where Y<:Integer`
Parse characters in *io* to fill an array *Q* of time values, then convert to integer μs measured from the Unix epoch.
`t = string_time(str::String, Q::Array{Y,1}) where Y<:Integer`
`t = string_time(str::String)`
Wrap *str* in an IOBuffer and call *stream_time* on the buffer. Returns a time in integer μs measured from the Unix epoch. If only one argument is supplied, *string_time* buffers to *BUF.date_buf*.
## **Usable Buffers**
| field_name | Type | Used By | resize_to |
|:--- |:--- |:--- | ---:|
| buf | UInt8 | everything | 65535 |
| int16_buf | Int16 | SEG Y | 62 |
| int32_buf | Int32 | SEGY, Win32 | 100 |
| int64_buf | Int64 | rseis/wseis | 6 |
| sac_cv | UInt8 | AH, SAC | 192 |
| sac_fv | Float32 | SAC | 70 |
| sac_iv | Int32 | SAC | 40 |
| uint16_buf | UInt16 | SEED | 6 |
| uint32_buf | UInt32 | mini-SEED | 16384 |
| x | Float32 | everything | 65535 |
* Call `checkbuf!`, `checkbuf_8!`, or `checkbuf_strict!` before use as needed
* Call `resize!(BUF.field_name, resize_to)` when done
## **Unusable Buffers**
calibs, date_buf, dh_arr, flags, hdr, hdr_old, id, seq
These buffers are part of the SEED reader.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 3877 | # Fundamental Rule
Logging to *:notes* must contain enough detail that someone who reads *:notes* can replicate the work, starting with reading raw data, by following the steps described.
## **Definitions of Terms**
* **automated** means any change that results from a function, rather than a command entered at the Julia prompt.
* **metadata** includes any field in any structure whose Type is a subtype of GphysData or GphysChannel, *except* these fields: *:t*, *:x*, *:misc*, *:notes*
## General Note Structure
* First field: timestamp, formatted YYYY-MM-DDTHH:MM:SS
* Second field:
- For time-series read or download, "+src"
- For data processing functions: "processing"
- For data analysis functions: "analysis"
- For write operations, "write"
- For metadata read or download, "+meta"
* Third field: function call
* Fourth field: (optional) human-readable description
### Expected field delimiter
" ¦ ", including spaces. Please note that this is Char(0xa6), not Char(0x7c).
# File I/O Logging API
`fread_note!(S::GphysData, N::Array{Int64,1}, method::String, fmt::String, filestr::String, opts::String)`
Log file read information to *:notes* in channels *S[N]*. *method* is the invoking function name, *fmt* is the format string, *filestr* is the file string. *opts* is a comma-separated String list of arguments, including keywords, like `"swap=true, full=true"`.
`fwrite_note!(S::GphysData, i::Int64, method::String, fname::String, opts::String)`
Log file write operation to *S.notes[i]* or *C.notes*. *method* is the name of the invoking function; *opts* is a dynamically-created comma-separated list of arguments, including keywords, with an initial comma, like `", fname=\"foo.out\", v=3"`.
# Processing/Analysis Logging API
Here, it's **not** necessary to correctly name the variable used for the input structure. Instead, use **S** for GphysData subtypes, **C** for GphysChannel subtypes, and **Ev** for Quake.SeisEvent.
`proc_note!(S::GphysData, N::Array{Int64, 1}, proc_str::String, desc::String)`
Log processing operation to *:notes* in channels *S[N]*. *proc_str* is a dynamic String of the full function call including relevant arguments and keywords, like `"unscale!(S, chans=[1,2,3])"`. *desc* should be a human-readable description, like `"divided out channel gain"`.
`proc_note!(S::GphysData, i::Int64, proc_str::String, desc::String)`
As above for *S.notes[i]*.
`proc_note!(C::GphysChannel, method::String, desc::String)`
As above for *C.notes*.
# Downloads and Streaming Data
## Syntax:
* `note!(S, i, "+source ¦ " * url)` for GphysData subtypes
* `note!(C, "+source ¦ " * url)` for GphysChannel subtypes
## What to Log
1. The URL, with "+source" as the second field.
2. Any submission info required for data transfer: POST data, SeedLink command strings, etc.
* The second field of the note should be a descriptive single-word string: "POST" for HTTP POST methods, "commands" for SeedLink commands, etc.
* Include only the relevant commands to acquire data for the selected channel.
### Example: HTTP POST request
```
2019-12-18T23:17:28 ¦ +source ¦ https://service.scedc.caltech.edu/fdsnws/station/1/
2019-12-18T23:17:28 ¦ POST ¦ CI BAK -- LHZ 2016-01-01T01:11:00 2016-02-01T01:11:00\n
```
# Automated Metadata Changes
## Syntax:
* First field: timestamp, formatted YYYY-MM-DDTHH:MM:SS
* Second field: "+meta", no quotes
* Third field: function call
* Fourth field: (optional) human-readable description
For file strings, we strongly recommend using `abspath(str)` to resolve the absolute path.
Example: `2019-12-18T23:17:30 ¦ +meta ¦ read_meta("sacpz", "/data/SAC/testfile.sacpz")`
# Field `:src`
`:src` should always contain the most recent time-series data source.
## File Source
`:src` should be the file pattern string, like "/data/SAC/test*.sac".
## Download or Streaming Source
`:src` should be the request URL.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 4023 | Code for data processing or analysis must conform to additional specifications for inclusion in SeisIO.
# Don't assume ideal objects
Your code must handle (or skip, as needed) channels in `GphysData` subtypes (and/or `GphysChannel` subtypes) with undesirable features. Examples from our tests include:
* irregularly-sampled data (`S.fs[i] == 0.0`), such as campaign-style GPS, SO₂ flux, and rain gauge measurements.
* channels with a time gap before the last sample (`S.t[end, 2] != 0`)
* data with (potentially very many) time gaps of arbitrary lengths.
- Example: one of the mini-SEED test files comes from Steve Malone's [rebuilt Mt. St. Helens data](https://ds.iris.edu/ds/newsletter/vol16/no2/422/very-old-mount-st-helens-data-arrives-at-the-dmc/). Because the network was physically dangerous to maintain, each SEED volume spans several months with >100 time gaps, ranging in size from a few samples to a week and a half. Such gaps are _very_ common in records that predate the establishment of dedicated scientific data centers.
* segments and channels with very few data points (e.g. a channel `i` with `length(S.x[i]) < 10`)
* data that are neither seismic nor geodetic (e.g. timing, radiometers, gas flux)
* empty or unusual `:resp` or `:loc` fields
You don't need to plan for PEBKAC errors, but none of the cases above are mistakes.
## Skip channels that can't be processed
For example, one can't bandpass filter irregularly-sampled data in a straightforward way; even *approximate* filtering requires interpolating to a regularly-sampled time series, filtering that, and extracting results at the original sample times. That's a cool trick to impress one's Ph.D. committee, but is there a demand for it...?
## Leave unprocessed data alone
Never alter or delete unprocessed data. We realize that some code requires very specific data (for example, three-dimensional trace rotation requires a multicomponent seismometer); in these cases, use `getfield`, `getindex`, and utilities like `get_seis_channels` to select applicable channels.
# Don't assume a work flow
If a function assumes or requires specific preprocessing steps, the best practice is to add code to the function that checks `:notes` for prerequisite steps and applies them as needed.
## Tips for selecting the right data
In an object that contains data from more than one instrument type, finding the right channels to process is non-trivial. For this reason, whenever possible, SeisIO follows [SEED channel naming conventions](http://www.fdsn.org/seed_manual/SEEDManual_V2.4_Appendix-A.pdf) for the `:id` field. Thus, there are at least two ways to identify channels of interest:
1. Get the single-character "channel instrument code" for channel `i` (`inst_codes` does this efficiently). Compare to [standard SEED instrument codes](https://ds.iris.edu/ds/nodes/dmc/data/formats/seed-channel-naming/) and build a channel list, as `get_seis_channels` does.
- This method can break on instruments whose IDs don't follow the SEED standard.
- Channel code `Y` is opaque and therefore ambiguous; beware matching on it.
2. Check `:units`. See the [units guide](./units.md). This is usually safe, but can be problematic in two situations:
- Some sources report units in "counts" (e.g., "counts/s", "counts/s²"), because the "stage zero" gain is a unit conversion.
- Some units are ambiguous; for example, displacement seismometers and displacement GPS both use distance (typically "m").
# Log function calls to `:notes`
Please see the [logging API](./logging.md)
# No unpublished algorithms in SeisIO core
Place research-level code in separate packages
<!-- `ii = get_unique(S::GphysData, A::Array{String,1}, chans::ChanSpec)`
Get groups of channel indices in `S` that match the strings in `A`. `A` must contain either string field names in `S` (like "fs" or "gain"), or strings describing functions applicable to `S.x` (like "eltype" or "length").
Returns an array of Integer arrays; each subarray contains the indices of one group. -->
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 1263 | A short guide to adding and structuring submodules.
# Naming
Submodule names can't contain spaces or punctuation.
# Tree Structure
| Path | Description |
|--- |--- |
| src/[Name].jl | Submodule definition file read by SeisIO.jl |
| src/Submodules/[Name] | Path to submodule [Name] |
| src/Submodules/[Name]/imports.jl | `import` statements |
You don't need to plan for PEBKAC errors, but none of the cases above are mistakes.
# Submodule or Core Code?
A submodule is recommended whenever the alternative is spaghetti code. For example:
* Many helper or utility functions that aren't useful elsewhere (RandSeis, SeisHDF, SEED, SUDS)
* Many new Type definitions (Quake, SEED, SUDS)
* Significant functionality other than I/O and processing (Quake)
* Supports both time-series data and event data (SUDS, UW)
# Submodule or Separate Package?
Please create a separate package if your code meets any of the following criteria:
* Dependencies not in Project.toml
* Performs analysis that isn't preprocessing (e.g., tomography)
* Computes derived quantities from time series (e.g., seismic attributes)
* Only used in one subfield of geophysics
* Code focuses on a specific data type (e.g., strain)
* Research code, including prototyping and unpublished algorithms
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 17738 | # **SeisIO Time Guide and API**
This guide describes how to use the time field `:t` of any GphysData (multichannel) or GphysChannel (single-channel) structure in SeisIO and its submodules. This includes, but is not limited to, SeisData and SeisChannel structures in SeisIO core. Future subtypes of GphysData and GphysChannel will also conform to the standards established in this guide.
All data described herein are univariate and discretely sampled.
# **SeisIO Time Matrices**
## **List of Variables**
| V | Meaning | Julia Type in SeisIO |
|:--- |:---- | :--- |
| C | single-channel structure | typeof(C) <: SeisIO.GphysChannel |
| S | multichannel structure | typeof(S) <: SeisIO.GphysData |
| T | SeisIO time matrix, `:t` | Array{Int64, 2} |
| X | SeisIO data vector, `:x` | Union{Vector{Float32}, Vector{Float64}} |
| end | last index of a dimension | Integer |
| fs | sampling frequency [Hz] | Float64 |
| i | channel index | Integer |
| j | sample index | Integer |
| k | row index in time matrix | Integer |
| t | time | Int64 |
| Δ | sampling interval [μs] | Int64 |
| δt | time gap or time jump | Int64 |
## **Definitions of Terms**
* **Unix epoch** or **epoch time**: 1970-01-01T00:00:00 (UTC)
* **single-channel structure**: a structure that can contain only discrete univariate data from a single channel of a single instrument. The guidelines below apply to single-channel structures by assuming a channel index subscript value of *i* = 1.
* **multichannel structure**: a structure that can contain discrete univariate data from multiple channels of multiple instruments.
* **time-series**: a vector *Xᵢ* of univariate data sampled at a regular interval *Δᵢ*.
+ SeisIO convention: data are time-series if `fsᵢ > 0.0`
* **irregular**: univariate data sampled at discrete times. Short for "irregularly-sampled".
+ SeisIO convention: data are irregular if `fsᵢ == 0.0`
* **gap**: a significant deviation in **time-series** *Xᵢ* from the regular sampling interval *Δᵢ*.
  + Formal definition: for values *Xᵢⱼ₋₁* and *Xᵢⱼ* sampled at times *tᵢⱼ₋₁*, *tᵢⱼ*, *δt* ≡ *tᵢⱼ* - *tᵢⱼ₋₁* - *Δᵢ*
    * A gap before sample *Xᵢⱼ* is considered significant in SeisIO if sample times *tᵢⱼ₋₁*, *tᵢⱼ* satisfy the inequality |*tᵢⱼ* - *tᵢⱼ₋₁* - *Δᵢ*| > 0.5 *Δᵢ*.
+ In SeisIO, time gaps can be positive or negative.
* **segment**: a contiguous set of indices *j₀*:*j₁* in **time-series** *Xᵢ* with *j₁* ≥ *j₀* and no **gap** between *Xᵢⱼ₀* and *Xᵢⱼ₁*.
+ If *j₁* > *j₀*, every pair of adjacent samples (*Xᵢⱼ*, *Xᵢⱼ₊₁*), whose indices satisfy the inequality *j₀* ≤ *j* < *j* + 1 ≤ *j₁*, satisfies the property *tᵢⱼ₊₁* - *tᵢⱼ* = *Δᵢ* to within the absolute precision of a **gap**, i.e., ≤0.5 *Δᵢ*.
* Sample times are generally much more precise than ±0.5 *Δᵢ* with modern digital recording equipment.
* Time gaps with absolute deviations ≤ 0.5 *Δᵢ* from sampling interval *Δᵢ* are discarded by SeisIO readers.
## **Definition of SeisIO Time Matrix**
A two-column Array{Int64,2}, in which:
* `Tᵢ[:,1]` are monotonically increasing indices *j* in *Xᵢ*
* `Tᵢ[:,2]` are time values in μs
### **Time-series time matrix**
#### `Tᵢ[:,1]`
* `Tᵢ[1,1] == 1`
* `Tᵢ[end,1] == length(Xᵢ)`
* `Tᵢ[k,1] < Tᵢ[k+1,1]` for any *k* (row index *k* increases monotonically with data index *j*)
* `Tᵢ[2:end-1,1]`: each sample is the first index in *Xᵢ* after the corresponding time gap in the second column
#### `Tᵢ[:,2]`
* `Tᵢ[1,2]` is the absolute time of *X₁* measured from Unix epoch
+ `Tᵢ[1,2]` is usually the earliest sample time; however, if the time segments in *Xᵢ* aren't in chronological order, this isn't necessarily true.
* `Tᵢ[2:end,2]` are time gaps *δt* in *Xᵢ*, defined *δt* ≡ *tᵢⱼ* - *tᵢⱼ₋₁* - *Δᵢ*.
+ **Important**: for time gap *δt* = *Tᵢ[k,2]* at index *j = Tᵢ[k,1]* in *Xᵢ*, the total time between samples *Xᵢⱼ₋₁* and *Xᵢⱼ* is *δt + Δᵢ* not *δt*. This may differ from time gap representations in other geophysical software.
+ `Tᵢ[end,2]` is usually the latest sample time; however, if the time segments in *Xᵢ* aren't in chronological order, this isn't necessarily true.
#### How gaps are logged
1. A gap is denoted by a row in *Tᵢ* whenever samples *Xᵢⱼ₋₁*, *Xᵢⱼ* have sample times *tᵢⱼ₋₁*, *tᵢⱼ* that satisfy |*tᵢⱼ* - *tᵢⱼ₋₁* - *Δᵢ*| > 0.5 *Δᵢ*.
2. Both negative and positive time gaps are logged.
#### Examples
* `Tᵢ[1,2] = 0` occurred at *tᵢ* = 1970-01-01T00:00:00.00000 (UTC).
* `Tᵢ[1,2] = 1559347200000000` occurred at tᵢ = 2019-06-01T00:00:00.000000.
* `Tᵢ[k,:] = [12 1975000]` at *fsᵢ* = 40.0 Hz is a time gap of 2.0 s before the 12th sample of *Xᵢ*.
+ Check: `g = 1975000; d = round(Int64,1.0e6/40.0); (g+d)/1.0e6`
* `Tᵢ[k,:] = [31337 -86400000000]` at *fsᵢ* = 100.0 Hz is a time gap of -86399.99 s before the 31337th sample of *Xᵢ*.
+ Check: `g = -86400000000; d = round(Int64,1.0e6/100.0); (g+d)/1.0e6`
- Substituting `1.0e-6*(g+d)` for the last expression is slightly off due to floating-point rounding.
* `Tᵢ = [1 1324100000000000; 8640000 0]` at *fsᵢ* = 100.0 Hz is a one-segment time matrix.
* `Tᵢ = [1 1401000000000002; 100001 9975000; 200001 345000; 300000 0]` at *fsᵢ* = 40.0 Hz is a three-segment time matrix.
* `Tᵢ = [1 1324100000000000; 8640000 50000]` at *fsᵢ* = 100.0 Hz is a two-segment time matrix.
+ There is a gap of *δ* = 50000 μs between samples *j* = 8639999 and *j* = 8640000.
+ The second segment is one sample long; it starts and ends at *j* = 8640000.
+ The last two samples were recorded 0.06 s apart.
* `Tᵢ = [1 1559347200000000; 31337 -86400010000; 120000 0]` at *fsᵢ* = 100.0 Hz is a two-segment time matrix where the first sample is not the earliest.
  + The earliest sample is *j* = 31337, recorded at 2019-05-31T00:05:13.35 (UTC).
+ If the data segments were in chronological order, the equivalent time matrix would be `Tᵢ = [1 1559261113350000; 88665 85200010000; 120000 0]`.
* Recording a gap:
+ *tᵢⱼ₋₁* = 1582917371000000
+ *tᵢⱼ* = 1582917371008000
+ *Δᵢ* = 20000 μs
+ *δt* = 1582917371008000 - 1582917371000000 - Δᵢ = -12000 μs
+ The gap in *Tᵢ* before sample *j* is denoted by the row `[j -12000]`
#### Notes
* A time matrix `Tᵢ` with no time gaps has the property `size(Tᵢ,1) == 2`.
* A time gap in `Tᵢ[end,2]` is acceptable. This indicates there was a time gap before the last recorded sample.
* In a time-series time matrix, `Tᵢ[1,2]` and `Tᵢ[end,2]` are the only values that should ever be 0. Length-0 gaps in other rows are undefined and untested behavior; they also create display issues with `show`.
### **Irregular time matrix**
#### `Tᵢ[:,1]` expected behavior
* `Tᵢ[1,1] == 1`
* `Tᵢ[end,1] == length(Xᵢ)`
* `Tᵢ[:,1]` has the same length as *Xᵢ*
* `Tᵢ[k,1]` < `Tᵢ[k+1,1]` for any `k` (row index `k` increases monotonically with data index `j`)
* `Tᵢ[k,1] = k`; in other words, row number corresponds to index `k` in *Xᵢ*
+ These are technically redundant, as all current GphysData subtypes use linear indexing for *Xᵢ*
#### `T[:,2]` expected behavior
* `Tᵢ[k,2]` is the sample time of `Xᵢ[k]` in μs relative to the Unix epoch
## **Converting to Absolute Time**
Internal functions for working with time matrices are in `CoreUtils/time.jl`. An API for working with them is given below.
### **Common use cases**
The most common functions needed to manipulate time matrices are covered by:
`import SeisIO: t_expand, t_collapse, t_win, w_time, tx_float`
If the absolute time of each sample in `S.x[i]` is required, try the following commands:
```
using SeisIO
import SeisIO: t_expand
t = S.t[i]
fs = S.fs[i]
tx = t_expand(t, fs) # Int64 vector of sample times (in μs)
dtx = u2d.(tx.*1.0e-6) # DateTime vector of sample times
stx = string.(u2d.(tx.*1.0e-6)) # String vector of sample times
```
## **Obsolescence**
The SeisIO time matrix system will remain accurate at 64-bit precision until 5 June 2255. At later dates it will become increasingly unusable, as 64-bit floating-point representation of integer μs will become increasingly imprecise. Please plan to discontinue use before that date.
Check: `using Dates; unix2datetime(1.0e-6*maxintfloat())`
# **Time API**
## **List of Variables**
| V | Meaning | Type in Julia |
|:--- |:---- | :--- |
| A | array of Int64 time vals | Array{Int64, 1} |
| B | array of Int32 time vals | Array{Int32, 1} |
| C | single-channel structure | typeof(C) <: SeisIO.GphysChannel |
| D | Julia DateTime structure | DateTime |
| H | hex-encoded UInt8 array | Array{UInt8, 1} |
| M | month | Integer |
| S | multichannel structure | typeof(S) <: SeisIO.GphysData |
| T | SeisIO time matrix | Array{Int64, 2} |
| Tf | floating-point time vector| Array{Float64, 1} |
| Tx | expanded time vector | Array{Int64, 1} |
| W | time window matrix | Array{Int64, 2} |
| c | fractional seconds | Integer |
| d | day of month | Integer |
| fs | sampling frequency [Hz] | Float64 |
| h | hour | Integer |
| i | channel index | Int64 |
| j | Julian day (day of year) | Integer |
| k | row index in time matrix | Integer |
| m | minute | Integer |
| n | a 64-bit integer | Int64 |
| r | real number | Real |
| s | second | Integer |
| str | ASCII string | String |
| t | time value | Int64 |
| ts | time spec | TimeSpec |
| u | hex-encoded 8-bit uint | UInt8 |
| x | a Julia double float | Float64 |
| xj | x-indices of segments | Array{Int64, 2} |
| y | four-digit year | Int64 |
| yy | two-digit hex year part | UInt8 |
| Δ | sampling interval [μs] | Int64 |
| μ | microsecond | Integer |
## **Function API**
`TimeSpec`
Type alias to `Union{DateTime, Real, String}`
`check_for_gap!(S::GphysData, i::Int64, t::Int64, n::Integer, v::Integer)`
Check for gaps between the end of *S.t[i]* and time *t*. Assumes the data
segment being added is length-*n*. Thin wrapper to *t_extend*.
`x = d2u(D::DateTime)`
Alias to `Dates.datetime2unix`
`datehex2μs!(A::Array{Int64, 1}, H::Array{UInt8, 1})`
Unpack datehex-encoded time values from length-8 datehex array *H* to length-6 array *A*. Assumes *H* is of the form *[yy1, yy2, M, d, h, m, s, c]* where *c* here is in hundredths of seconds.
`t = endtime(T::Array{Int64, 2}, Δ::Int64)`
`t = endtime(T::Array{Int64, 2}, fs::Float64)`
Compute the time of the last sample in *T* sampled at interval *Δ* [μs] or frequency *fs* [Hz]. Output is integer μs measured from the Unix epoch.
`s = int2tstr(t::Int64)`
Convert time value *t* to a String.
`M,d = j2md(y, j)`
Convert year *y* and Julian day (day of year) *j* to month *M*, day *d*.
`j = md2j(y, M, d)`
Convert month *M*, day *d* of year *y* to Julian day (day of year) *j*.
`mk_t!(C::GphysChannel, n::Integer, t::Int64)`
Initialize SeisIO time matrix *C.t* for *n*-sample data vector *C.x* to start at *t* in integer μs from the Unix epoch.
`mk_t(n::Integer, t::Int64)`
Create new SeisIO time matrix *T* for an *n*-sample data vector starting at *t* in integer μs from the Unix epoch.
`t = mktime(y::T, j::T, h::T, M::T, s::T, μs::T) where T<:Integer`
Convert *y*, *j*, *h*, *m*, *s*, *μ* to integer μs from the Unix epoch.
`t = mktime(A::Array{T, 1}) where T<:Integer`
Convert values in *A* to total integer μs from the Unix epoch. Expected format of *A* is *[y, j, h, m, s, μ]*.
`(str0, str1) = parsetimewin(ts1::TimeSpec, ts2::TimeSpec)`
Convert *ts1*, *ts2* to String, and sort s.t. *DateTime(str0)* < *DateTime(str1)*. See **TimeSpec API** below.
`t = starttime(T::Array{Int64, 2}, Δ::Int64)`
`t = starttime(T::Array{Int64, 2}, fs::Float64)`
Get the time of the first sample in SeisIO time matrix `T`, sampled at
interval `Δ` [μs] or frequency `fs` [Hz]. Output is integer μs measured from
the Unix epoch.
`t_arr!(B::Array{Int32,1}, t::Int64)`
Convert *t* to *[y, j, h, m, s, c]*, overwriting the first 6 values in *B* with the result. Here, *c* is milliseconds.
`T = t_collapse(Tx::Array{Int64, 1}, fs::Float64)`
Create a time matrix from times in *Tx* sampled at *fs* Hz. For input matrix *Txᵢ*, the time *t* of each index *j* is the sample time of *Xᵢⱼ* measured relative to the Unix epoch.
`Tx = t_expand(T::Array{Int64, 2}, fs::Float64)`
Create a vector of sample times *Tx* starting at *T[1,2]* for a data vector *X* sampled at *fs* Hz.
`str = tstr(D::DateTime)`
Convert DateTime *D* to String *str*. The output format follows ISO 8601 code `YYYY-MM-DDThh:mm:ss`.
`D = u2d(r::Real)`
Alias to `Dates.unix2datetime`
`T = t_extend(T::Array{Int64,2}, t_new::Int64, n_new::Int64, Δ::Int64)`
`T = t_extend(T::Array{Int64,2}, t::Integer, n::Integer, fs::Float64)`
Extend SeisIO time matrix *T* sampled at interval *Δ* μs or frequency *fs* Hz. For matrix *Tᵢ*:
* *t_new* is the start time of the next segment in data vector *Xᵢ*
* *n_new* is the expected number of samples in the next segment of *Xᵢ*
This function has a mini-API below.
`W = t_win(T::Array{Int64, 2}, Δ::Int64)`
`W = t_win(T::Array{Int64, 2}, fs::Float64)`
Convert time matrix *T* for data sampled at interval Δ [μs] or frequency fs [Hz] to a time window matrix *W* of segment times, measured from the Unix epoch. Window *k* starts at *W[k,1]* and ends at *W[k,2]*.
`str = timestamp()`
Return a String with the current time, with ISO 8601 format code `YYYY-MM-DDThh:mm:ss.s`. Equivalent to `tstr(now(UTC))`.
`str = timestamp(ts::Union{DateTime, Real, String})`
Alias to `tstr(t)`
`str1 = tnote(str::String)`
Create prefix of timestamped note; alias to `string(timestamp(), " ¦ ", str)`
`t = tstr2int(str::String)`
Convert time string *str* to μs. *str* must follow [Julia expectations](https://docs.julialang.org/en/v1/stdlib/Dates/) for DateTime input strings.
`Tf = tx_float(T::Array{Int64, 2}, fs::Float64)`
Convert time matrix *T* sampled at frequency *fs* Hz to an array of Float64 sample times in μs relative to the Unix epoch. For input matrix *Tᵢ* the time value *t* of each index *j* in *Tf* is the sample time of *Xᵢⱼ* measured relative to the Unix epoch.
`n = unpack_u8(u::UInt8)`
Unpack a datehex-encoded UInt8 to Int64. In this encoding, the UInt8 representation *u=0xYZ* uses *Y* for the first digit and *Z* for the second; for example, 0x22 is the number 22.
`T = w_time(W::Array{Int64, 2}, Δ::Int64)`
Convert time window matrix *W* of data sampled at interval *Δ* [μs] or frequency *fs* [Hz] to a SeisIO time matrix.
`xj = x_inds(T::Array{Int64, 2})`
Get *x*-indices *j* corresponding to the start and end of each segment in *T*. Total number of segments is *size(xj, 1)*. Segment *k* starts at index *xj[k,1]* and ends at *xj[k,2]*.
`t = y2μs(y::Integer)`
Convert year *y* to integer μs from the Unix epoch.
## **TimeSpec API**
Functions that allow time specification use two reserved keywords or arguments to track time:
* *s*: Start (begin) time
* *t*: Termination (end) time
A TimeSpec can be any Type in Union{Real, DateTime, String}.
* Real numbers are interpreted as seconds
* **Caution**: not μs; not measured from Epoch time
* Exact behavior is given in the table below
* DateTime values should follow [Julia documentation](https://docs.julialang.org/en/v1/stdlib/Dates/).
* Strings should conform to [ISO 8601](https://www.w3.org/TR/NOTE-datetime) *without* the time zone designator.
+ ISO 8601 expected format spec: `YYYY-MM-DDThh:mm:ss.s`
+ Fractional second is optional and accepts up to 6 decimal places (μs)
+ Julia support for ISO 8601 time zones is NYI
+ Equivalent Unix `strftime` format codes: `%Y-%m-%dT%H:%M:%S`, `%FT%T`
* Example: `s="2016-03-23T11:17:00.333"`
When start and end times are both specified, they are sorted, so passing `t` < `s` does not cause an error.
### **parsetimewin Behavior**
In all cases, parsetimewin outputs a pair of strings, sorted so that the first string corresponds to the earlier start time.
| typeof(s) | typeof(t) | Behavior |
|:------ |:------ |:------------------------------------- |
| DateTime | DateTime | convert to String, then sort |
| DateTime | Real | add *t* seconds to *s*, convert to String, sort |
| DateTime | String | convert *s* to String, then sort |
| Real | DateTime | add *s* seconds to *t*, convert to String, sort |
| Real | Real | treat as relative, convert to String, sort |
| Real | String | add *s* seconds to *t*, convert to String, sort |
| String | DateTime | convert *t* to String, then sort |
| String | Real | add *t* seconds to *s*, convert to String, sort |
| String | String | sort |
Special behavior with (Real, Real): *s* and *t* are converted to seconds from the start of the current minute.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 1694 | SeisIO `:units` strings
# Units use case-sensitive UCUM
Please see the relevant guidelines:
* [Unified Code for Units of Measure](https://en.wikipedia.org/wiki/Unified_Code_for_Units_of_Measure) and references therein
* [BIPM](https://www.bipm.org/utils/common/pdf/si-brochure/SI-Brochure-9.pdf)
* [NIST SI guidelines](https://www.nist.gov/pml/special-publication-811/nist-guide-si-chapter-6-rules-and-style-conventions-printing-and-using), chapters 5-7
## Units do not use Fortran strings
Because it is not 1970
## Allowed non-SI units
Allowed non-SI unit strings are found in these tables:
* [BIPM SI brochure, Table 4.1](https://www.bipm.org/utils/common/pdf/si-brochure/SI-Brochure-9.pdf)
* [NIST SI guidelines, Table 6](https://www.nist.gov/pml/special-publication-811/nist-guide-si-chapter-5-units-outside-si)
# Common Issues
## Units formed by multiplication
Indicate multiplication of units with a single period (`.`), e.g., `N.m` for Newton meters.
## Units formed by division
Indicate division of units with an oblique stroke (`/`), e.g. `m/s` for meters per second.
## Temperature
* Don't use a degree symbol: e.g., `K` is OK, but not `°K`.
* The UCUM abbreviation for Celsius is `Cel`, not `C`.
## Powers and Exponents
* A simple integer power is denoted by an integer following a unit, e.g., `"m/s2"` is "meters per second squared", not `"m/s^2"`, `"m/s**2"`, `"m s**-2"`, ... *ad nauseam*.
* Express negative powers in unit strings using `/` to separate the numerator from the denominator, e.g., `"m/s2"`, not `"m s-2"`.
# Converting to/from UCUM units syntax
See the SeisIO utility `units2ucum` to convert units to UCUM syntax; use the SeisIO utility `vucum` to validate strings for UCUM compliance.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 9817 | # SeisIO v0.3: **Typeageddon!**
2019-06-04
The main purpose of this release is to finalize custom Types; major changes to Types are *extremely* unlikely after this release; minor changes will only happen in response to significant demands. The Type restructuring below serves two main purposes:
1. allow a range of predetermined styles for fields with no universal standard (e.g. location)
2. improve user experience
Descriptions of Type fields and meanings can be found in the help text of each Type.
Low-level descriptions of how each Type is stored on file can be found in the SeisIO documentation.
## Full switch to read_data
Readers for individual file formats (e.g. `readsac`) have been deprecated (as warned about in the notes to SeisIO-0.2.0). Please use `read_data` instead.
## Changes to SeisData, SeisChannel
* An abstract Type, `GphysData`, is now the supertype of `SeisData`. This will allow variants of `SeisData` objects to be added easily.
* For similar reasons, an abstract Type, `GphysChannel`, is now the supertype of `SeisChannel`.
* The `:loc` field now uses custom types designed for one location format each; current options are:
+ `GeoLoc`: geographic instrument position using lat/lon
+ `UTMLoc`: location in UTM coordinates (zone, Northing, Easting)
+ `XYLoc`: location in local (x-y-z) coordinates relative to a georeferenced origin
+ `GenLoc`: generic location
+ It is not necessary for all channels in a SeisData object to use the same location Type.
* The `:resp` field now uses custom types designed for instrument response formats. Current options are:
+ `PZResp`: pole-zero response with Float32 fields `:c` (damping constant), `:p` (complex poles), `:z` (complex zeros).
+ `PZResp64`: pole-zero response with Float64 precision; same field names as `PZResp`.
+ `GenResp`: generic instrument response object comprising a descriptive string, `:desc`, and a complex Float64 matrix, `:resp`.
## Quake submodule
All Types and functions related to handling of discrete earthquake events have been moved to a new submodule, SeisIO.Quake. This includes several (former) SeisIO core functions:
* UW data format: `readuwevt`, `uwpf`, `uwpf!`
* Event web functions: `FDSNevq`, `FDSNevt`, `distaz`, `get_pha!`
* Miscellaneous: `gcdist`, `show_phases`
* Types: `SeisHdr`, `SeisEvent`
### SeisIO.Quake Types
* `SeisSrc` is a new Type that characterizes an earthquake source process, with fields:
- `:id` (String): seismic source id
- `:eid` (String): event id
- `:m0` (Float64): scalar moment
- `:misc` (Dict{String,Any}): dictionary for non-essential information
- `:mt` (Array{Float64,1}): moment tensor values
- `:dm` (Array{Float64,1}): moment tensor errors
- `:npol` (Int64): number of polarities used in focal mechanism
- `:pax` (Array{Float64,2}): principal axes
- `:planes` (Array{Float64,2}): nodal planes
- `:src` (String): data source
- `:st` (SourceTime): source-time description with subfields:
+ `:desc` (String): description of source-time function
+ `:dur` (Real): duration
+ `:rise` (Real): rise time
+ `:decay` (Real): decay time
* `SeisHdr` has changed, hopefully for the last time:
+ Fields describing the seismic source process have moved to `SeisSrc`.
+ `:typ` (String) added for event type.
+ `:id` changed Type (Int64 ==> String) to accommodate ISC IDs.
+ `:loc` was expanded; added subfields can now fully characterize location quality, type, and source.
+ `:mag` is now a custom object, rather than Tuple(Float32, String), with fields:
- `:val` (Float32): magnitude value
- `:scale` (String): magnitude scale
- `:nst` (Int64): number of stations used in magnitude calculation
- `:gap` (Float64): max. azimuthal gap between stations in magnitude calculation
- `:src` (String): magnitude source (e.g,. software name, authoring agency)
* `SeisEvent` now has three substructures, rather than two:
+ `:data` is now Type `EventTraceData` (<: GphysData), which behaves like SeisData but contains four additional fields for event-specific information:
+ `az` Azimuth from event
+ `baz` Backazimuth to event
+ `dist` Source-receiver distance
+ `pha` Phase catalog. This is a dictionary of custom objects named `SeisPha`, keyed to phase names (e.g. "pP"), with fields: (Float64 except as indicated)
- `amp` amplitude
- `d` distance
- `ia` incidence angle
- `pol` polarity (Char)
- `qual` quality (Char)
- `rp` ray parameter
- `res` residual
- `ta` takeoff angle
- `tt` travel time
- `unc` uncertainty
+ `:hdr` (SeisHdr), header information
+ `:source` (SeisSrc), source process characterization
**Note**: Seismic data centers typically use different IDs for event location and event source model; hence, for a SeisHdr object `H` and the corresponding SeisSrc object `R`, `H.id == R.eid`, but generally, `H.id != H.eid`. **Please don't open an issue about this, I swear it's not a bug.**
### QuakeML Support
SeisIO.Quake introduces QuakeML support.
+ A new function, `read_qml`, reads QuakeML files and parses QuakeML downloads.
+ If multiple focal mechanisms, locations, or magnitudes are present in a single `Event` element, the following rules are used to select one of each:
- `FocalMechanism`
1. `preferredFocalMechanismID` if present
2. Solution with best-fitting moment tensor
3. First `FocalMechanism` element
- `Magnitude`
1. `preferredMagnitudeID` if present
2. Magnitude whose ID matches `MomentTensor/derivedOriginID`
3. Last moment magnitude (in this context, any magnitude whose lowercase
scale abbreviation begins with "mw")
4. First `Magnitude` element
- `Origin`
1. `preferredOriginID` if present
2. `derivedOriginID` from the chosen `MomentTensor` element
3. First `Origin` element
+ Non-essential QuakeML data are saved to each event `W` in `W.source.misc` (for earthquake source data) or `W.hdr.misc` (other data), using the corresponding QuakeML element names as keys.
### Changes to former SeisIO core functions
* `FDSNevq` now returns both an Array{SeisHdr,1} and a corresponding Array{SeisSrc,1}.
* SeisIO processing functions must now be called on the `:data` field of a `SeisEvent` object. They won't work if called on the `SeisEvent` object itself.
## read/write and SeisIO Types
* `read` and `write` methods now extend to all exported SeisIO Types, including those in SeisIO.Quake. The primary use of `rseis` and `wseis` will be creating indexed, searchable files of many SeisIO objects.
* `SeisChannel` objects are no longer converted to `SeisData` on write.
* The native file format has been rewritten. Please open an issue if you need to access data in the old file format; we don't think anyone was using it yet.
* Write speed has improved 20-30%. Read speed improved two orders of magnitude by removing automatic compression; it's now comparable to SAC or SEG Y with fewer allocations and less overhead.
* On write, the field `:x` of a data object is no longer compressed automatically.
+ Two new keywords, `KW.comp` (UInt8) and `KW.n_zip` (Int64), control compression
* If `KW.comp == 0x00`, data are not compressed when written.
* If `KW.comp == 0x01`, only compress `:x` if maximum([length(S.x[i])
for i = 1:S.n]) ≥ KW.n_zip; by default, `KW.n_zip = 100000`.
* If `KW.comp == 0x02`, always compress `:x`.
* Switched compressors from `blosclz` to `lz4`. This yields orders-of-magnitude write speed improvements for long sequences of compressed data.
## New Functionality
* `purge!` returns to remove empty and duplicate channels
* `resample!` does what the name suggests. Note that this is both (much) slower and more memory-intensive than a typical decimate operation
## Consistency, Performance, Bug Fixes
* The **+** and **\*** operators on objects of type T<: GphysData now obey
basic properties of arithmetic:
- commutativity: `S1 + S2 = S2 + S1`
- associativity: `(S1 + S2) + S3 = S1 + (S2 + S3)`
- distributivity: `(S1*S3 + S2*S3) = (S1+S2)*S3`
* `merge!`
- improved speed and memory efficiency
- duplicates of channels are now removed
- duplicates of windows within channels are now removed
- corrected handling of two (previously breaking) end-member cases:
+ data windows within a channel not in chronological order
+ sequential one-sample time windows in a channel
* `mseis!` now accepts EventTraceData and EventChannel objects
* `get_data`
+ now uses new keyword `KW.prune` to determine whether or not to remove empty
channels from partially-successful data requests
+ now calls `prune!` instead `merge!` after new downloads
+ no longer throws warnings if removing an empty channel because its data
were unavailable
* `sort!` has been extended to objects of type T<: GphysData
* FDSN web queries, including `get_data`, now work correctly with the Northern California Seismic Network.
* `sizeof(S)` should now accurately return the total size of (all data and fields) in each custom Type
* `isempty` is now defined for all Types
* fixed incremental subrequests in long `get_data` requests.
* eliminated the possibility of a (never-seen, but theoretically possible) duplicate sample error in multiday `get_data` requests.
* `get_data` no longer treats regional searches and instrument selectors as mutually exclusive.
* SeisIO keyword `nd` (number of days per subrequest) is now type `Real` (was: `Int`).
* shortened SeisIO keyword `xml_file` to `xf` because I'm *that* lazy about typing. y do u ax
* `writesac`:
- once again stores channel IDs correctly
- now sets begin time (SAC `b`) from SeisChannel/SeisData `:t`, rather than to 0.0. Channel start and end times using `writesac` should now be identical to `wseis` and `write`.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 2861 | SeisIO v0.4.1
2019-10-13
Primarily a bug fix release for v0.4.0, with the addition of write support for FDSN station XML files and the ASDF file format.
## New Writers
* Added `write_hdf5` to allow writing complete structures to seismic HDF5 files. Currently only supports ASDF; PH5 may be added later.
* Added `write_sxml` to create station XML from SeisData headers.
+ Note: output is valid FDSN station XML 1.1 but the IRIS validator may issue up to two warnings per channel inconsistently; see stationxml-validator issue [78](https://github.com/iris-edu/stationxml-validator/issues/78) for details.
## Consistency, Performance, Bug Fixes
* Added a file reader for SLIST (ASCII sample list) (`read_data("slist", ...)`), since I have readers for two SLIST variants (lennartzascii and geocsv.slist)...
* `resample` will no longer throw an error if the desired sampling frequency equals `:fs` for the largest group of segments. Fixed `resample` docstrings.
* `SEED.mseed_support()` and `SEED.seed_support()` now output some text; users don't need to check their respective help files.
* Added kw `autoname` to `get_data`; see documentation for functionality and behavior. Implements request in issue #24.
* Discovered/fixed a rare mini-SEED bug wherein the last packet of a request containing unencoded data could throw a BoundsError if padded with empty bytes.
* The memory footprint of SeisIO has been reduced by moving most large files to https://github.com/jpjones76/SeisIO-TestData. SeisIO now requires ~3 MB rather than ~300 MB.
+ The development version is somewhat unwieldy due to the commit history; without test files, it's around 500 MB. This can be safely pruned with BFG Repo-Cleaner with a file size threshold of 50k.
+ Test data now have their own repository. They're downloaded automatically by `tests/runtests.jl` when the script is first invoked, but `runtests.jl` now requires a Subversion command-line client to run.
* `read_data("seisio", ...)` now works as a wrapper to `rseis`
+ Note: this is a convenience wrapper and lacks the full functionality of `rseis`. When reading a SeisIO file that contains multiple objects, `read_data("seisio", ...)` reads only the first object in each file that can be converted to a SeisData structure.
* User agent settings are now standardized and should no longer cause error 500 on FDSN servers in California.
* `get_data` now warns when requesting a (non-miniseed) format from FDSN dataselect servers that don't implement the `format` keyword.
### SeedLink renamed and fixed
* Functions `SeedLink` and `SeedLink!` have been renamed to lowercase (they're now `seedlink` and `seedlink!`) because `SeedLink` was too easily mistaken for a custom Type.
* Fixed an issue that broke SeedLink connectons in v0.4.0; SeedLink connections once again process buffered data without manual intervention.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 8667 | SeisIO v0.4.0: Respocalypse Now
2019-09-22
# Instrument Response Improvements
SeisIO can now use full (multi-stage) instrument responses or simplified
(pole-zero) instrument responses.
* Added two new Instrument Response (`:resp`) types: `CoeffResp` and
`MultiStageResp`. These allow full descriptions of multi-stage instrument
responses in FDSN station XML.
  + Full multi-stage instrument responses can be requested by passing keyword
  `msr=true` to appropriate functions.
* Instrument response Types `PZResp` and `PZResp64` have changed in two ways:
+ Field `:c` renamed to `:a0` for consistency with FDSN and SEED
+ Field `:f0` added as a container for the frequency at which `:a0` is applied.
## New/Changed
* `fctoresp(f,c)`: generate a new instrument response from lower corner
frequency `f` and damping constant `c`. If no damping constant is specified,
assumes `c = 1/√2`.
* `remove_resp!(S)` remove (flatten to DC) the frequency responses of seismic
channels in a GphysData object.
* `resptofc(R)`: attempt to guess critical frequency `:f0` from poles and zeros
of a PZResp or PZResp64 object.
* `translate_resp!(S, R)`: translate response of seismic data channels
in GphysData object S to response R.
+ `translate_resp` works on channels with `MultiStageResp` responses, with
two caveats:
1. Only the *first* stage of the response changes.
1. The first stage must be a PZResp or PZResp64 object for the response to
be translated.
### Removed
* `equalize_resp!` (replaced by `translate_resp!`)
* `fctopz` (replaced by `fctoresp`)
* `SeisIO.resp_f` (deprecated)
# Expanded Read Support
With the addition of the readers below, SeisIO read support now covers many
data formats with a handful of simple wrapper functions:
## `read_data, read_data!`
Wrapper for reading entire data files into memory.
| Format | String |
| :--- | :--- |
| AH-1 | ah1 |
| AH-2 | ah2 |
| Bottle | bottle |
| GeoCSV, time-sample pair | geocsv |
| GeoCSV, sample list | geocsv.slist |
| Lennartz ASCII | lennartzascii |
| Mini-SEED | mseed |
| PASSCAL SEG Y | passcal |
| PC-SUDS | suds |
| SAC | sac |
| SEG Y (rev 0 or rev 1) | segy |
| UW datafile | uw |
| Win32 | win32 |
### Improvements
* `read_data("passcal", ..., swap=true)` reads big-endian PASSCAL SEG Y.
* `read_data` method extended to take either one or two Strings as arguments.
- If one String is passed to `read_data`, the string is treated as a file pattern; `guess` is called to determine the file format.
- If two Strings are passed, the first is treated as a file format String; the second is treated as a file pattern string.
+ Note: `read_data` is *much* faster when the file format String is supplied.
## `read_meta, read_meta!`
Wrapper for reading instrument metadata files into memory.
| Format | String |
| :--- | :--- |
| Dataless SEED | dataless |
| FDSN Station XML | sxml |
| SACPZ | sacpz |
| SEED RESP | resp |
## `read_hdf5, read_hdf5!`
Extract data from an HDF5 archive that uses a recognized seismic data format.
This works differently from `read_data` in that HDF5 archives are generally
large and contain data from multiple channels; they are scanned selectively
for data in a user-defined time window matching a user-specified ID pattern.
| Format | String |
| :--- | :--- |
| ASDF | asdf |
## `read_quake`
A wrapper to read discrete event data into a SeisEvent structure. Because
seismic event files are typically self-contained, this does not accept
wildcard file strings and has no "in-place" version.
| Format | String |
| :--- | :--- |
| QuakeML | qml |
| PC-SUDS event | suds |
| UW event | uw |
# Other Changes
## Documentation Improvements
* Public documentation of low-level file formats has been copied into docs/desc.
* CLI information on supported file formats can now be found in `SeisIO.formats`,
a dictionary accessed by format name (as given above).
## Processing on Download
`get_data` can now process requests after download by specifying keywords:
demean, detrend, rr (remove instrument response), taper, ungap, unscale.
+ There are no keywords in `get_data` to filter data or translate seismometer
responses to a non-flat curve; too many additional keywords would be needed.
## New Functionality
* `?RESP_wont_read` shows some common SEED RESP issues for problem files.
* `convert_seis!` converts seismograms in `S` to other units (m, m/s, m/s²) by differentiation or integration.
* `env!` efficiently computes the signal envelope by segment within each (regularly-sampled) channel.
* `get_file_ver` gets the version of a SeisIO native format file.
* `get_seis_channels(S)` returns numeric indices of channels in `S` whose instrument codes indicate seismic data.
* `guess` guesses data file format and endianness.
* `inst_code(C)` returns the instrument code of GphysChannel object `C`.
* `inst_code(S,i)` returns the instrument code of channel `i`.
* `inst_codes(S)` returns the instrument code of every channel in `S`.
* `resp_a0!(S)` updates the sensitivity `:a0` of PZResp/PZResp64 responses in GphysData object `S`, including PZResp/PZResp64 stages of type MultiStageResp responses. It can also be called on individual InstrumentResponse objects.
* `scan_hdf5` scans supported Seismic HDF5 formats and returns a list of strings describing the waveform contents.
* `set_file_ver` sets the version of a SeisIO native format file.
* `using SeisIO.SUDS; suds_support()` lists current SUDS support.
* `validate_units(S)` validates strings in `:units` to ensure UCUM compliance.
* `vucum(str)` validates UCUM units for `str`.
* `writesacpz(S)` writes instrument responses to SACPZ files.
## Consistency, Performance, Bug Fixes
* Adjusted user agent settings when connecting to FDSN servers in California.
* `get_data` now warns when requesting a (non-miniseed) format from FDSN
dataselect servers that don't implement the `format` keyword.
* Fixed a bug that could cause buffer size to degrade performance with some
file readers after reading long files.
* `FDSN_sta_xml` and functions that call it (e.g. `get_data`) now only spam
"overwrite" warnings at (user-specified verbosity) `v > 0`.
* Meta-data readers and parsers now strictly use `A0` from file, rather than
recalculating it under certain circumstances.
* Rewrote `SL_info`; performance should be greatly improved and it should no
longer cause timeout errors in automated tests.
* Fixed issue #20
* Fixed issue #19
* `tx_float` now always uses Float64 precision; Float32 lacks the resolution
to handle long time series.
* `detrend!` has seen many improvements:
+ now uses linear regression on gapless channels with `:fs > 0.0` && `n=1`,
yielding a ~12x speedup and >99% less memory use.
+ `detrend!(..., n=N)` now allows degree n=0, equivalent to `demean!`.
+ greatly increased accuracy at single precision.
* Most processing functions now accept a numeric channel list using keyword
`chans=` to restrict processing to certain channels.
+ The only exception to the above is `sync!`; `sync!` with channel restrictions
makes no sense.
* All functions that accept a numeric channel list as a keyword now call this
keyword `chans`; `chans` can be an Integer, UnitRange, or Array{Int64, 1}.
* Added `resample` as a "safe" (out-of-place) version of `resample!`
* Station XML handling has been rewritten, yielding 97% reduced memory use, 5.1x
speedup, and a workaround for the GeoNet server-side StationXML error (fixes issue #15)
* FDSNevq now returns full event catalogs by default (fixes issue #16)
* Documentation updated (fixes issue #17)
* `writesac()` with a GphysChannel object now accepts keyword `fname` to set
the file name (issue #18)
+ When specifying `fname=FSTR`, if FSTR doesn't end with (case-insensitive)
".sac", the suffix ".sac" is appended to FSTR automatically.
* New PZResp/PZResp64 objects should now always have `:a0 = 1.0, :f0 = 1.0`,
regardless of initialization method.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 13127 | SeisIO v1.0.0: Publication release
2020-03-02
This release coincides with the first SeisIO publication: Jones, J.P., Okubo, K., Clements. T., & Denolle, M. (2020). SeisIO: a fast, efficient geophysical data architecture for the Julia language. The paper was accepted by Seismological Research Letters in January 2020.
Major feature expansions and code changes include:
* Standardization of automated logging
* Workaround for Julia thread locking
* Significant improvements to *read_data*
* Full support for ASDF (Advanced Seismic Data Format)
* Write support for StationXML and QuakeML
This release is designated SeisIO v1.0.0 because it's the first release that I feel meets all basic goals established when I created SeisIO.
# **File I/O and Thread Locking**
File I/O now avoids the thread-locking disease introduced in Julia v1.3.0. Most low-level I/O commands have been changed in the following ways:
```
eof ⟹ fasteof
position ⟹ fastpos
read ⟹ fastread
readbytes! ⟹ fast_readbytes!
seek ⟹ fastseek
seekend ⟹ fastseekend
skip ⟹ fastskip
```
In some cases these changes yield significant speed increases. Popular data formats may read slightly slower or faster (±3%), but any observed slowdown is likely due to better logging.
# **Logging Improvements**
Logging to *:notes* and *:src* has been broadened and standardized.
## *:src* field in SeisIO structures
* Reading metadata no longer overwrites *:src*.
* *:src* should now be set consistently with design goals by all functions.
## *:notes* field in SeisIO structures
* The field separator for a single note has changed to ` ¦ ` (including spaces); the old separator (`,`) was problematic.
* New data sources should always be logged accurately, allowing user to easily reproduce the acquisition commands.
* Automated notes now have at least three fields, including the timestamp.
* Time stamp format for notes is now `YYYY-MM-DDThh:mm:ss`.
* Time stamps now have a precision of seconds. Millisecond timestamps were unnecessary.
* Notes that log data sources now set the second field of the note to *+source*
* `tnote` output changed slightly.
* `note!` now extends to SeisHdr and SeisSrc objects in SeisIO.Quake
## *get_data* logging
Bad requests and unparseable formats are now logged to special output channels; see below.
## Other logging
* `read_meta` logs all reads to *:notes* in entries that contain "+meta" in the second field
* All writes are now logged to *:notes*:
* `writesac` now logs all writes to channels written.
* `wseis` now logs all writes to (all channels of) objects written.
* `write_hdf5` now logs all writes to all channels written.
## New functions for checking logs
* `show_processing` tabulates and prints processing steps in *:notes*
* `show_src` tabulates and prints data sources in *:notes*
* `show_writes` tabulates and prints all data writes in *:notes*
# **Improvements to** *read_data*
* A String array can now be passed as the file string pattern argument with method `read_data(fmt, filestr [, keywords])`. This functionality will eventually be expanded to `read_data(filestr [, keywords])`.
* All file formats can now be memory-mapped before reading by passing KW *memmap=true*. SeisIO benchmarks suggest that this affects some read speeds:
+ *Significant speedup*: ASCII formats, including metadata formats
+ *Slight speedup*: mini-SEED
+ *Significant slowdown*: SAC
+ Other data formats show trivial changes in read speed (at most ±3%).
+ Mmap.mmap signal handling is undocumented. Use with caution.
* When reading into an existing structure, stricter channel matching can now be enforced with KW *strict=true*. See below.
* ASCII data formats now always continue existing channels that match channel ID, rather than starting new ones. Fixes issue #35.
* Verbose source logging can be enabled with new KW *vl=true*. There are cases where this can log sources to *:notes* that are irrelevant to a channel; verbosity isn't always efficient.
* The format string to read Lennartz SLIST (ASCII) with *read_data* has changed from "lennasc" to "lennartz".
## Channel Extension
Existing data channels should now be extended on a successful channel ID match in all file formats. This is the intended behavior of SeisIO.
Previously, all formats except mini-SEED and ASDF created a new channel for every new file read, which required flattening with *merge!*. That was a clumsy work flow.
The new behavior standardizes the output because multiple files read with many *read_data!* calls should yield the same number of channels as passing all filenames to *read_data!* in an Array{String, 1} or String wildcard.
### Behavior with *strict=true*
* Does not work with GeoCSV or SUDS formats.
* Matches on *at least* both of *:id* & *:fs* in all other formats.
* In the best-case scenario, *read_data* matches on *:id*, *:fs*, *:gain*, *:loc*, *:resp*, & *:units*.
* See official documentation for fields matched in each file format.
# **ASDF/HDF5**
ASDF (Adaptable Seismic Data Format) is now fully supported. Specific changes from SeisIO v0.4.1 are below.
## *write_hdf5* Changes
+ KW *chans* supports writing only some channels from a structure.
+ calling *write_hdf5* on an existing file now appends data to the file, rather than recreating it.
+ users can now specify *add=true* to add trace data to a file while preserving existing traces in it. See documentation for details.
+ users can now specify *ovr=true* to overwrite existing trace data in a file.
See documentation for details.
+ irregularly-sampled channels are now skipped.
+ gapped channels are now written to file by segment.
+ KW *tag* allows user control over the tag in the trace name string.
- If unset, the channel name is used.
- Previously, the default tag was the channel name with a trailing underscore.
+ Includes a method for SeisEvent structures in submodule Quake.
## New/Changed ASDF Functions
* `asdf_qml` was renamed to `asdf_rqml` as it reads QuakeML from ASDF files.
+ `asdf_wqml` writes QuakeML to a new or existing ASDF file.
+ `read_asdf_evt` reads events from an ASDF archive and returns an array of SeisEvent structures.
+ `asdf_waux` is a thin wrapper to write to the AuxiliaryData group.
# **Other Changes**
## Documentation Improvements
* The tutorial has been updated and expanded to include more practice working with ASDF volumes and an optional tutorial for the Quake submodule.
* Several docstrings were greatly truncated to reduce scrolling.
* The Time developer guide now includes a full API and formal definitions.
* The File Formats developer guide now includes a full API.
## `examples.jl`
* Everything in *examples.jl* is now also part of the Data Acquisition tutorial.
* `using Pkg; Pkg.test("SeisIO")` now tells users where to find *examples.jl* at the end of a successful test set.
* no longer errors on the second SeedLink example when invoked outside the "tests" directory.
## Consistency, Performance, Bug Fixes
* `seedlink`, `seedlink!`
+ now requires MODE ("TIME", "FETCH", or "DATA") as the first string argument, for consistency with other web clients; it's no longer a keyword.
+ SeedLink keyword *u=* (base URL) now has a default value in SeisIO.KW.SL and
can be changed with `SeisIO.KW.SL.u=URL` for string *URL*.
* `Quake.get_pha!` docstring corrected.
* `read_meta` now accepts an *Array{String, 1}* for the file pattern argument.
* `read_sxml` is no longer exported; use *read_meta("sxml", ... )* instead.
* Functions that accept a verbosity level *v* now accept any Integer subtype, not just Int64.
* Fixed a minor bug that could cause some frequency-domain processing routines to allocate larger arrays than necessary.
* Fixed issue #30
* Fixed issue #28
* `endtime(t, fs)` once again behaves correctly for irregularly-sampled data.
* The docstring for *read_quake* is now correctly accessed with `?read_quake`.
* SeisSrc objects created with *randSeisEvent* now have *:pax* and *:planes* fields whose geometries are consistent with what *read_qml* produces.
* `rseis` now supports memory mapping with KW *mmap=true*.
* `convert_seis` now correctly errors when *units_out* is invalid.
### *get_data*
Bad requests are now saved to channels with special IDs.
* Channel IDs that begin with "XX.FAIL" contain bad requests, with the response message stored as a String in *:misc["msg"]*.
* Channel IDs that begin with "XX.FMT" contain unparseable data. The raw response bytes are stored in *:misc["raw"]* as an Array{UInt8,1}, and can be dumped to file or parsed with external programs as needed.
* Information about bad requests is logged to *:notes* in these special channels.
* *get_data* calls during automated tests now use a retry script when servers return no data. This should prevent most test errors due to server-side data availability.
### *merge!*
* Should work more consistently with *MultiStageResp* instrument responses.
* Now handles sample times more robustly in overlapping segments.
### *ungap!* and *gapfill!*
These low-level processing functions have been optimized for better memory usage. Additional specific changes:
* *ungap!* no longer breaks on negative subsample gaps of length -1.0 *Δ* < *δt* ≤ -0.5 *Δ* at sampling interval *Δ*. Fixes issue #29.
* The docstring of *ungap!* now explicitly warns to call *merge!* before *ungap!* if any channel has segments that aren't in chronological order.
* *ungap!* with a negative time gap now partly overwrites output with earlier
segments (corresponding to the time overlap).
### GeoCSV
+ Fixed a possible off-by-one error in two-column (tspair) GeoCSV channel start times
+ Fixed a possible off-by-one error in the last entry of *:t* when reading two-column (tspair) GeoCSV
### PASSCAL SEG Y
* Fixed a bug where PASSCAL SEG Y could store 1.0/gain as *:gain*.
* Fixed a minor bug with PASSCAL SEG Y channel names created from incomplete trace header fields.
### QuakeML
* `write_qml` writes and appends QuakeML.
* `read_qml`
+ The *:src* fields of *.hdr*, *.hdr.loc*, *.hdr.mag*, and *.source* now follow an internally consistent format.
+ Fixed a bug where standard error wasn't being parsed to *:loc.se*.
* Uninitialized array fields in SeisSrc structures created by this function now have empty fields, rather than fields filled with zeros, consistent with the method extension of `isempty()` to *SeisSrc* objects.
### SAC
* `writesac`
+ Now accepts a range of channels with keyword *chans=*.
+ Fixed a bug where some empty SAC character strings were written incorrectly
+ Now handles data gaps by writing one segment per file. The old behavior of assuming no gaps was undocumented and undesirable, albeit intended when the function was written.
+ KW *"xy=true"* deprecated
+ No longer writes irregular channels
+ No longer allows writing SAC x-y files
* `writesacpz`
+ Now accepts a channel list as a keyword argument, specified with *chans=*.
+ For consistency with other write methods, the required arguments have reversed positions; `writesacpz(S, fname)` is now `writesacpz(fname, S)`
#### Explanation of Changes
The intent of writing SAC x-y was to allow a convenient way to write irregular channels to file, with sample values in *y* and sample times in *x* measured relative to SAC header start time.
However, at time precision *δt*, representational problems begin at (t_max - t_min) > maxintfloat(Float32, Int64) [δt]. Yet maxintfloat(Float32, Int64) = 16,777,216.
This means that writing irregular data to SAC x-y either requires user-specified sampling accuracy (like ".01 s"), or uses native (μs) time differences; the former is a terrible workflow, and the latter loses faithful Float32 representation 16 s after data collection begins.
### SEG Y
Please note that these changes do not affect the PASSCAL SEG Y variant.
+ IBM-Float is now supported.
+ Station names are now set in channel IDs from more appropriate headers.
+ *:gain* is once again correctly set.
+ *:units* are now set from the appropriate trace header.
+ File streams should now always close before an unsupported data encoding throws an error.
### SUDS
* Comment structures are now only displayed to stdout at high verbosity. There is no good way to read them into SeisIO and no evidence anyone used them.
* Fixed a bug where reading a Struct 7 could sometimes set *:fs* incorrectly.
### StationXML
* Writing station XML now accepts a numeric channels specifier *C* with KW *chans=C*.
* Reduced memory overhead of *write_sxml* by ~80% with significant speedup.
### UW
* *readuwevt* and *read_quake("UW", ...)* now respect the value of *full* passed by the user.
* UW events with no external event ID in the pick file now set the event ID from the pick file name (if available) or data file name (if no pick file).
### Win32
Gaps between one-second blocks of data sampled at interval *Δ* are now only logged if *δt* > *1.5Δ*, rather than *δt* > *Δ*, consistent with other file formats.
### mini-SEED
Deprecated mini-SEED support for little-endian Steim compression for consistency with FDSN standards; resolves issue #33.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
|
[
"MIT"
] | 1.2.1 | 9cc5973b40a0f06030cbfc19dc1f79478488e546 | docs | 8709 | SeisIO v1.1.0
2020-08-27
SeisIO v1.1.0 adds submodule `SeisIO.Nodal` for nodal data, plus initial support for IRISPH5 requests. Other changes include fixes to several rare and/or minor bugs, code consistency improvements, a much-needed *sync!* rewrite, and documentation updates.
# 1. **Public API Changes**
## **SAC**
* SAC data files no longer track the LOC field of `:id`; thus, IDs `NN.SSSSS.LL.CCC` and `NN.SSSSS..CCC` will be written and read identically to/from SAC.
+ This change realigns SeisIO SAC handling with the [official format spec](http://ds.iris.edu/files/sac-manual/manual/file_format.html).
+ *Explanation*: Some data sources store the LOC field of `:id` in KHOLE (byte offset 464). We followed this convention through SeisIO v1.0.0. However, KHOLE is usually an event property.
## **SeedLink**
Functions *seedlink* and *seedlink!* now accept keyword *seq=* for starting sequence number, consistent with [SeisComp3 protocols](https://www.seiscomp3.org/doc/seattle/2012.279/apps/seedlink.html).
## **SEG Y**
A minor change to SEGY file support could break user work flows that depend on `read_data("segy", ..., full=true)`. Two keys have been renamed:
* `:misc["cdp"]` => `:misc["ensemble_no"]`
* `:misc["event_no"]` => `:misc["rec_no"]`
### New: `read_data("segy", ll=...)`
`read_data("segy", ...)` has a new keyword: `ll=` sets the two-character location field in `:id` (NNN.SSS.**LL**.CC), using values in the SEG Y trace header. Specify using UInt8 codes; see official documentation for codes and meanings.
## **IRISWS timeseries**
SeisIO has made some minor changes to `get_data("IRIS", ... )` behavior, due to server-side changes to IRISWS timeseries:
* The Stage 0 (scalar) gain of each channel is logged to `:notes` for fmt="sacbl" ("sac") and fmt="geocsv".
* The output of `get_data("IRIS", ... , w=true)` differs slightly from calling `read_data` on the files created by the same command:
+ This affects the fields `:misc` and `:notes`.
+ `:loc` will be set in objects read from SAC and GeoCSV files, but not mini-SEED.
+ Data in objects read from SAC or GeoCSV files will be scaled by the Stage 0 gain; fix this with `unscale!`.
## **New: .Nodal submodule**
Added SeisIO.Nodal for reading data files from nodal arrays
* New types:
+ NodalData <: GphysData
+ NodalChannel <: GphysChannel
+ NodalLoc <: InstrumentPosition
* Wrapper: `read_nodal(fmt, file, ...)`
+ Current file format support:
- Silixa TDMS ("silixa")
- Nodal SEG Y ("segy")
* Utility functions: `info_dump`
# 2. **Bug Fixes**
* Fixed a minor bug with SEG Y endianness when calling `guess`.
* `guess` now tests all six required SEG Y file header values, rather than five.
* SEED submodule support functions (e.g. `mseed_support()`) now correctly info dump to stdout
* *merge!* is now logged in a way that *show_processing* catches
* *read_qml* on an event with no magnitude element now yields an empty `:hdr.mag`
* *show* now reports true number of gaps when `:x` has a gap before the last sample
* Fixed two breaking bugs that were probably never seen in real data:
+ Extending a time matrix by appending a one-sample segment to `:x` can no longer break time handling; see changes to *t_extend* for fix details.
+ *write_hdf5* with *ovr=false* no longer overwrites a trace in an output volume when two sample windows have the same ID, start time string, and end time string; instead, the tag string is incremented.
- This previously happened when two or more segments from one channel started and ended within the same calendar second.
* The data processing functions *ungap!*, *taper!*, *env!*, *filtfilt!*, and *resample!* now correctly skip irregularly-sampled channels.
* Irregularly-sampled channels are no longer writable to ASDF, which, by design, does not handle irregularly-sampled data.
* ASDF groups and datasets are now always closed after reading with *read_hdf5*.
* In Julia v1.5+, calling `sizeof(R)` on an empty `MultiStageResp` object should no longer error.
## **SeisIO Test Scripts**
Fixed some rare bugs that could break automated tests.
* *test/TestHelpers/check_get_data.jl*: now uses a *try-catch* loop for *FDSNWS station* requests
* *tests/Processing/test_merge.jl*: testing *xtmerge!* no longer allows total timespan *δt > typemax(Int64)*
* *tests/Quake/test_fdsn.jl*: KW *src="all"* is no longer tested; too long, too much of a timeout risk
* *tests/Web/test_fdsn.jl*: bad request channels are deleted before checking file write accuracy
* Tests now handle time and data comparison of re-read data more robustly
# 3. **Consistency, Performance**
* The field `:x` of GphysData and GphysChannel objects now accepts either `AbstractArray{Float64, 1}` or `AbstractArray{Float32, 1}`.
* *get_data* with *w=true* now logs the raw download write to *:notes*
* *show* now identifies times in irregular data as "vals", not "gaps"
* *show_writes* now prints the filename in addition to the write operation
* *write_qml* now:
- writes `:hdr.loc.typ` to *Event/Origin/type*
- writes `:hdr.loc.npol` to *Event/focalMechanism/stationPolarityCount*
- has a method for direct write of *SeisEvent* structures
* *write_sxml* now works with all GphysData subtypes
* *read_data* now uses verbosity for formats "slist" and "lennartz"
* *get_data("IRIS", ...)* now accepts `fmt="sac"` as an alias to `fmt="sacbl"`
* *sync!* has been rewritten based on @tclements suggestions (Issue #31). Notable changes:
* Much less memory use
* Much faster; ~6x speedup on tests with 3 channels of length ~10^7 samples
* More robust handling of unusual time matrices (e.g., segments out of order)
* *SeisIO.RandSeis* functions have been optimized.
+ *randSeisChannel* has two new keywords: *fs_min*, *fc*
+ *randSeisData* has two new keywords: *fs_min*, *a0*
# 4. **Developer API Changes**
* Internal function `SeisIO.dtr!` now accepts `::AbstractArray{T,1}` in first positional argument.
* Most internal functions have switched from keywords to positional arguments. This includes:
* SeisIO: `FDSN_sta_xml` , `FDSNget!` , `IRISget!` , `fdsn_chp` , `irisws` , `parse_charr` , `parse_chstr` , `read_station_xml!` , `read_sxml` , `sxml_mergehdr!` , `trid`
* SeisIO.RandSeis: `populate_arr!`, `populate_chan!`
* SeisIO.SeisHDF: `write_asdf` (note: doesn't affect `write_hdf5`)
* *t_extend* is now more robust and no longer needs a mini-API
+ previously, some rare cases of time matrix extension could break. They were likely never present in real data -- e.g., a time matrix with a gap before the last sample would break when extended by another sample -- but these "end-member" cases were theoretically possible.
+ the rewrite covers and tests all possible cases of time matrix extension.
* *check_for_gap!* is now a thin wrapper to *t_extend*, ensuring uniform behavior.
* Internal functions in *SeisIO.RandSeis* have changed significantly.
# 5. **Documentation**
* [Official documentation](https://seisio.readthedocs.io/) updated
* Many docstrings have been updated and standardized. Notable examples:
+ *?timespec* is now *?TimeSpec*
+ *?chanspec* is now *?web_chanspec*
+ *?taper* once again exists
+ *?seedlink* keywords table is once again current
+ *SeisIO.Quake*:
- *?EventChannel* now produces a docstring, rather than an error
- *?get_pha!* once again describes the correct function
* Updated and expanded the Jupyter tutorial
* Updated and expanded the time API
# **Index**: GitHub Issues Fixed
* #31 : *sync!* rewritten.
* #39 : tutorial updated.
* #42 : mini-SEED calibration blockettes of this type parse correctly again.
* #43 : reading Steim-compressed mini-SEED into an existing channel with an empty Float64 data vector should no longer error. `get_data` should no longer error when the first part of a segmented request includes no data from some channels.
* #44 : documentation updated.
* #45 : documentation updated.
* #48 : mini-SEED Blockette 100 handling should now match the IRIS mini-SEED C library.
* #49 : the read speed slowdown in HDF5.jl was fixed; SeisIO no longer requires HDF5 v0.12.3.
* #50 : `resample!` now consistently allows upsampling.
* #51 : `resample!` now correctly updates `:t` of a gapless SeisChannel.
* #52 : merged PR #53, adding initial support for IRISPH5 requests
* #54 : `SeisIO.dtr!` now accepts `::AbstractArray{T,1}` in first positional argument
* #55 : added `read_nodal("segy")`
* #56 : implemented as part of initial SeisIO.Nodal release
* #57 : fixed by addition of KW `ll=` to `read_data`
**Note**: v1.1.0 was originally released 2020-07-07. Re-releasing, rather than incrementing to v1.2.0, fixes an issue that prevented Julia from automatically registering v1.1.0.
| SeisIO | https://github.com/jpjones76/SeisIO.jl.git |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.