[seiscomp, scanloc] Install, add .gitignore

2025-10-09 15:07:02 +02:00
commit 20f5301bb1
2848 changed files with 1315858 additions and 0 deletions

3
.gitignore vendored Normal file
View File

@ -0,0 +1,3 @@
share/maps
var

BIN
bin/Hypo71PC Executable file

Binary file not shown.

82
bin/arclink2inv Executable file
View File

@ -0,0 +1,82 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import seiscomp.datamodel
import seiscomp.io
import getopt
import sys
usage = """arclink2inv [options] input=stdin output=stdout
Options:
-h [ --help ] Produce help message
-f [ --formatted ] Enable formatted XML output
"""
def main(argv):
imp = seiscomp.io.Importer.Create("arclink")
if imp is None:
sys.stderr.write("Arclink import not available\n")
return 1
formatted = False
# parse command line options
try:
opts, args = getopt.getopt(argv[1:], "hf", ["help", "formatted"])
except getopt.error as msg:
sys.stderr.write(f"{msg}\n")
sys.stderr.write("for help use --help\n")
return 1
for o, a in opts:
if o in ["-h", "--help"]:
sys.stderr.write(f"{usage}\n")
return 1
elif o in ["-f", "--formatted"]:
formatted = True
argv = args
if len(argv) > 0:
o = imp.read(argv[0])
else:
o = imp.read("-")
inv = seiscomp.datamodel.Inventory.Cast(o)
if inv is None:
sys.stderr.write("No inventory found\n")
return 1
ar = seiscomp.io.XMLArchive()
if len(argv) > 1:
res = ar.create(argv[1])
else:
res = ar.create("-")
if not res:
sys.stderr.write("Failed to open output\n")
return 1
ar.setFormattedOutput(formatted)
ar.writeObject(inv)
ar.close()
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))

26
bin/bindings2cfg Executable file
View File

@ -0,0 +1,26 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) gempa GmbH #
# All rights reserved. #
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Other Usage #
# Alternatively, this file may be used in accordance with the terms and #
# conditions contained in a signed written agreement between you and #
# gempa GmbH. #
############################################################################
import seiscomp.bindings2cfg
import sys
sys.exit(seiscomp.bindings2cfg.main())

BIN
bin/dlsv2inv Executable file

Binary file not shown.

764
bin/dump_picks Executable file
View File

@ -0,0 +1,764 @@
#!/usr/bin/env seiscomp-python
############################################################################
# Copyright (C) 2016 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
# #
# Author: Enrico Ellguth, Dirk Roessler #
# Email: enrico.ellguth@gempa.de, roessler@gempa.de #
############################################################################
import datetime
import os
import sys
from seiscomp import core, datamodel, io
from seiscomp.client import Application
from seiscomp import geo
def str2time(timestring):
"""
Liberally accept many time string formats and convert them to a
seiscomp.core.Time
"""
timestring = timestring.strip()
for c in ["-", "/", ":", "T", "Z"]:
timestring = timestring.replace(c, " ")
timestring = timestring.split()
assert 3 <= len(timestring) <= 6
timestring.extend((6 - len(timestring)) * ["0"])
timestring = " ".join(timestring)
fmt = "%Y %m %d %H %M %S"
if timestring.find(".") != -1:
fmt += ".%f"
t = core.Time()
t.fromString(timestring, fmt)
return t
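# A sketch of inputs str2time accepts (values are hypothetical):
#   str2time("2023-01-20 13:52:00")        # plain date and time
#   str2time("2023/01/20T13:52:00.25Z")    # -, /, :, T and Z are normalized
#   str2time("2023-01-20")                 # missing fields are padded with zeros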
def utc():
return datetime.datetime.now(datetime.timezone.utc).replace(tzinfo=None)
class DumpPicks(Application):
def __init__(self, argc, argv):
Application.__init__(self, argc, argv)
self.output = "-"
self.type = "0"
self.margin = [300]
self.originID = None
self.bbox = None
self.noamp = False
self.automatic = False
self.manual = False
self.checkInventory = False
self.author = None
self.hours = None
self.minutes = None
self.start = None
self.end = None
self.network = None
self.station = None
self.tmin = str2time("1970-01-01 00:00:00")
self.tmax = str2time(str(utc()))
self.delay = None
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, True)
def createCommandLineDescription(self):
self.commandline().addGroup("Dump")
self.commandline().addStringOption(
"Dump",
"hours",
"Start search hours before now considering object time, not creation time. "
"If --minutes is given as well they will be added. "
"If set, --time-window, --start, --end are ignored.",
)
self.commandline().addStringOption(
"Dump",
"minutes",
"Start search minutes before now considering object time, not creation time. "
"If --hours is given as well they will be added. "
"If set, --time-window, --start, --end are ignored.",
)
self.commandline().addStringOption(
"Dump",
"start",
"Start time of search until now considering object time, not creation time."
" If set, --time-window is ignored.",
)
self.commandline().addStringOption(
"Dump",
"end",
"End time of search considering object time, not creation time. If set, "
"--time-window is ignored.",
)
self.commandline().addStringOption(
"Dump",
"time-window,t",
"Specify time window to search picks and amplitudes by their time. Use one "
"single string which must be enclosed by quotes in case of spaces in the "
"time string. Times are of course in UTC and separated by a tilde '~'. "
"Uses: 1970-01-01 00:00:00 to now if not set.",
)
self.commandline().addStringOption(
"Dump",
"maximum-delay",
"Maximum allowed delay of picks or amplitudes, hence the difference between"
" creation time and actual time value. Allows identifcation of picks found "
"in real time.",
)
self.commandline().addStringOption(
"Dump",
"region,r",
"Dump picks only from sensors in given region. Implies loading an "
"inventory.\n"
"Format: minLat,minLon,maxLat,maxLon \n"
"Default: -90,-180,90,180 if not set.",
)
self.commandline().addOption(
"Dump",
"check-inventory,c",
"Dump picks only when corresponding streams are found in inventory.",
)
self.commandline().addStringOption(
"Dump",
"origin,O",
"Origin ID. Dump all "
"picks associated with the origin that has the given origin ID.",
)
self.commandline().addOption("Dump", "manual,m", "Dump only manual picks.")
self.commandline().addOption(
"Dump", "automatic,a", "Dump only automatic picks."
)
self.commandline().addOption(
"Dump",
"no-amp,n",
"Do not dump amplitudes from picks. "
"Amplitudes are not required by scanloc.",
)
self.commandline().addStringOption(
"Dump", "author", "Filter picks by the given author."
)
self.commandline().addStringOption(
"Dump",
"net-sta",
"Filter picks and amplitudes by given network code or "
"network and station code. Format: NET or NET.STA.",
)
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output",
"output,o",
"Name of output file. If not given, all data is written to stdout.",
)
self.commandline().addStringOption(
"Output",
"type",
f"Type of output format. Default: {self.type}.\n"
"0 / scml: SCML containing all objects (default if option is not used)\n"
"1 / streams: Time windows and streams for all picks like in scevtstreams\n"
"2 / caps: Time windows and streams in capstool format\n"
"3 / fdsnws: Time windows and streams in FDSN dataselect webservice POST \
format\n"
"Except for type 0, only picks are considered ignoring all other objects.",
)
self.commandline().addOption(
"Output",
"formatted,f",
"Output formatted XML. Default is unformatted. Applies only for type 0.",
)
self.commandline().addStringOption(
"Output",
"margin",
"Time margin applied around pick times along with --type = [1:]. Use 2 "
"comma-separted values (before,after) for asymmetric margins, e.g. "
f"--margin 120,300. Default: {self.margin[0]} s.",
)
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
Read picks and amplitudes from database and dump them to a file or to standard output.\
"""
)
Application.printUsage(self)
print(
f"""Examples:
Dump all picks within a region and a period of time
{os.path.basename(__file__)} -d localhost -t 2023-01-20T13:52:00~2023-01-20T13:57:00\
-r "-10,-90,10,120"
Search 24 hours before now for automatic picks from author "scautopick" with low delay \
ignoring amplitudes
{os.path.basename(__file__)} -d localhost --hours 24 -a -n --author "scautopick" \
--maximum-delay 60
Dump the streams of picks with time windows fetching the corresponding data from a \
local CAPS server
{os.path.basename(__file__)} -d localhost --type 2 --margin 60 | capstool \
-H localhost -o data.mseed
Dump the streams of picks with time windows fetching the corresponding data from a \
local SDS archive
{os.path.basename(__file__)} -d localhost --type 1 --margin 60 | scart -dsE -l - \
/archive -o data.mseed
"""
)
def init(self):
if not Application.init(self):
return False
try:
self.output = self.commandline().optionString("output")
except RuntimeError:
pass
try:
self.type = self.commandline().optionString("type")
except RuntimeError:
pass
if self.type == "scml":
self.type = "0"
elif self.type == "streams":
self.type = "1"
elif self.type == "caps":
self.type = "2"
elif self.type == "fdsnws":
self.type = "3"
try:
self.margin = self.commandline().optionString("margin").split(",")
except RuntimeError:
pass
try:
self.originID = self.commandline().optionString("origin")
except RuntimeError:
pass
if not self.originID:
try:
boundingBox = self.commandline().optionString("region")
self.bbox = boundingBox.split(",")
if len(self.bbox) != 4:
print(
"Invalid region given, expected lat0,lon0,lat1,lon1",
file=sys.stderr,
)
return False
self.bbox[0] = str(geo.GeoCoordinate.normalizeLat(float(self.bbox[0])))
self.bbox[1] = str(geo.GeoCoordinate.normalizeLon(float(self.bbox[1])))
self.bbox[2] = str(geo.GeoCoordinate.normalizeLat(float(self.bbox[2])))
self.bbox[3] = str(geo.GeoCoordinate.normalizeLon(float(self.bbox[3])))
self.checkInventory = True
except RuntimeError:
boundingBox = "-90,-180,90,180"
self.bbox = boundingBox.split(",")
print("Settings", file=sys.stderr)
print(
f" + considered region: {self.bbox[0]} - {self.bbox[2]} deg North, "
f"{self.bbox[1]} - {self.bbox[3]} deg East",
file=sys.stderr,
)
try:
self.hours = float(self.commandline().optionString("hours"))
except RuntimeError:
pass
try:
self.minutes = float(self.commandline().optionString("minutes"))
except RuntimeError:
pass
try:
self.start = self.commandline().optionString("start")
except RuntimeError:
pass
try:
self.end = self.commandline().optionString("end")
except RuntimeError:
pass
delta = 0.0
if self.hours:
delta = self.hours * 60
if self.minutes:
delta += self.minutes
if self.hours or self.minutes:
print(
" + time window set by hours and/or minutes option: ignoring all "
"other time parameters",
file=sys.stderr,
)
dt = datetime.timedelta(minutes=delta)
self.tmin = str2time(str(utc() - dt))
self.tmax = str2time(str(utc()))
self.start = None
self.end = None
else:
if self.start:
print(
" + time window set by start option: ignoring --time-window",
file=sys.stderr,
)
self.tmin = str2time(self.start)
if self.end:
print(
" + time window set by end option: ignoring --time-window",
file=sys.stderr,
)
self.tmax = str2time(self.end)
if not self.start and not self.end:
try:
self.tmin, self.tmax = map(
str2time,
self.commandline().optionString("time-window").split("~"),
)
print(
" + time window set by time-window option", file=sys.stderr
)
except RuntimeError:
print(
" + no time window given exlicitly: Assuming defaults",
file=sys.stderr,
)
print(
f" + considered time window: {str(self.tmin)} - {str(self.tmax)}",
file=sys.stderr,
)
else:
print(
" + searching for picks is based on originID, ignoring "
"region and time window",
file=sys.stderr,
)
try:
self.delay = float(self.commandline().optionString("maximum-delay"))
except RuntimeError:
pass
if not self.checkInventory:
self.checkInventory = self.commandline().hasOption("check-inventory")
if self.checkInventory:
print(
" + dumping only picks for streams found in inventory", file=sys.stderr
)
else:
print(
" + do not consider inventory information for dumping picks",
file=sys.stderr,
)
if self.commandline().hasOption("no-amp"):
self.noamp = True
else:
self.noamp = False
if self.type != "0":
self.noamp = True
if self.noamp:
print(" + dumping picks without amplitudes", file=sys.stderr)
else:
print(" + dumping picks with amplitudes", file=sys.stderr)
if self.commandline().hasOption("manual"):
self.manual = True
print(" + dumping only manual objects", file=sys.stderr)
else:
self.manual = False
print(" + considering also manual objects", file=sys.stderr)
if self.commandline().hasOption("automatic"):
if not self.manual:
self.automatic = True
print(" + dumping only automatic picks", file=sys.stderr)
else:
print(
"EXIT - Script was started with competing options -a and -m",
file=sys.stderr,
)
return False
else:
self.automatic = False
print(" + considering also automatic objects", file=sys.stderr)
try:
self.author = self.commandline().optionString("author")
except RuntimeError:
pass
networkStation = None
try:
networkStation = self.commandline().optionString("net-sta")
print(
f" + filter objects by network / station code: {networkStation}",
file=sys.stderr,
)
except RuntimeError:
pass
if networkStation:
try:
self.network = networkStation.split(".")[0]
except IndexError:
print(
f"Error in network code '{networkStation}': Use '--net-sta' with "
"format NET or NET.STA",
file=sys.stderr,
)
return False
try:
self.station = networkStation.split(".")[1]
except IndexError:
print(
f" + no station code given in '--net-sta {networkStation}' - "
"using all stations from network",
file=sys.stderr,
)
return True
def run(self):
db = self.database()
def _T(name):
return db.convertColumnName(name)
def _time(time):
return db.timeToString(time)
colLat, colLon = _T("latitude"), _T("longitude")
dbq = self.query()
ep = datamodel.EventParameters()
picks = []
noAmps = 0
if self.originID:
for p in dbq.getPicks(self.originID):
picks.append(datamodel.Pick.Cast(p))
for p in picks:
dbq.loadComments(p)
ep.add(p)
if not self.noamp:
for a in dbq.getAmplitudesForOrigin(self.originID):
amp = datamodel.Amplitude.Cast(a)
ep.add(amp)
else:
fmt = "%Y-%m-%d %H:%M:%S"
if self.checkInventory:
q = (
"select distinct(PPick.%s), Pick.* "
"from PublicObject as PPick, Pick, Network, Station, SensorLocation "
"where PPick._oid=Pick._oid and Network._oid=Station._parent_oid and "
"Station._oid=SensorLocation._parent_oid and Station.%s >= %s and "
"Station.%s <= %s and Station.%s >= %s and Station.%s <= %s and "
"SensorLocation.%s=Pick.%s and SensorLocation.%s <= Pick.%s and "
"(SensorLocation.%s is null or SensorLocation.%s > Pick.%s) and "
"Station.%s=Pick.%s and Network.%s=Pick.%s and "
"Pick.%s >= '%s' and Pick.%s < '%s'"
""
% (
_T("publicID"),
colLat,
self.bbox[0],
colLat,
self.bbox[2],
colLon,
self.bbox[1],
colLon,
self.bbox[3],
_T("code"),
_T("waveformID_locationCode"),
_T("start"),
_T("time_value"),
_T("end"),
_T("end"),
_T("time_value"),
_T("code"),
_T("waveformID_stationCode"),
_T("code"),
_T("waveformID_networkCode"),
_T("time_value"),
self.tmin.toString(fmt),
_T("time_value"),
self.tmax.toString(fmt),
)
)
else:
q = (
"select distinct(PPick.%s), Pick.* "
"from PublicObject as PPick, Pick "
"where PPick._oid=Pick._oid and "
"Pick.%s >= '%s' and Pick.%s < '%s'"
""
% (
_T("publicID"),
_T("time_value"),
self.tmin.toString(fmt),
_T("time_value"),
self.tmax.toString(fmt),
)
)
if self.manual:
q = q + f" and Pick.{_T('evaluationMode')} = 'manual' "
if self.automatic:
q = q + f" and Pick.{_T('evaluationMode')} = 'automatic' "
if self.author:
q = q + f" and Pick.{_T('creationInfo_author')} = '{self.author}' "
if self.network:
q = q + f" and Pick.{_T('waveformID_networkCode')} = '{self.network}' "
if self.station:
q = q + f" and Pick.{_T('waveformID_stationCode')} = '{self.station}' "
for p in dbq.getObjectIterator(q, datamodel.Pick.TypeInfo()):
pick = datamodel.Pick.Cast(p)
if (
self.delay
and float(pick.creationInfo().creationTime() - pick.time().value())
> self.delay
):
continue
picks.append(pick)
for p in picks:
dbq.loadComments(p)
ep.add(p)
if not self.noamp:
if self.checkInventory:
q = (
"select distinct(PAmplitude.%s), Amplitude.* "
"from PublicObject as PAmplitude, Amplitude, PublicObject \
as PPick, Pick, Network, Station, SensorLocation "
"where PAmplitude._oid=Amplitude._oid and "
"PPick._oid=Pick._oid and Network._oid=Station._parent_oid and "
"Station._oid=SensorLocation._parent_oid and Station.%s >= %s and "
"Station.%s <= %s and Station.%s >= %s and Station.%s <= %s and "
"SensorLocation.%s=Pick.%s and SensorLocation.%s <= Pick.%s and "
"(SensorLocation.%s is null or SensorLocation.%s > Pick.%s) and "
"Station.%s=Pick.%s and Network.%s=Pick.%s and "
"Pick.%s >= '%s' and Pick.%s < '%s' and PPick.%s=Amplitude.%s"
""
% (
_T("publicID"),
colLat,
self.bbox[0],
colLat,
self.bbox[2],
colLon,
self.bbox[1],
colLon,
self.bbox[3],
_T("code"),
_T("waveformID_locationCode"),
_T("start"),
_T("time_value"),
_T("end"),
_T("end"),
_T("time_value"),
_T("code"),
_T("waveformID_stationCode"),
_T("code"),
_T("waveformID_networkCode"),
_T("time_value"),
self.tmin.toString(fmt),
_T("time_value"),
self.tmax.toString(fmt),
_T("publicID"),
_T("pickID"),
)
)
else:
q = (
"select distinct(PAmplitude.%s), Amplitude.* "
"from PublicObject as PAmplitude, Amplitude, PublicObject as PPick, Pick "
"where PAmplitude._oid=Amplitude._oid and PPick._oid=Pick._oid and "
"Pick.%s >= '%s' and Pick.%s < '%s' and PPick.%s=Amplitude.%s"
""
% (
_T("publicID"),
_T("time_value"),
self.tmin.toString(fmt),
_T("time_value"),
self.tmax.toString(fmt),
_T("publicID"),
_T("pickID"),
)
)
if self.manual:
q = q + f" and Pick.{_T('evaluationMode')} = 'manual' "
if self.automatic:
q = q + f" and Pick.{_T('evaluationMode')} = 'automatic' "
if self.author:
q = q + f" and Pick.{_T('creationInfo_author')} = '{self.author}' "
if self.network:
q = q + " and Pick.%s = '%s' " % (
_T("waveformID_networkCode"),
self.network,
)
if self.station:
q = q + " and Pick.%s = '%s' " % (
_T("waveformID_stationCode"),
self.station,
)
for a in dbq.getObjectIterator(q, datamodel.Amplitude.TypeInfo()):
amp = datamodel.Amplitude.Cast(a)
if (
self.delay
and float(
amp.creationInfo().creationTime()
- amp.timeWindow().reference()
)
> self.delay
):
continue
ep.add(amp)
noAmps += 1
if self.type == "0":
ar = io.XMLArchive()
ar.create(self.output)
ar.setFormattedOutput(self.commandline().hasOption("formatted"))
ar.writeObject(ep)
ar.close()
elif self.type in ["1", "2", "3"]:
if len(picks) == 0:
print(
"No picks are found and written",
file=sys.stderr,
)
return False
# convert times to string depending on requested output format
# time and line format
if self.type == "2":
timeFMT = "%Y,%m,%d,%H,%M,%S"
lineFMT = "{0} {1} {2} {3} {4} {5}"
elif self.type == "3":
timeFMT = "%FT%T"
lineFMT = "{2} {3} {4} {5} {0} {1}"
else:
timeFMT = "%F %T"
lineFMT = "{0};{1};{2}.{3}.{4}.{5}"
lines = set()
for pick in picks:
net = pick.waveformID().networkCode()
station = pick.waveformID().stationCode()
loc = pick.waveformID().locationCode()
channelGroup = f"{pick.waveformID().channelCode()[:2]}*"
# FDSNWS requires empty location to be encoded by 2 dashes
if not loc and self.type == "3":
loc = "--"
# add some margin to pick times
minTime = pick.time().value() - core.TimeSpan(float(self.margin[0]))
maxTime = pick.time().value() + core.TimeSpan(float(self.margin[-1]))
minTime = minTime.toString(timeFMT)
maxTime = maxTime.toString(timeFMT)
lines.add(
lineFMT.format(minTime, maxTime, net, station, loc, channelGroup)
)
if self.output == "-":
out = sys.stdout
else:
print(f"Output data to file: {self.output}", file=sys.stderr)
try:
out = open(self.output, "w", encoding="utf8")
except Exception:
print("Cannot create output file '{self.output}'", file=sys.stderr)
return False
for line in sorted(lines):
print(line, file=out)
if self.output != "-":
out.close()
else:
print(
f"Unspupported output format '{self.type}': No objects are written",
file=sys.stderr,
)
return False
print(
f"Saved: {len(picks):d} picks, {noAmps:d} amplitudes",
file=sys.stderr,
)
return True
def main(argv):
app = DumpPicks(len(argv), argv)
return app()
if __name__ == "__main__":
sys.exit(main(sys.argv))

BIN
bin/ew2sc Executable file

Binary file not shown.

28
bin/extr_file Executable file
View File

@ -0,0 +1,28 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
import sys
from seiscomp import mseedlite as mseed
open_files = {}
if len(sys.argv) != 2:
print("Usage: extr_file FILE")
sys.exit(1)
for rec in mseed.Input(open(sys.argv[1], "rb")):
oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)
if oname not in open_files:
postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year,
rec.begin_time.timetuple()[7], rec.begin_time.hour,
rec.begin_time.minute)
open_files[oname] = open(oname + postfix, "ab")
ofile = open_files[oname]
ofile.write(rec.header + rec.data)
for oname in open_files:
open_files[oname].close()
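# Output files are named STA.NET.LOC.CHA.D.YYYY.DDD.HHMM, for example
# (hypothetical stream): WLF.GE..BHZ.D.2023.020.1352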

1620
bin/fdsnws Executable file

File diff suppressed because it is too large

BIN
bin/fdsnxml2inv Executable file

Binary file not shown.

260
bin/gempa-check-database Executable file
View File

@ -0,0 +1,260 @@
#!/usr/bin/env seiscomp-python
############################################################################
# Copyright (C) 2021 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
# #
# Author: Stephan Herrnkind #
# Email: herrnkind@gempa.de #
############################################################################
import os
import sys
from collections import OrderedDict
from seiscomp import client, logging
def writeUpdateStatements(database, tables, charset):
filename = f"/tmp/update-mysql-charset-{charset}-{database}.sql"
with open(filename, "w", encoding="utf8") as f:
print(
f"ALTER DATABASE `{database}` "
f"CHARACTER SET {charset} COLLATE {charset}_bin;",
file=f,
)
for table in tables:
print(
f"ALTER TABLE `{database}`.`{table}` "
f"CONVERT TO CHARACTER SET {charset} COLLATE {charset}_bin;",
file=f,
)
print("", file=f)
for table in tables:
print(f"ANALYZE TABLE `{database}`.`{table}`;", file=f)
return filename
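# The generated file contains statements like the following (database and
# table names are placeholders):
#   ALTER DATABASE `seiscomp` CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
#   ALTER TABLE `seiscomp`.`Pick` CONVERT TO CHARACTER SET utf8mb4 COLLATE utf8mb4_bin;
#   ANALYZE TABLE `seiscomp`.`Pick`;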
def checkBinaryCollation(charset, collation):
return collation == f"{charset}_bin"
class CheckDatabase(client.Application):
def __init__(self, argc, argv):
super().__init__(argc, argv)
self.setDaemonEnabled(False)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setConnectionRetries(0)
self.setLoggingToStdErr(True)
def validateParameters(self):
if not super().validateParameters():
return False
# Disable messaging if database connection string is provided
if self.databaseURI():
self.setMessagingEnabled(False)
return True
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]"""
)
client.Application.printUsage(self)
print(
f"""Examples:
Run the script getting the database parameters from default messaging
{os.path.basename(__file__)}
Run the script specifying the database parameters
{os.path.basename(__file__)} --debug -d mysql://sysop:sysop@localhost/seiscomp"""
)
def run(self):
if not self.query():
logging.error("No database connection available")
return False
dbType = self.databaseType()
if dbType and dbType != "mysql":
print(f"No tests for database type {dbType} available.")
return True
db = self.query().driver()
# query database name
q = "SELECT DATABASE()"
if not db.beginQuery(q) or not db.fetchRow() or not db.getRowFieldString(0):
logging.error("Could not query database name")
return False
dbName = db.getRowFieldString(0)
db.endQuery()
logging.info(f"Checking encoding of database: {dbName}")
# collect charset found at database, table and column level
charsets = set()
# select default database character set and collation
q = (
"SELECT default_character_set_name, default_collation_name "
"FROM information_schema.SCHEMATA "
f"WHERE schema_name = '{dbName}'"
)
if not db.beginQuery(q) or not db.fetchRow() or db.getRowFieldCount() != 2:
logging.error("Could not query default database charset and collation")
return False
charset = db.getRowFieldString(0)
collation = db.getRowFieldString(1)
db.endQuery()
binCollation = checkBinaryCollation(charset, collation)
logging.debug(f"{dbName:<48}{charset} -> {collation}")
# select default table character set and collation
q = (
"SELECT T.table_name, CCSA.character_set_name, CCSA.collation_name "
"FROM information_schema.`TABLES` T, "
"information_schema.`COLLATION_CHARACTER_SET_APPLICABILITY` CCSA "
"WHERE CCSA.collation_name = T.table_collation AND "
f"T.table_schema = '{dbName}' "
"ORDER BY T.table_name"
)
if not db.beginQuery(q) or not db.fetchRow() or db.getRowFieldCount() != 3:
logging.error("Could not query default charset and collation of tables")
return False
tables = OrderedDict()
while True:
table, charset, collation = (
db.getRowFieldString(col) for col in range(0, 3)
)
tables[table] = (charset, collation)
charsets.add(charset)
binCollation = binCollation and checkBinaryCollation(charset, collation)
if not db.fetchRow():
break
db.endQuery()
# select charset and collation of all tables and columns
q = (
"SELECT table_name, column_name, character_set_name, collation_name "
"FROM information_schema.`COLUMNS` "
f"WHERE table_schema = '{dbName}' "
"ORDER BY table_name, column_name"
)
if not db.beginQuery(q) or not db.fetchRow() or db.getRowFieldCount() != 4:
logging.error("Could not query charset and collation of columns")
return False
prevTable = None
while True:
table, col, charset, collation = (
db.getRowFieldString(col) for col in range(0, 4)
)
if prevTable != table:
if table not in tables:
tables[table] = ("?", "?")
tCharset, tCollation = tables[table]
logging.debug(f" {table:<44}{tCharset} -> {tCollation}")
prevTable = table
if charset:
logging.debug(f" {col:<40}{charset} -> {collation}")
charsets.add(charset)
binCollation = binCollation and checkBinaryCollation(charset, collation)
if not db.fetchRow():
break
db.endQuery()
filenames = []
issues = []
utf8mb4 = "utf8mb4"
if utf8mb4 not in charsets:
filename = writeUpdateStatements(dbName, tables.keys(), utf8mb4)
issues.append(
f"Your database is not configured with the {utf8mb4} character set. "
"Certain unicode characters may not be stored correctly. Consider "
f"applying the migrations in:\n - {filename}"
)
noBinText = (
"Found collation other than 'binary'. Case-insensitive collations should "
"be avoided because they may lead to publicID collisions. "
)
if len(charsets) > 1:
filenames = []
for charset in charsets:
filename = writeUpdateStatements(dbName, tables.keys(), charset)
if charset == utf8mb4:
filename += " (preferred)"
filenames.append(filename)
fileNamesText = "\n - ".join(filenames)
issues.append(
"Found more than one character set. It is recommended to use the same "
f"character set across all tables. {'' if binCollation else noBinText}"
"Consider applying the migrations in one of the following files:"
f"\n - {fileNamesText}"
)
elif not binCollation:
filename = writeUpdateStatements(dbName, tables.keys(), charsets.pop())
issues.append(
f"{noBinText}Consider applying the migrations in:\n - {filename}"
)
if issues:
print("Found database issues:")
for issue in issues:
print(f" * {issue}")
print(
"""
Update instructions:
* Stop scmaster
* Ensure that no other modules like scdb, scardac, etc. or custom tools from
internal or external clients access the database.
* Login to your database, e.g.:
mysql -u sysop -p
* Source one of the suggested update scripts:
SOURCE /tmp/update-mysql-charset-CHARACTERSET-DATABASE.sql"""
)
return False
print("No database issues found.")
return True
# Main method to call the app
def main(argc, argv):
app = CheckDatabase(argc, argv)
return app()
# Call the main method if run as script
if __name__ == "__main__":
sys.exit(main(len(sys.argv), sys.argv))

787
bin/gempa-checkSCconfig Executable file
View File

@ -0,0 +1,787 @@
#!/usr/bin/env seiscomp-python
############################################################################
# Copyright (C) 2021 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
# #
# Author: Dirk Roessler #
# Email: roessler@gempa.de #
############################################################################
from __future__ import absolute_import, division, print_function
import fnmatch
import sys
import os
import seiscomp.client
import seiscomp.io
import seiscomp.datamodel
import seiscomp.logging as logging
# define the latest version for which this script applies
latestVersion = "6.6.0"
# map deprecated module and binding configuration parameters (dict
# 'parameters') and parameter values (dict 'valuesCfg') to their replacements.
# Key format: "OLD, version:VERSION" where VERSION is either a SeisComP
# release (X.Y.Z) or a gempa module name; an empty value means the entry was
# removed without replacement.
###############################################################################
# binding parameters
parameters = {
###########################################################################
# binding parameter names
# magnitudes
# MLh
"MLh.maxavg, version:4.5.0": "amplitudes.MLh.params",
"MLh.ClippingThreshold, version:4.5.0": "amplitudes.MLh.ClippingThreshold",
"MLh.params, version:4.5.0": "magnitudes.MLh.params",
# md
"md.maxavg, version:4.5.0": "magnitudes.md.seismo",
"md.taper, version:4.5.0": "magnitudes.md.taper",
"md.signal_length, version:4.5.0": "magnitudes.md.signal_length",
"md.butterworth, version:4.5.0": "magnitudes.md.butterworth",
"md.depthmax, version:4.5.0": "magnitudes.md.depthmax",
"md.deltamax, version:4.5.0": "magnitudes.md.deltamax",
"md.snrmin, version:4.5.0": "magnitudes.md.snrmin",
"md.mdmax, version:4.5.0": "magnitudes.md.mdmax",
"md.fma, version:4.5.0": "magnitudes.md.fma",
"md.fmb, version:4.5.0": "magnitudes.md.fmb",
"md.fmd, version:4.5.0": "magnitudes.md.fmd",
"md.fmf, version:4.5.0": "magnitudes.md.fmf",
"md.fmz, version:4.5.0": "magnitudes.md.fmz",
"md.linearcorrection, version:4.5.0": "magnitudes.md.linearcorrection",
"md.offset, version:4.5.0": "magnitudes.md.offset",
"md.stacor, version:4.5.0": "magnitudes.md.stacor",
# MLr
"MLr.maxavg, version:4.5.0": "magnitudes.MLr.params",
# Ms_20
"Ms_20.lowerPeriod, version:4.5.0": "magnitudes.Ms_20.lowerPeriod",
"Ms_20.upperPeriod, version:4.5.0": "magnitudes.Ms_20.upperPeriod",
"Ms_20.minDist, version:4.5.0": "magnitudes.Ms_20.minDist",
"Ms_20.maxDist, version:4.5.0": "magnitudes.Ms_20.maxDist",
"Ms_20.maxDepth, version:4.5.0": "magnitudes.Ms_20.maxDepth",
# MLv
"MLv.logA0, version:4.5.0": "magnitudes.MLv.logA0",
"MLv.maxDistanceKm, version:4.5.0": "magnitudes.MLv.maxDistanceKm",
# ML
"ML.logA0, version:4.5.0": "magnitudes.ML.logA0",
"ML.maxDistanceKm, version:4.5.0": "magnitudes.ML.maxDistanceKm",
###########################################################################
###########################################################################
# module parameter names
# scmv
"legend, version:4.6.0": "scheme.map.showLegends",
# data base
"database.type, version:5.0.0": "database",
"database.parameters, version:5.0.0": "database",
# RecordsStream
"recordstream.service, version:5.0.0": "recordstream",
"recordstream.source, version:5.0.0": "recordstream",
# scautoloc
"autoloc.locator.profile, version:4.3.0": "locator.profile",
"autoloc.cleanupInterval, version:4.3.0": "buffer.cleanupInterval",
"autoloc.maxAge, version:4.3.0": "buffer.pickKeep",
"autoloc.wakeupInterval, version:4.3.0": "",
# magnitudes
"magnitudes.*.regions, version:5.0.0": "magnitudes.*.regionFile",
###########################################################################
# SC 5.0.0
# global parameters for eventlist
"eventlist.customColumn, version:5.0.0": "eventlist.customColumn.name",
"eventlist.regions, version:5.0.0": "eventlist.filter.regions.profiles",
"eventlist.region, version:5.0.0": "eventlist.filter.regions.region",
# global parameters for eventedit
"eventedit.customColumn, version:5.0.0": "eventedit.origin.customColumn.name",
"eventedit.customColumn.default, version:5.0.0": "eventedit.origin.customColumn.default",
"eventedit.customColumn.originCommentID, version:5.0.0": "eventedit.origin.customColumn.originCommentID",
"eventedit.customColumn.pos, version:5.0.0": "eventedit.origin.customColumn.pos",
"eventedit.customColumn.colors, version:5.0.0": "eventedit.origin.customColumn.colors",
###########################################################################
# SC 5.1.1
# scolv
"picker.auxilliary., version:5.1.1": "picker.auxiliary.",
# SC 5.4.0
# ttt.homogeneous
"ttt.homogeneous.profile., version:5.4.0": "ttt.homogeneous.",
# SC 6.0.0
# scardac
"batchSize, version:6.0.0": "",
# SC 6.5.0
# StdLoc
"GridSearch.cellSize, version:6.5.0": "GridSearch.numPoints",
# scanloc
"clusterSearch.ignorePicks, version:scanloc": "buffer.ignorePickTimeDifference",
"clusterSearch.ignorePickTimeDifference, version:scanloc": "buffer.ignorePickTimeDifference",
"buffer.originAuthorWhiteList, version:scanloc": "buffer.authorWhiteList",
"score.weight.p, version:scanloc": "score.sum.weight.p",
"score.weight.s, version:scanloc": "score.sum.weight.s",
"score.weight.p0, version:scanloc": "score.sum.weight.p0",
"score.weight.s0, version:scanloc": "score.sum.weight.s0",
"score.weight.residual, version:scanloc": "score.sum.weight.residual",
"score.weight.depth, version:scanloc": "score.sum.weight.depth",
"score.weight.increaseManual, version:scanloc": "score.sum.weight.increaseManual",
"ossum.p, version:scanloc": "score.sum.weight.p",
"ossum.s, version:scanloc": "score.sum.weight.s",
"ossum.p0, version:scanloc": "score.sum.weight.p0",
"ossum.s0, version:scanloc": "score.sum.weight.s0",
"ossum.residual, version:scanloc": "score.sum.weight.residual",
"ossum.depth, version:scanloc": "score.sum.weight.depth",
"ossum.increaseManual, version:scanloc": "score.sum.weight.increaseManual",
"ossum.normalizationDepth, version:scanloc": "score.sum.weight.normalizationDepth",
"ossum.normalizationRMS, version:scanloc": "score.sum.weight.normalizationRMS",
"clustersearch.streamCheckLevel, version:scanloc": "streamCheckLevel",
# vortex
"script.*.path, version:vortex": "event.script.*.path",
# TOAST
"alwaysShowAdditionalOptions, version:toast": "dissemination.alwaysShowAdditionalOptions",
"confirmationRequired, version:toast": "dissemination.confirmationRequired",
"disseminationSelectionBehavior, version:toast": "dissemination.selectionBehavior",
}
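# Example: the entry
#   "autoloc.maxAge, version:4.3.0": "buffer.pickKeep"
# flags any occurrence of 'autoloc.maxAge' as deprecated since SeisComP 4.3.0
# and suggests 'buffer.pickKeep' as the replacement.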
###############################################################################
# parameter values
# module and binding configuration
valuesCfg = {
# plugins
"parameter:plugins,rsas, version:4.5.0": "",
# messaging
"parameter:plugins,dbplugin, version:4.0.0": "",
# events list
"parameter:eventlist.visibleColumns, TP, version:4.0.4": "MType for \
eventlist.visibleColumns",
# magnitude calibration functions
"parameter:logA0,0-1., version:5.0.0": "New format for logA0 parameter of \
magnitude: delta1:M1,delta2:M2,...",
# sigma
"parameter:plugins,gmpeeewd, version:sigma": "Replace 'gmpeeewd' by 'cppgmpe'",
"parameter:plugins,py3gmpe, version:sigma": "Replace 'py3gmpe' by 'cppgmpe' unless \
more Python3 GMPEs are needed",
"parameter:gempa.plugins,py3gmpe, version:sigma": "Remove and apply defaults or \
replace 'py3gmpe' by 'cppgmpe' unless more Python3 GMPEs are needed. \
Check cppgmpe/GMPE names!",
"parameter:gempa.plugins,pygmpe, version:sigma": "Remove and apply defaults or \
replace 'pygmpe' by 'cppgmpe' unless more Python GMPEs are needed. \
Check cppgmpe/GMPE names!",
"parameter:plugins,pygmpe, version:sigma": "Replace 'pygmpe' by 'cppgmpe' unless \
more Python GMPEs are needed",
"gfz-potsdam.de, version:4.0.0": "Replace by gfz.de - server address has changed",
}
###############################################################################
def gempaStatement(gempa):
print(
f""" + This parameter seems to belong to the gempa module '{gempa}'. The \
proposed action is valid
for the most recent version. Read the changelog of '{gempa}' before applying \
the changes!""",
file=sys.stderr,
)
return True
def checkParameter(inFile, oldValue, newValue):
found = 0
line = ""
with open(inFile, encoding="utf-8") as f:
lines = f.readlines()
lineNo = 0
for line in lines:
found = None
lineNo += 1
if line.startswith("#"):
continue
words = line.split()
# Check each possible substring
for i in range(len(words)):
for j in range(i + 1, len(words) + 1):
substring = " ".join(words[i:j])
if fnmatch.fnmatch(substring, oldValue) or oldValue in substring:
# reduce false positives: skip matches directly preceded or followed
# by '.' or ','
idxStart = line.find(oldValue)
if idxStart > 0 and line[idxStart - 1] in ".,":
continue
idxEnd = idxStart + len(oldValue)
if idxStart >= 0 and idxEnd < len(line) and line[idxEnd] in ".,":
continue
if newValue not in oldValue and newValue not in line:
found = lineNo
if newValue in oldValue:
found = lineNo
if found:
return found, line
return found, line
###############################################################################
def findValue(inFile, oldValue, parameter=None):
found = 0
line = ""
with open(inFile, encoding="utf-8") as f:
lines = f.readlines()
lineNo = 0
for line in lines:
lineNo += 1
if line.startswith("#"):
continue
if parameter and parameter not in line:
continue
if oldValue in line:
found = lineNo
return found, line
return found, line
###############################################################################
def printFinal(version, issuesFound=None):
print(
f"This check applies to SeisComP in version <= {version}",
file=sys.stderr,
)
print(" + read your own version, e.g.: 'seiscomp exec scm -V'", file=sys.stderr)
if issuesFound:
print(f" + found issues: {issuesFound}", file=sys.stderr)
else:
print(" + no issues found \U0001f44d", file=sys.stderr)
print(
"""
Alert and recommendation:
* Applies to: SeisComP databases using the charset utf8mb4 created with SeisComP in
version 6.0.0 <= version < 6.7.0 or nightly after 17 August 2023 until February 2025
* Issue: The charset used for the database does not distinguish between upper-
and lower-case characters.
* Actions:
* Install the script 'gempa-check-database' with the package 'seiscomp-tools'
using gsm or download from https://data.gempa.de/packages/Public/.
* Stop scmaster
* Ensure that no other modules like scdb, scardac, etc. or custom tools from
internal or external clients access the database.
* Login to your database, e.g.:
mysql -u sysop -p
* Source one of the suggested update scripts:
SOURCE /tmp/update-mysql-charset-CHARACTERSET-DATABASE.sql"""
)
return True
class CheckConfig(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(False, False)
self._configDir = False
self._home = os.environ["HOME"]
self._root = None
self._seiscompVersion = latestVersion
def createCommandLineDescription(self):
self.commandline().addGroup("SeisComP")
self.commandline().addStringOption(
"SeisComP",
"config-dir,c",
"Path to non-standard @CONFIGDIR@. Default: False",
)
self.commandline().addStringOption(
"SeisComP",
"root,r",
"SeisComP root directory to search for "
"SYSTEMCONFIGDIR. Default: $SEISCOMP_ROOT",
)
self.commandline().addStringOption(
"SeisComP",
"seiscomp-version, s",
"SeisComP version number to be considered for testing.",
)
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
Identify + report legacy configuration for SeisComP and gempa modules in version <= \
{latestVersion}"""
)
seiscomp.client.Application.printUsage(self)
print(
f"""Examples:
For the test results read the log file or use --console 1
Simple run considering all parameters up to version {self._seiscompVersion}
{os.path.basename(__file__)}
Run with a specific $SEISCOMP_ROOT directory and SeisComP version
{os.path.basename(__file__)} -r /home/sysop/seiscomp-test --console 1 \
--seiscomp-version 4.8.0"""
)
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self._root = self.commandline().optionString("root")
except RuntimeError:
try:
self._root = os.environ["SEISCOMP_ROOT"]
except KeyError:
print(
"SEISCOMP_ROOT directory is undefined. Cannot continue.",
file=sys.stderr,
)
return False
logging.debug(f"ROOT directory not given. Assuming {self._root}")
try:
self._configDir = self.commandline().optionString("config-dir")
except RuntimeError:
logging.debug(
f"Configuration directory not set. Creating from ROOT: {self._root}"
)
try:
self._seiscompVersion = self.commandline().optionString("seiscomp-version")
except RuntimeError:
logging.debug(f"SeisComP not set. Assuming version {self._seiscompVersion}")
return True
def run(self):
issuesFound = 0
if not os.path.exists(self._home):
print(
f"{self._home} does not exist, check your option --home",
file=sys.stderr,
)
return False
# define @CONFIGDIR@ / @SYSTEMCONFIGDIR@
if not self._configDir:
configSC = os.path.join(self._home, ".seiscomp")
configSC3 = os.path.join(self._home, ".seiscomp3")
systemConfigSC = os.path.join(self._root, "etc")
keysSC = os.path.join(self._root, "etc/key")
else:
configSC = self._configDir
configSC3 = configSC
systemConfigSC = os.path.join(configSC, "etc")
keysSC = os.path.join(configSC, "etc/key")
print("\nTesting general issues:", file=sys.stderr)
if os.path.exists(configSC3):
for configFile in os.listdir(configSC3):
if configFile.endswith("cfg"):
print(
"SeisComP3 configuration still exists in "
f"{configSC3} - consider migrating or removing",
file=sys.stderr,
)
issuesFound += 1
break
if not os.path.exists(configSC):
logging.debug(
f"SeisComP configuration '{configSC}' does not exist in '{self._home}'"
"and will not be tested. You may consider using '-c'."
)
return False
if not os.path.exists(systemConfigSC):
logging.error(
f"SeisComP system configuration {systemConfigSC} does not exist in "
f"{self._root}. Set your SeisComP variables or consider using '-r'."
)
return False
if not os.path.exists(keysSC):
logging.error(
f"SeisComP key directory {keysSC} does not exist in {self._root}. "
"Set your SeisComP variables or consider using '-r'"
)
return False
# test if old license path exists
oldLicenses = os.path.join(configSC, "licenses")
if os.path.exists(oldLicenses):
print(
f" + ISSUE: Found old license directory '{oldLicenses}'. Move it to "
"@DATADIR@/licenses",
file=sys.stderr,
)
print(
" + more information: "
"'https://www.gempa.de/news/2022/02/licenses/'",
file=sys.stderr,
)
issuesFound += 1
# filter parameters by considered version
parametersFiltered = {}
for test, newValue in parameters.items():
items = test.split(",")
oldValue = None
version = None
parameter = None
gempa = False
for item in items:
if "version" in item:
version = item.split(":")[1]
if len(version.split(".")) != 3:
gempa = version
continue
oldValue = item
if oldValue is None:
continue
if not gempa and version and version > self._seiscompVersion:
continue
parametersFiltered[oldValue] = [newValue, parameter, gempa]
# if ", version:" in oldValue:
# key = oldValue.split(", version:")[0]
# version = oldValue.split(", version:")[1]
# if version <= self._seiscompVersion:
# parametersFiltered[key] = newValue
# else:
# parametersFiltered[oldValue] = newValue
# module configuration parameter values
valuesCfgFiltered = {}
for oldValue, newValue in valuesCfg.items():
items = oldValue.split(",")
key = None
version = None
parameter = None
gempa = False
for item in items:
if "version" in item:
version = item.split(":")[1]
if len(version.split(".")) != 3:
gempa = version
elif "parameter" in item:
parameter = item.split(":")[1]
else:
key = item
if key is None:
continue
if not gempa and version and version > self._seiscompVersion:
continue
valuesCfgFiltered[key] = [newValue, parameter, gempa]
print("\nTesting module configurations in @CONFIGDIR@:", file=sys.stderr)
# test module configurations in CONFIGDIR
for config in os.listdir(configSC):
configFile = os.path.join(configSC, config)
if not os.path.isfile(configFile):
continue
if not configFile.endswith("cfg"):
continue
logging.debug(f" + testing module configurations in {configFile}")
print(f" + file {configFile}", file=sys.stderr)
# test parameters
for oldValue, parameter in parametersFiltered.items():
newValue = parameter[0]
gempa = parameter[2]
result = checkParameter(configFile, oldValue, newValue)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
print(
f" + POTENIAL ISSUE on line {lineNo}: obsolete/deprecated "
f"parameter '{oldValue}'",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
if newValue:
print(
f" + new parameter: '{newValue}'",
file=sys.stderr,
)
else:
print(
" + action: Remove the value",
file=sys.stderr,
)
if gempa:
gempaStatement(gempa)
# test values
for oldValue, new in valuesCfgFiltered.items():
newValue = new[0]
parameter = new[1]
gempa = new[2]
result = findValue(configFile, oldValue)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
if newValue:
print(
f" + POTENIAL ISSUE on line {lineNo}: obsolete/deprecated "
f"parameter '{oldValue}' - new parameter: '{newValue}'\n"
" + action: Rename the parameter unless correct",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
continue
print(
f" + POTENIAL ISSUE on line lineNo: obsolete/deprecated "
f"parameter '{oldValue}'\n"
" + action: Remove the parameter unless correct",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
if gempa:
gempaStatement(gempa)
if issuesFound == 0:
print(" + found no issue", file=sys.stderr)
# test module configurations in SYSTEMCONFIGDIR
print(f"\nTesting module configurations in {systemConfigSC}:", file=sys.stderr)
for config in os.listdir(systemConfigSC):
configFile = os.path.join(systemConfigSC, config)
if not os.path.isfile(configFile):
continue
if not configFile.endswith("cfg"):
continue
logging.debug(f"testing module configurations in {configFile}")
print(f" + file {configFile}", file=sys.stderr)
# test parameters
for oldValue, parameter in parametersFiltered.items():
newValue = parameter[0]
gempa = parameter[2]
result = checkParameter(configFile, oldValue, newValue)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
print(
f" + POTENIAL ISSUE on line {lineNo}: obsolete/deprecated "
f"parameter '{oldValue}'",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
if newValue:
print(
f" + new parameter: '{newValue}'\n"
" + action: Replace the parameter unless correct",
file=sys.stderr,
)
else:
print(
" + action: Remove the parameter unless correct",
file=sys.stderr,
)
if gempa:
gempaStatement(gempa)
# test values
for oldValue, new in valuesCfgFiltered.items():
newValue = new[0]
parameter = new[1]
gempa = new[2]
result = findValue(configFile, oldValue)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
print(
f" + POTENIAL ISSUE on line {lineNo}: obsolete/deprecated "
f"parameter value '{oldValue}'",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
if newValue:
print(
f" + new value/action: {newValue}",
file=sys.stderr,
)
else:
print(
" + action: Replace the value unless correct",
file=sys.stderr,
)
if gempa:
gempaStatement(gempa)
print("\nTesting bindings configuration:", file=sys.stderr)
# test bindings configurations in key directory
for _, subDirs, _ in os.walk(keysSC):
# skip key files, just consider the module bindings directories
for subDir in subDirs:
bindingDir = os.path.join(keysSC, subDir)
if not os.path.isdir(bindingDir) or not os.listdir(bindingDir):
continue
for config in os.listdir(bindingDir):
bindingFile = os.path.join(bindingDir, config)
if not os.path.isfile(bindingFile):
continue
logging.debug(f"testing bindings in {bindingFile}")
print(f" + file {bindingFile}", file=sys.stderr)
for oldValue, parameter in parametersFiltered.items():
newValue = parameter[0]
gempa = parameter[2]
result = checkParameter(bindingFile, oldValue, newValue)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
print(
f" + POTENIAL ISSUE on line {lineNo}: "
f"obsolete/deprecated parameter '{oldValue}'",
file=sys.stderr,
)
print(f" + full line: {line.rstrip()}", file=sys.stderr)
if newValue:
print(
f" + new parameter: '{newValue}'",
file=sys.stderr,
)
else:
print(
" + - remove it",
file=sys.stderr,
)
if gempa:
gempaStatement(gempa)
# test values in bindings
for oldValue, new in valuesCfgFiltered.items():
newValue = new[0]
parameter = new[1]
gempa = new[2]
result = findValue(bindingFile, oldValue, parameter)
lineNo = result[0]
line = result[1]
if not lineNo:
continue
issuesFound += 1
print(
f" + POTENIAL ISSUE on line {lineNo} for parameter "
f"{parameter}:\n"
f" + full line: {line}"
f" + obsolete/deprecated value '{oldValue}' ",
file=sys.stderr,
)
if newValue:
print(
f" + new value: '{newValue}'",
file=sys.stderr,
)
else:
print(
f" + POTENIAL ISSUE on line {lineNo}: obsolete/"
f"deprecated parameter value '{oldValue}' - remove it",
file=sys.stderr,
)
if gempa:
gempaStatement(gempa)
print("\nSummary:", file=sys.stderr)
printFinal(self._seiscompVersion, issuesFound)
return True
def main(argv):
app = CheckConfig(len(argv), argv)
return app()
if __name__ == "__main__":
sys.exit(main(sys.argv))

View File

@ -0,0 +1,33 @@
#!/bin/bash
if [ -z ${SEISCOMP_ROOT+x} ]; then
echo "Environment variable SEISCOMP_ROOT is not set."
echo "Either use 'seiscomp exec [script]' or set SEISCOMP_ROOT to the installation "
exit 1
echo "path of your SeisComP installation."
fi
grep -A 2 ^station $SEISCOMP_ROOT/var/lib/seedlink/seedlink.ini | while read a b c; do
if [ "$a" = station -a "$b" != .dummy ]; then
id=$b
sta=""
net=""
while read a b c; do
case $a in
--) break;;
name) eval sta=$c;;
network) eval net=$c;;
esac
done
if [ -z "$id" -o -z "$sta" -o -z "$net" ]; then
echo "Error parsing seedlink.ini"
break
fi
if [ "$id" != "$net.$sta" ]; then
mv -v "$SEISCOMP_ROOT/var/lib/seedlink/buffer/$id" "$SEISCOMP_ROOT/var/lib/seedlink/buffer/$net.$sta"
else
echo "$id: No renaming required"
fi
fi
done

194
bin/gfs2fep Executable file
View File

@ -0,0 +1,194 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) gempa GmbH #
# All rights reserved. #
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Other Usage #
# Alternatively, this file may be used in accordance with the terms and #
# conditions contained in a signed written agreement between you and #
# gempa GmbH. #
############################################################################
import datetime
import getopt
import sys
from typing import TextIO
from seiscomp import geo
# -----------------------------------------------------------------------------
def printHelp():
msg = """
gfs2fep - converts a SeisComP GeoFeatureSet file (GeoJSON or BNA) to FEP format
usage: {} [OPTIONS]
-h, --help
print this help message
-i, --input
input file (default: -)
-o, --output
output fep file (default: -)
-a, --append
append fep data to output file instead of overwriting it
-p, --precision (default: unrestricted)
number of decimal places of coordintes"""
print(msg.format(sys.argv[0]), file=sys.stderr)
sys.exit(0)
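# Example invocation (a sketch; file names are hypothetical):
#   gfs2fep -i regions.geojson -o regions.fep -p 3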
# -----------------------------------------------------------------------------
def error(code, msg):
print(f"error ({code}): {msg}", file=sys.stderr)
sys.exit(code)
# -----------------------------------------------------------------------------
def run():
if len(sys.argv) == 1:
printHelp()
inFile = "-"
outFile = None
append = False
precision = None
opts, _ = getopt.getopt(
sys.argv[1:], "hi:o:ap:", ["help", "input=", "output=", "append", "precision"]
)
for o, a in opts:
if o in ("-h", "--help"):
printHelp()
if o in ("-i", "--input"):
inFile = a
if o in ("-o", "--output"):
outFile = a
if o in ("-a", "--append"):
append = True
if o in ("-p", "--precision"):
precision = max(int(a), 0)
gfs = geo.GeoFeatureSet()
if not gfs.readFile(inFile, None):
error(1, f"Could not read from file '{inFile}'")
# combine features sharing the same name
featureDict = {}
for f in gfs.features():
if not f.closedPolygon():
print(
f"warning: feature not a closed polygon: {f.name()}",
file=sys.stderr,
)
if f.name() in featureDict:
featureDict[f.name()].append(f)
else:
featureDict[f.name()] = [f]
# output is set to stdout or a file name if specified
if outFile and outFile != "-":
try:
with open(outFile, "a" if append else "w", encoding="utf8") as fp:
writeFEPFile(featureDict, inFile, fp, precision)
except Exception as e:
error(2, e)
else:
writeFEPFile(featureDict, inFile, sys.stdout, precision)
sys.stdout.flush()
# -----------------------------------------------------------------------------
def writeFEPFile(featureDict: dict, inFile: str, fp: TextIO, precision: int = None):
def _print(data: str):
print(data, file=fp)
if precision:
def _printVertex(v):
print(
f"{v.longitude():.{precision}f} {v.latitude():.{precision}f}", file=fp
)
else:
def _printVertex(v):
print(f"{v.longitude()} {v.latitude()}", file=fp)
_print(f"# created from file: {inFile}")
_print(
f"# created on {str(datetime.datetime.now())} by gfs2fep.py - (C) gempa GmbH"
)
_print("# LON LAT")
# write fep
for name, features in featureDict.items():
# print("{}: {}".format(len(features), name))
vCount = 0
fStart = features[0].vertices()[0]
v = fStart
# iterate over features sharing name
for f in features:
# vertex array contains vertices of main land and sub features
vertices = f.vertices()
# sub feature array holds indices of starting points
endIndices = list(f.subFeatures()) + [len(vertices)]
# iterate over main land and sub features
i = 0
for iEnd in endIndices:
vStart = vertices[i]
while i < iEnd:
v = vertices[i]
_printVertex(v)
vCount += 1
i += 1
# end sub feature on sub feature start
v = vStart
_printVertex(v)
vCount += 1
# go back to start of main land
if v != vertices[0]:
v = vertices[0]
_printVertex(v)
vCount += 1
# go back to start of first feature
if v != fStart:
v = fStart
_printVertex(v)
vCount += 1
# end fep region
_print(f"99.0 99.0 {vCount}")
_print(f"L {name}")
# -----------------------------------------------------------------------------
if __name__ == "__main__":
run()

141
bin/import_inv Executable file
View File

@ -0,0 +1,141 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import os
import subprocess
import glob
import seiscomp.client
class Importer(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(False, False)
self._args = argv[1:]
def run(self):
if len(self._args) == 0:
sys.stderr.write("Usage: import_inv [{format}|help] <input> [output]\n")
return False
if self._args[0] == "help":
if len(self._args) < 2:
sys.stderr.write("'help' can only be used with 'formats'\n")
sys.stderr.write("import_inv help formats\n")
return False
if self._args[1] == "formats":
return self.printFormats()
sys.stderr.write(f"unknown topic '{self._args[1]}'\n")
return False
fmt = self._args[0]
try:
prog = os.path.join(os.environ["SEISCOMP_ROOT"], "bin", f"{fmt}2inv")
        except KeyError:
sys.stderr.write(
"Could not get SeisComP root path, SEISCOMP_ROOT not set?\n"
)
return False
if not os.path.exists(prog):
sys.stderr.write(f"Format '{fmt}' is not supported\n")
return False
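        # For example, format "fdsnxml" resolves to $SEISCOMP_ROOT/bin/fdsnxml2inv
        # (assuming that converter is installed).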
if len(self._args) < 2:
sys.stderr.write("Input missing\n")
return False
        inputFile = self._args[1]
if len(self._args) < 3:
            filename = os.path.basename(os.path.abspath(inputFile))
if not filename:
filename = fmt
# Append .xml if the ending is not already .xml
if filename[-4:] != ".xml":
filename = filename + ".xml"
storage_dir = os.path.join(os.environ["SEISCOMP_ROOT"], "etc", "inventory")
output = os.path.join(storage_dir, filename)
            os.makedirs(storage_dir, exist_ok=True)
sys.stderr.write(f"Generating output to {output}\n")
else:
output = self._args[2]
        proc = subprocess.Popen(
            [prog, inputFile, output], stdout=None, stderr=None, shell=False
        )
        proc.communicate()
        if proc.returncode != 0:
            sys.stderr.write(f"Conversion failed, return code: {proc.returncode}\n")
return False
return True
def printFormats(self):
try:
path = os.path.join(os.environ["SEISCOMP_ROOT"], "bin", "*2inv")
        except KeyError:
sys.stderr.write(
"Could not get SeisComP root path, SEISCOMP_ROOT not set?\n"
)
return False
files = glob.glob(path)
formats = []
for f in files:
prog = os.path.basename(f)
formats.append(prog[: prog.find("2inv")])
formats.sort()
sys.stdout.write("%s\n" % "\n".join(formats))
return True
def printUsage(self):
print(
"""Usage:
import_inv [FORMAT] input [output]
import_inv help [topic]
Import inventory information from various sources."""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
List all supported inventory formats
import_inv help formats
  Convert from FDSN StationXML to SeisComP format
import_inv fdsnxml inventory_fdsnws.xml inventory_sc.xml
"""
)
if __name__ == "__main__":
app = Importer(len(sys.argv), sys.argv)
sys.exit(app())

278
bin/instdb2db2 Executable file
View File

@ -0,0 +1,278 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
import sys, os
import csv
from optparse import OptionParser
def quote(instr):
return '"'+instr+'"'
class base(object):
def __init__(self, filename, fields):
self.att = {}
fd = open(filename)
try:
try:
fieldNames = None
for row in csv.DictReader(fd, fieldNames):
id = row['id']
if id in self.att:
print("multiple %s found in %s" % (id, filename))
continue
for key in fields:
if not row[key]:
del(row[key])
del row['id']
try:
row['low_freq'] = float(row['low_freq'])
except KeyError:
pass
try:
row['high_freq'] = float(row['high_freq'])
except KeyError:
pass
self.att[id] = row
except KeyError as e:
raise Exception("column %s missing in %s" % (str(e), filename))
except (TypeError, ValueError) as e:
raise Exception("error reading %s: %s" % (filename, str(e)))
finally:
fd.close()
def keys(self):
return list(self.att.keys())
def screname(self, what):
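        # Convert snake_case attribute names to CamelCase,
        # e.g. "low_freq" -> "LowFreq" (then expanded to "LowFrequency" below).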
nc = ""
nu = True
for c in what:
if c == '_':
nu = True
continue
if nu:
nc += c.upper()
nu = False
else:
nc += c
if nc == 'LowFreq': nc = 'LowFrequency'
if nc == 'HighFreq': nc = 'HighFrequency'
return nc
def reorder(self):
att = {}
if not self.att:
return None
for (code, row) in self.att.items():
for (k, v) in row.items():
k = self.screname(k)
                dk = att.setdefault(k, {})
                dv = dk.setdefault(str(v), [])
                dv.append(code)
return att
def dump(self, fdo):
att = self.reorder()
        lastK = None
for (k, v) in att.items():
if not lastK: lastK = k
if lastK != k:
fdo.write("\n")
for (kv, ids) in v.items():
fdo.write("Ia: %s=%s" % (k,quote(kv)))
for id in ids:
fdo.write(" %s" % id)
fdo.write("\n")
fdo.write("\n")
class sensorAttributes(base):
def __init__(self, filename):
base.__init__(self, filename, ['id', 'type','unit', 'low_freq', 'high_freq', 'model', 'manufacturer', 'remark'])
class dataloggerAttributes(base):
def __init__(self, filename):
base.__init__(self, filename, ['id', 'digitizer_model', 'digitizer_manufacturer', 'recorder_model', 'recorder_manufacturer', 'clock_model', 'clock_manufacturer', 'clock_type', 'remark'])
class INST(object):
def cleanID(self, id):
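        # Return the part of the id after the last underscore,
        # e.g. "XX_YY_ABC" -> "ABC".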
nc = ""
for c in id:
nc += c
if c == '_':
nc = ""
return nc
def __init__(self, filename, attS, attD):
self.filename = filename
self.sensorA = sensorAttributes(attS)
self.dataloggerA = dataloggerAttributes(attD)
        lines = []
        with open(filename) as f:
            for line in f:
                line = line.strip()
                if not line or line[0] == '#':
                    # Add comment line types
                    lines.append({ 'content': line, 'type': 'C', 'id': None})
                else:
                    (id, line) = line.split(">", 1)
                    id = id.strip()
                    line = line.strip()
                    # Add undefined line types
                    lines.append({ 'content': line, 'type': 'U', 'id': id})
        self.lines = lines
self._filltypes()
def _filltypes(self):
for line in self.lines:
if line['type'] != 'U': continue
id = line['id']
if id.find('_FIR_') != -1:
line['type'] = 'F'
elif id.find('Sngl-gain_') != -1:
line['type'] = 'L'
line['id'] = self.cleanID(id)
elif id.find('_digipaz_') != -1:
line['type'] = 'P'
elif id.find('_iirpaz_') != -1:
line['type'] = 'I'
for line in self.lines:
if line['type'] != 'U': continue
id = self.cleanID(line['id'])
if id in list(self.sensorA.keys()):
line['type'] = 'S'
line['id'] = id
elif id in list(self.dataloggerA.keys()):
line['type'] = 'D'
line['id'] = id
            # These ids are forced explicitly
elif id in ['OSIRIS-SC', 'Gaia', 'LE24', 'MALI', 'PSS', 'FDL', 'CMG-SAM', 'CMG-DCM', 'EDAS-24', 'SANIAC']:
line['id'] = id
line['type'] = 'D'
elif id in ['Trillium-Compact', 'Reftek-151/120', 'BBVS-60', 'CMG-3ESP/60F', 'LE-1D/1', 'L4-3D/BW', 'S13', 'GS13', 'SH-1', 'MP', 'MARKL22', 'CM-3', 'CMG-6T', 'SM-6/BW']:
line['id'] = id
line['type'] = 'S'
for line in self.lines:
if line['type'] == 'U':
print("'"+self.cleanID(line['id'])+"', ", end=' ')
def dump(self, fdo):
sa = False
da = False
dataloggerFieldSize = 0
sensorFieldSize = 0
for line in self.lines:
if line['type'] == 'C': continue
if line['type'] == 'S':
if len(line['id']) > sensorFieldSize:
sensorFieldSize = len(line['id'])
if line['type'] == 'D':
if len(line['id']) > dataloggerFieldSize:
dataloggerFieldSize = len(line['id'])
seLine = "Se: %%%ss %%s\n" % (-1*(sensorFieldSize+1))
dtLine = "Dl: %%%ss %%s\n" % (-1*(dataloggerFieldSize+1))
for line in self.lines:
if line['type'] == 'C':
fdo.write(line['content'] + "\n")
continue
if line['type'] == 'S':
if not sa:
self.sensorA.dump(fdo)
sa = True
fdo.write(seLine % (line['id'], line['content']))
continue
if line['type'] == 'D':
if not da:
self.dataloggerA.dump(fdo)
da = True
fdo.write(dtLine % (line['id'], line['content']))
continue
if line['type'] == 'L':
fdo.write("Cl: %s %s\n" % (line['id'], line['content']))
continue
if line['type'] == 'F':
fdo.write("Ff: %s %s\n" % (line['id'], line['content']))
continue
if line['type'] == 'P':
fdo.write("Pz: %s %s\n" % (line['id'], line['content']))
continue
if line['type'] == 'I':
fdo.write("If: %s %s\n" % (line['id'], line['content']))
continue
def main():
    parser = OptionParser(usage="usage: %prog [options] input [output]", description="Old tab to new tab converter", version="1.0", add_help_option=True)
parser.add_option("", "--sat", type="string",
help="Indicates the sensor attribute file to use", dest="sat", default="sensor_attr.csv")
parser.add_option("", "--dat", type="string",
help="Indicates the station attribute file to use", dest="dat", default="datalogger_attr.csv")
parser.add_option("-c", "--clean", action="store_true",
help="Remove the comments and blank lines", dest="cleanFile", default=False)
# Parsing & Error check
(options, args) = parser.parse_args()
errors = []
    if len(args) not in (1, 2):
        errors.append("need an input filename and optionally an output filename")
if not os.path.isfile(options.sat):
errors.append("sensor attribute file '%s' not found." % options.sat)
if not os.path.isfile(options.dat):
errors.append("datalogger attribute file '%s' not found." % options.dat)
if len(args) == 2 and os.path.isfile(args[1]):
errors.append("output file already exists, will not overwrite.")
if errors:
print("Found error while processing the command line:", file=sys.stderr)
for error in errors:
print(" %s" % error, file=sys.stderr)
return 1
inputName = args[0]
    i = INST(inputName, options.sat, options.dat)
    fdo = sys.stdout if len(args) < 2 else open(args[1], "w")
    i.dump(fdo)
    if fdo is not sys.stdout:
        fdo.close()
if __name__ == "__main__":
    sys.exit(main())

105
bin/inv2dlsv Executable file
View File

@ -0,0 +1,105 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import io
from seiscomp.legacy.fseed import SEEDVolume, SEEDError
from seiscomp.legacy.db.seiscomp3 import sc3wrap
from seiscomp.legacy.db.seiscomp3.inventory import Inventory
import seiscomp.datamodel
import seiscomp.io
ORGANIZATION = "EIDA"
def iterinv(obj):
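    # Flatten the two-level inventory mapping (e.g. code -> start time -> object)
    # into a single iterator of objects.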
return (j for i in obj.values() for j in i.values())
def main():
    if len(sys.argv) > 3:
        print("Usage: inv2dlsv [in_xml [out_dataless]]", file=sys.stderr)
return 1
if len(sys.argv) > 1:
inFile = sys.argv[1]
else:
inFile = "-"
if len(sys.argv) > 2:
out = sys.argv[2]
else:
out = ""
sc3wrap.dbQuery = None
ar = seiscomp.io.XMLArchive()
if not ar.open(inFile):
raise IOError(inFile + ": unable to open")
obj = ar.readObject()
if obj is None:
raise TypeError(inFile + ": invalid format")
sc3inv = seiscomp.datamodel.Inventory.Cast(obj)
if sc3inv is None:
raise TypeError(inFile + ": invalid format")
inv = Inventory(sc3inv)
inv.load_stations("*", "*", "*", "*")
inv.load_instruments()
vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)
for net in iterinv(inv.network):
for sta in iterinv(net.station):
for loc in iterinv(sta.sensorLocation):
for strm in iterinv(loc.stream):
try:
vol.add_chan(
net.code,
sta.code,
loc.code,
strm.code,
strm.start,
strm.end,
)
except SEEDError as exc:
print(
f"Error ({net.code},{sta.code},{loc.code},{strm.code}): {str(exc)}",
file=sys.stderr,
)
if not out or out == "-":
output = io.BytesIO()
vol.output(output)
stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout
stdout.write(output.getvalue())
stdout.flush()
output.close()
else:
        with open(out, "wb") as fd:
vol.output(fd)
return 0
if __name__ == "__main__":
try:
sys.exit(main())
except Exception as e:
print(f"Error: {str(e)}", file=sys.stderr)
sys.exit(1)

BIN
bin/invextr Executable file

Binary file not shown.

BIN
bin/load_timetable Executable file

Binary file not shown.

329
bin/msrtsimul Executable file
View File

@ -0,0 +1,329 @@
#!/usr/bin/env seiscomp-python
from __future__ import absolute_import, division, print_function
import sys
import os
import time
import datetime
import calendar
import math
import stat
from getopt import gnu_getopt, GetoptError
from seiscomp import mseedlite as mseed
# ------------------------------------------------------------------------------
def read_mseed_with_delays(delaydict, reciterable):
"""
Create an iterator which takes into account configurable realistic delays.
This function creates an iterator which returns one miniseed record at a time.
Artificial delays can be introduced by using delaydict.
This function can be used to make simulations in real time more realistic
when e.g. some stations have a much higher delay than others due to
narrow bandwidth communication channels etc.
A delaydict has the following data structure:
keys: XX.ABC (XX: network code, ABC: station code). The key "default" is
a special value for the default delay.
values: Delay to be introduced in seconds
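    Example delaydict (hypothetical values):
        {"default": 2.0, "GE.APE": 10.0}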
This function will rearrange the iterable object which has been used as
input for rt_simul() so that it can again be used by rt_simul but taking
artificial delays into account.
"""
import heapq # pylint: disable=C0415
heap = []
min_delay = 0
default_delay = 0
if "default" in delaydict:
default_delay = delaydict["default"]
for rec in reciterable:
rec_time = calendar.timegm(rec.end_time.timetuple())
delay_time = rec_time
stationname = f"{rec.net}.{rec.sta}"
if stationname in delaydict:
delay_time = rec_time + delaydict[stationname]
else:
delay_time = rec_time + default_delay
heapq.heappush(heap, (delay_time, rec))
toprectime = heap[0][0]
if toprectime - min_delay < rec_time:
topelement = heapq.heappop(heap)
yield topelement
while heap:
topelement = heapq.heappop(heap)
yield topelement
# ------------------------------------------------------------------------------
def rt_simul(f, speed=1.0, jump=0.0, delaydict=None):
"""
Iterator to simulate "real-time" MSeed input
At startup, the first MSeed record is read. The following records are
read in pseudo-real-time relative to the time of the first record,
resulting in data flowing at realistic speed. This is useful e.g. for
demonstrating real-time processing using real data of past events.
The data in the input file may be multiplexed, but *must* be sorted by
time, e.g. using 'mssort'.
"""
rtime = time.time()
etime = None
skipping = True
record_iterable = mseed.Input(f)
if delaydict:
record_iterable = read_mseed_with_delays(delaydict, record_iterable)
for rec in record_iterable:
if delaydict:
rec_time = rec[0]
rec = rec[1]
else:
rec_time = calendar.timegm(rec.end_time.timetuple())
if etime is None:
etime = rec_time
if skipping:
if (rec_time - etime) / 60.0 < jump:
continue
etime = rec_time
skipping = False
tmax = etime + speed * (time.time() - rtime)
ms = 1000000.0 * (rec.nsamp / rec.fsamp)
last_sample_time = rec.begin_time + datetime.timedelta(microseconds=ms)
last_sample_time = calendar.timegm(last_sample_time.timetuple())
if last_sample_time > tmax:
time.sleep((last_sample_time - tmax + 0.001) / speed)
yield rec
# ------------------------------------------------------------------------------
def usage():
print(
"""Usage:
msrtsimul [options] file
miniSEED real-time playback and simulation
msrtsimul reads sorted (and possibly multiplexed) miniSEED files and writes
individual records in pseudo-real-time. This is useful e.g. for testing and
simulating data acquisition. Output is
$SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used.
Verbosity:
-h, --help Display this help message
-v, --verbose Verbose mode
Playback:
-j, --jump Minutes to skip (float).
-c, --stdout Write on standard output.
-d, --delays Seconds to add as artificial delays.
--seedlink Choose the seedlink module name. Useful if a seedlink
alias or non-standard names are used. Replaces
'seedlink' in the standard mseedfifo path.
    -m, --mode         Choose between 'realtime' and 'historic'.
-s, --speed Speed factor (float).
--test Test mode.
-u, --unlimited Allow miniSEED records which are not 512 bytes
Examples:
Play back miniSEED waveforms in real time with verbose output
msrtsimul -v data.mseed
Play back miniSEED waveforms in real time skipping the first 1.5 minutes
msrtsimul -j 1.5 data.mseed
"""
)
# ------------------------------------------------------------------------------
def main():
py2 = sys.version_info < (3,)
ifile = sys.stdin if py2 else sys.stdin.buffer
verbosity = 0
speed = 1.0
jump = 0.0
test = False
    unlimited = False
seedlink = "seedlink"
mode = "realtime"
try:
opts, args = gnu_getopt(
sys.argv[1:],
"cd:s:j:vhm:u",
[
"stdout",
"delays=",
"speed=",
"jump=",
"test",
"verbose",
"help",
"mode=",
"seedlink=",
"unlimited"
],
)
except GetoptError:
usage()
return 1
out_channel = None
delays = None
for flag, arg in opts:
if flag in ("-c", "--stdout"):
out_channel = sys.stdout if py2 else sys.stdout.buffer
elif flag in ("-d", "--delays"):
delays = arg
elif flag in ("-s", "--speed"):
speed = float(arg)
elif flag in ("-j", "--jump"):
jump = float(arg)
elif flag in ("-m", "--mode"):
mode = arg
elif flag == "--seedlink":
seedlink = arg
elif flag in ("-v", "--verbose"):
verbosity += 1
elif flag == "--test":
test = True
elif flag in ("-u", "--unlimited"):
            unlimited = True
else:
            usage()
            return 0 if flag in ("-h", "--help") else 1
if len(args) == 1:
if args[0] != "-":
try:
ifile = open(args[0], "rb")
except IOError as e:
print(
f"could not open input file '{args[0]}' for reading: {e}",
file=sys.stderr,
)
sys.exit(1)
elif len(args) != 0:
usage()
return 1
if out_channel is None:
try:
sc_root = os.environ["SEISCOMP_ROOT"]
except KeyError:
print("SEISCOMP_ROOT environment variable is not set", file=sys.stderr)
sys.exit(1)
mseed_fifo = os.path.join(sc_root, "var", "run", seedlink, "mseedfifo")
if verbosity:
print(f"output data to {mseed_fifo}", file=sys.stderr)
if not os.path.exists(mseed_fifo):
print(
f"""\
ERROR: {mseed_fifo} does not exist.
In order to push the records to SeedLink, \
SeedLink must be running and configured for real-time playback.
""",
file=sys.stderr,
)
sys.exit(1)
if not stat.S_ISFIFO(os.stat(mseed_fifo).st_mode):
print(
f"""\
ERROR: {mseed_fifo} is not a named pipe
Check if SeedLink is running and configured for real-time playback.
""",
file=sys.stderr,
)
sys.exit(1)
try:
out_channel = open(mseed_fifo, "wb")
except Exception as e:
print(str(e), file=sys.stderr)
sys.exit(1)
try:
delaydict = None
if delays:
delaydict = {}
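            # Example delay file (hypothetical values), one "NET.STA: seconds"
            # pair per line; the key "default" sets the default delay:
            #   default: 2.0
            #   GE.APE: 10.0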
            try:
                with open(delays, "r") as f:
                    for line in f:
                        content = line.split(":")
                        if len(content) != 2:
                            raise ValueError(
                                f"Could not parse a line in file {delays}: {line}\n"
                            )
                        delaydict[content[0].strip()] = float(content[1].strip())
except Exception as e:
print(f"Error reading delay file {delays}: {e}", file=sys.stderr)
inp = rt_simul(ifile, speed=speed, jump=jump, delaydict=delaydict)
stime = time.time()
time_diff = None
print(
f"Starting msrtsimul at {datetime.datetime.utcnow()}",
file=sys.stderr,
)
for rec in inp:
            if rec.size != 512 and not unlimited:
                print(
                    f"Skipping record of {rec.net}.{rec.sta}.{rec.loc}.{rec.cha} "
                    f"starting on {str(rec.begin_time)}: length != 512 Bytes.",
                    file=sys.stderr,
                )
continue
if time_diff is None:
ms = 1000000.0 * (rec.nsamp / rec.fsamp)
time_diff = (
datetime.datetime.utcnow()
- rec.begin_time
- datetime.timedelta(microseconds=ms)
)
if mode == "realtime":
rec.begin_time += time_diff
if verbosity:
tdiff_to_start = time.time() - stime
tdiff_to_current = time.time() - calendar.timegm(
rec.begin_time.timetuple()
)
nslc = f"{rec.net}.{rec.sta}.{rec.loc}.{rec.cha}"
                print(
                    f"{nslc: <17} {tdiff_to_start: 7.2f} "
                    f"{str(rec.begin_time)} {tdiff_to_current: 7.2f}",
                    file=sys.stderr,
                )
if not test:
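                # The second argument of write() is the record length as a power
                # of two, i.e. log2 of the record size (e.g. 9 for 512 bytes).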
rec.write(out_channel, int(math.log2(rec.size)))
out_channel.flush()
except KeyboardInterrupt:
pass
except Exception as e:
print(f"Exception: {str(e)}", file=sys.stderr)
return 1
return 0
# ------------------------------------------------------------------------------
if __name__ == "__main__":
sys.exit(main())

150
bin/optodas_inventory Executable file
View File

@ -0,0 +1,150 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import json
import datetime
import argparse
import zmq
import seiscomp.datamodel
import seiscomp.core
import seiscomp.io
VERSION = "0.1 (2024.066)"
def main():
parser = argparse.ArgumentParser()
parser.set_defaults(
address="tcp://localhost:3333",
sample_rate=100,
gain=1.0,
network="XX",
station="{channel:05d}",
location="",
channel="HSF"
)
parser.add_argument("--version",
action="version",
version="%(prog)s " + VERSION
)
parser.add_argument("-a", "--address",
help="ZeroMQ address (default %(default)s)"
)
parser.add_argument("-r", "--sample-rate",
type = int,
help = "sample rate (default %(default)s)"
)
parser.add_argument("-g", "--gain",
type=float,
help="gain (default %(default)s)"
)
parser.add_argument("-n", "--network",
help="network code (default %(default)s)"
)
parser.add_argument("-s", "--station",
help="station code template (default %(default)s)"
)
parser.add_argument("-l", "--location",
help="location code (default %(default)s)"
)
parser.add_argument("-c", "--channel",
help="channel code (default %(default)s)"
)
args = parser.parse_args()
sock = zmq.Context().socket(zmq.SUB)
sock.connect(args.address)
sock.setsockopt(zmq.SUBSCRIBE, b"")
header = json.loads(sock.recv().decode("utf-8"))
inv = seiscomp.datamodel.Inventory()
resp = seiscomp.datamodel.ResponsePAZ_Create()
resp.setType("A")
resp.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"])
resp.setGainFrequency(0)
resp.setNormalizationFactor(1)
resp.setNormalizationFrequency(0)
resp.setNumberOfZeros(0)
resp.setNumberOfPoles(0)
inv.add(resp)
sensor = seiscomp.datamodel.Sensor_Create()
sensor.setName(header["instrument"])
sensor.setDescription(header["instrument"])
sensor.setUnit(header["sensitivities"][0]["unit"])
sensor.setResponse(resp.publicID())
inv.add(sensor)
datalogger = seiscomp.datamodel.Datalogger_Create()
datalogger.setDescription(header["instrument"])
datalogger.setGain(1)
datalogger.setMaxClockDrift(0)
deci = seiscomp.datamodel.Decimation()
deci.setSampleRateNumerator(args.sample_rate)
deci.setSampleRateDenominator(1)
datalogger.add(deci)
inv.add(datalogger)
net = seiscomp.datamodel.Network_Create()
net.setCode(args.network)
net.setDescription(header["experiment"])
net.setStart(seiscomp.core.Time.FromYearDay(datetime.datetime.utcnow().year, 1))
inv.add(net)
for roi in header["roiTable"]:
for c in range(roi["roiStart"], roi["roiEnd"] + 1, roi["roiDec"]):
sta = seiscomp.datamodel.Station_Create()
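            # The station code template uses str.format() placeholders,
            # e.g. the default "{channel:05d}" yields "00042" for channel 42.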
            sta.setCode(args.station.format(channel=c))
sta.setDescription("DAS channel %d" % c)
sta.setStart(net.start())
net.add(sta)
loc = seiscomp.datamodel.SensorLocation_Create()
loc.setCode(args.location)
loc.setStart(net.start())
sta.add(loc)
cha = seiscomp.datamodel.Stream_Create()
cha.setCode(args.channel)
cha.setStart(net.start())
cha.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"])
cha.setGainUnit(header["sensitivities"][0]["unit"])
cha.setGainFrequency(0)
cha.setSensor(sensor.publicID())
cha.setDatalogger(datalogger.publicID())
loc.add(cha)
ar = seiscomp.io.XMLArchive()
ar.create("-")
ar.setFormattedOutput(True)
ar.writeObject(inv)
ar.close()
if __name__ == "__main__":
main()

938
bin/playback_picks Executable file
View File

@ -0,0 +1,938 @@
#!/usr/bin/env seiscomp-python
############################################################################
# Copyright (C) 2016 by gempa GmbH #
# #
# All Rights Reserved. #
# #
# NOTICE: All information contained herein is, and remains #
# the property of gempa GmbH and its suppliers, if any. The intellectual #
# and technical concepts contained herein are proprietary to gempa GmbH #
# and its suppliers. #
# Dissemination of this information or reproduction of this material #
# is strictly forbidden unless prior written permission is obtained #
# from gempa GmbH. #
# #
# Author: Enrico Ellguth, Dirk Roessler #
# Email: enrico.ellguth@gempa.de, roessler@gempa.de #
############################################################################
import os
import sys
import time
import seiscomp.client
import seiscomp.core
import seiscomp.io
import seiscomp.datamodel
def timing_pickTime(obj):
"""
Sort picks, origins by their time values
Sort amplitudes by their reference time
"""
po = seiscomp.datamodel.Pick.Cast(obj[0])
oo = seiscomp.datamodel.Origin.Cast(obj[0])
if po or oo:
t = obj[0].time().value()
else:
t = obj[0].timeWindow().reference()
return t
def timing_creationTime(obj):
"""
Sort all objects by their creation time
"""
ct = obj[0].creationInfo().creationTime()
return ct
def listPicks(objects):
print(
"\n#phase, time , streamID , author, to previous pick "
"[s], delay [s]",
file=sys.stdout,
)
t0 = None
for obj, _ in objects:
p = seiscomp.datamodel.Pick.Cast(obj)
if not p:
continue
        pickTime = p.time().value()
try:
phase = p.phaseHint().code()
except ValueError:
phase = "None"
wfID = p.waveformID()
net = wfID.networkCode()
sta = wfID.stationCode()
loc = wfID.locationCode()
cha = wfID.channelCode()
try:
author = p.creationInfo().author()
except ValueError:
author = "None"
try:
delay = f"{(p.creationInfo().creationTime() - time).toDouble():.3f}"
except ValueError:
delay = "None"
        if t0 is not None:
            deltaT = f"{(pickTime - t0).toDouble():.3f}"
else:
deltaT = "None"
streamID = f"{net}.{sta}.{loc}.{cha}"
print(
f"{phase: <6}, {time.toString('%FT%T')}.{int(time.microseconds()/1000):03d}"
f", {streamID: <15}, {author}, {deltaT}, {delay}",
file=sys.stdout,
)
        t0 = pickTime
return True
def printStatistics(ep):
minPickTime = None
maxPickTime = None
minPickCTime = None
maxPickCTime = None
minAmplitudeTime = None
maxAmplitudeTime = None
minOriginTime = None
maxOriginTime = None
minOriginCTime = None
maxOriginCTime = None
# read picks
nslc = set()
authorPick = set()
authorAmplitude = set()
authorOrigin = set()
detectionStreams = set()
cntPick = ep.pickCount()
for i in range(cntPick):
pick = ep.pick(i)
try:
authorPick.add(pick.creationInfo().author())
except ValueError:
print(
f"Author information not found in pick {pick.publicID()}: NSLC list may"
" be incomplete",
file=sys.stderr,
)
try:
net = pick.waveformID().networkCode()
sta = pick.waveformID().stationCode()
loc = pick.waveformID().locationCode()
cha = pick.waveformID().channelCode()
nslc.add(f"{net}.{sta}.{loc}.{cha}")
detectionStreams.add(f".{loc}.{cha}")
except ValueError:
print(
f"Stream information not found in pick {pick.publicID()}: NSLC list "
"may be incomplete",
file=sys.stderr,
)
if not minPickTime:
minPickTime = pick.time().value()
elif pick.time().value() < minPickTime:
minPickTime = pick.time().value()
if not maxPickTime:
maxPickTime = pick.time().value()
elif pick.time().value() > maxPickTime:
maxPickTime = pick.time().value()
try:
pick.creationInfo().creationTime()
except ValueError:
print(
f"Creation time not found in pick {pick.publicID()}: Statistics may "
"be incomplete",
file=sys.stderr,
)
continue
if not minPickCTime:
minPickCTime = pick.creationInfo().creationTime()
elif pick.creationInfo().creationTime() < minPickCTime:
minPickCTime = pick.creationInfo().creationTime()
if not maxPickCTime:
maxPickCTime = pick.creationInfo().creationTime()
elif pick.creationInfo().creationTime() > maxPickCTime:
maxPickCTime = pick.creationInfo().creationTime()
# read amplitudes
cntAmp = ep.amplitudeCount()
for i in range(cntAmp):
amp = ep.amplitude(i)
try:
authorAmplitude.add(amp.creationInfo().author())
except ValueError:
print(
f"Author information not found in amplitude {amp.publicID()}: NSLC "
"list may be incomplete",
file=sys.stderr,
)
try:
net = amp.waveformID().networkCode()
sta = amp.waveformID().stationCode()
loc = amp.waveformID().locationCode()
cha = amp.waveformID().channelCode()
nslc.add(f"{net}.{sta}.{loc}.{cha}")
detectionStreams.add(f".{loc}.{cha}")
except ValueError:
print(
f"Stream information not found in amplitude {amp.publicID()}: NSLC "
"list may be incomplete",
file=sys.stderr,
)
if not minAmplitudeTime:
minAmplitudeTime = amp.timeWindow().reference()
elif amp.timeWindow().reference() < minAmplitudeTime:
minAmplitudeTime = amp.timeWindow().reference()
if not maxAmplitudeTime:
maxAmplitudeTime = amp.timeWindow().reference()
elif amp.timeWindow().reference() > maxAmplitudeTime:
maxAmplitudeTime = amp.timeWindow().reference()
# read origins
cntOrg = ep.originCount()
for i in range(cntOrg):
oo = ep.origin(i)
try:
authorOrigin.add(oo.creationInfo().author())
except ValueError:
print(
f"Author information not found in origin {oo.publicID()}:",
file=sys.stderr,
)
if not minOriginTime:
minOriginTime = oo.time().value()
elif oo.time().value() < minOriginTime:
minOriginTime = oo.time().value()
if not maxOriginTime:
maxOriginTime = oo.time().value()
elif oo.time().value() > maxOriginTime:
maxOriginTime = oo.time().value()
try:
oo.creationInfo().creationTime()
except ValueError:
print(
f"Creation time not found in oo {oo.publicID()}: Statistics may "
"be incomplete",
file=sys.stderr,
)
continue
if not minOriginCTime:
minOriginCTime = oo.creationInfo().creationTime()
elif oo.creationInfo().creationTime() < minOriginCTime:
minOriginCTime = oo.creationInfo().creationTime()
if not maxOriginCTime:
maxOriginCTime = oo.creationInfo().creationTime()
elif oo.creationInfo().creationTime() > maxOriginCTime:
maxOriginCTime = oo.creationInfo().creationTime()
print(
f"""
Picks
+ number: {cntPick}
+ first pick: {minPickTime}
+ last pick: {maxPickTime}""",
file=sys.stdout,
)
if cntPick > 0:
print(
f""" + interval: {(maxPickTime - minPickTime).toDouble():.3f} s""",
file=sys.stdout,
)
try:
print(
f""" + first created: {minPickCTime}
+ last created: {maxPickCTime}
+ interval: {(maxPickCTime - minPickCTime).toDouble():.3f} s""",
file=sys.stdout,
)
except TypeError:
print(
""" + first created: no creation information
+ last created: no creation information
+ interval: no creation information""",
file=sys.stdout,
)
print(f" + found {len(authorPick)} pick author(s):", file=sys.stdout)
for i in authorPick:
print(f" + {i}", file=sys.stdout)
print(
f"""
Amplitudes
+ number: {cntAmp}""",
file=sys.stdout,
)
if cntAmp > 0:
print(
f""" + first amplitude: {minAmplitudeTime}
+ last amplitude: {maxAmplitudeTime}
+ interval: {(maxAmplitudeTime - minAmplitudeTime).toDouble():.3f} s""",
file=sys.stdout,
)
print(f" + found {len(authorAmplitude)} amplitude author(s):", file=sys.stdout)
for i in authorAmplitude:
print(f" + {i}", file=sys.stdout)
print(
f"""
Origins
+ number: {cntOrg}""",
file=sys.stdout,
)
if cntOrg > 0:
print(
f""" + first origin: {minOriginTime}
+ last origin: {maxOriginTime}
+ interval: {(maxOriginTime - minOriginTime).toDouble():.3f} s""",
file=sys.stdout,
)
try:
print(
f""" + first created: {minOriginCTime}
+ last created: {maxOriginCTime}
+ interval: {(maxOriginCTime - minOriginCTime).toDouble():.3f} s""",
file=sys.stdout,
)
except TypeError:
print(
""" + first created: no creation information
+ last created: no creation information
+ interval: no creation information""",
file=sys.stdout,
)
print(f" + found {len(authorOrigin)} origin author(s):", file=sys.stdout)
for i in authorOrigin:
print(f" + {i}", file=sys.stdout)
# stream information
print(f"\nFound {len(detectionStreams)} SensorLocation.Channel:", file=sys.stdout)
for i in detectionStreams:
print(f" + {i}", file=sys.stdout)
print(f"\nFound {len(nslc)} streams:", file=sys.stdout)
for i in sorted(nslc):
print(f" + {i}", file=sys.stdout)
return True
class PickPlayback(seiscomp.client.Application):
def __init__(self, argc, argv):
super().__init__(argc, argv)
self.speed = 1.0
self.timing = "creationTime"
self.jump = 0.0
self.print = False
self.printList = False
self.group = "PICK"
self.ampGroup = "AMPLITUDE"
self.orgGroup = "LOCATION"
self.fileNames = None
self.mode = "historic"
self.authors = None
self.objects = None
self.setMessagingUsername("pbpick")
self.setMessagingEnabled(True)
self.setPrimaryMessagingGroup("PICK")
self.setDatabaseEnabled(False, False)
def createCommandLineDescription(self):
self.commandline().addGroup("Playback")
self.commandline().addStringOption(
"Playback",
"authors",
"Author of objects to filter before playing back. Objects from all other "
"authors are ignored. Separate multiple authors by comma.",
)
self.commandline().addDoubleOption(
"Playback", "jump,j", "Minutes to skip objects in the beginning."
)
self.commandline().addOption(
"Playback",
"list",
"Just list important pick information from the read XML file and then "
"exit without playing back. The sorting of the list depends on '--timing'."
"Information include: phase hint, pick time, stream ID, author, time to "
"previous pick, delay.",
)
self.commandline().addStringOption(
"Playback",
"mode",
"Playback mode: 'historic' or 'realTime'. "
"'realTime' mimics current situation. Default: 'historic'.",
)
self.commandline().addStringOption(
"Playback",
"object,o",
"Limit the playback to the given list of objects. Supported values are: \n"
"pick, amplitude, origin.",
)
self.commandline().addOption(
"Playback",
"print",
"Just print some statistics of the read XML file and then "
"exit without playing back. The list of stream codes (NSLC) is printed to "
"stdout. All other information is printed to stderr. The information can "
"be used for filtering waveforms (scart) or inventory (invextr), for "
"creating global bindings or applying author filtering, e.g., in "
"dump_picks.",
)
self.commandline().addDoubleOption(
"Playback", "speed", "Speed of playback.\n1: true speed."
)
self.commandline().addStringOption(
"Playback",
"timing",
"Timing reference: pickTime or creationTime. Default: creationTime. "
"'pickTime' plays back in order of actual times of objects, "
"'creationTime' considers their creation times instead. Use 'pickTime' if "
"creation times are not representative of the order of objects, e.g., when "
"created in playbacks. 'creationTime' should be considered for playing "
"back origins since their actual origin time values are always before "
"picks and amplitudes.",
)
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options] [XML file][:PICK:AMPLITUDE:LOCATION]
Play back pick, amplitude and origin objects from one or more XML files in SCML format
sending them to the SeisComP messaging in timely order. Default message groups:
* PICK for picks,
* AMPLITUDE for amplitudes,
* LOCATION for origins."""
)
super().printUsage()
print(
f"""Examples:
  Play back picks and other objects in file 'picks.xml' at true speed, skipping the
  first 2 minutes
{os.path.basename(__file__)} -j 2 picks.xml
Play back picks and other objects from 2 XML files sending the picks, amplitudes
and origins ordered by creation time to different message groups but amplitudes
to the same default group (AMPLITUDE).
{os.path.basename(__file__)} origins.xml l1origins.xml:L1PICK:AMPLITUDE:L1LOCATION
Just print statistics and stream information
{os.path.basename(__file__)} --print picks.xml
"""
)
def init(self):
if not super().init():
return False
return True
def validateParameters(self):
if not super().validateParameters():
return False
try:
self.authors = self.commandline().optionString("authors").split(",")
except RuntimeError:
pass
try:
self.mode = self.commandline().optionString("mode")
except RuntimeError:
pass
try:
self.objects = self.commandline().optionString("object")
except RuntimeError:
pass
if self.mode not in ("historic", "realTime"):
print(f"Unknown mode: {self.mode}", file=sys.stderr)
return False
try:
self.print = self.commandline().hasOption("print")
except RuntimeError:
pass
try:
self.printList = self.commandline().hasOption("list")
except RuntimeError:
pass
try:
self.speed = self.commandline().optionDouble("speed")
except RuntimeError:
pass
try:
self.timing = self.commandline().optionString("timing")
except RuntimeError:
pass
try:
self.jump = self.commandline().optionDouble("jump")
except RuntimeError:
pass
if self.timing not in ("pickTime", "creationTime"):
print(f"Unknown timing: {self.timing}", file=sys.stderr)
return False
try:
self.group = self.commandline().optionString("primary-group")
except RuntimeError:
pass
files = self.commandline().unrecognizedOptions()
if not files:
print("At least one XML file must be given!", file=sys.stderr)
return False
print(files, file=sys.stderr)
self.fileNames = list(files)
if self.print or self.printList:
self.setMessagingEnabled(False)
return True
def run(self):
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
objects = []
eps = []
minTime = None
maxTime = None
print("Input:", file=sys.stdout)
for fileName in self.fileNames:
group = self.group
ampGroup = self.ampGroup
orgGroup = self.orgGroup
toks = fileName.split(":")
if len(toks) == 2:
fileName = toks[0]
group = toks[1]
elif len(toks) == 3:
fileName = toks[0]
group = toks[1]
ampGroup = toks[2]
elif len(toks) == 4:
fileName = toks[0]
group = toks[1]
ampGroup = toks[2]
orgGroup = toks[3]
print(
f" + file: {fileName}",
file=sys.stdout,
)
ar = seiscomp.io.XMLArchive()
if not ar.open(fileName):
print(f"Could not open {fileName}", file=sys.stderr)
return False
obj = ar.readObject()
ar.close()
if obj is None:
print("Empty document", file=sys.stderr)
return False
ep = seiscomp.datamodel.EventParameters.Cast(obj)
            if ep is None:
                print(
                    f"Expected event parameters, got {obj.className()}", file=sys.stderr
                )
                return False
            if self.print:
                printStatistics(ep)
                if not self.printList:
                    return True
            eps.append(ep)
# read picks
cntPick = ep.pickCount()
if cntPick == 0:
print(f"No picks found in file {fileName}", file=sys.stderr)
if self.objects is not None and "pick" not in self.objects:
print(
f"Skipping picks. Supported objects: {self.objects}",
file=sys.stderr,
)
cntPick = 0
for i in range(cntPick):
pick = ep.pick(i)
if self.authors is not None:
try:
if (
pick.creationInfo().author() not in self.authors
and not self.printList
):
print(
f"Skipping pick {pick.publicID()}: "
f"{pick.creationInfo().author()} not in author list",
file=sys.stderr,
)
continue
except ValueError:
if not self.printList:
print(
f"Skipping pick {pick.publicID()}: "
f"author is not available",
file=sys.stderr,
)
continue
if self.timing == "creationTime":
try:
pick.creationInfo().creationTime()
except Exception:
if not self.printList:
print(
f"Skipping pick {pick.publicID()}: no creation time",
file=sys.stderr,
)
continue
# filter by time
if minTime and pick.time().value() < minTime:
continue
if maxTime and pick.time().value() >= maxTime:
continue
objects.append((pick, group))
# read amplitudes and add to objects
cntAmp = ep.amplitudeCount()
if cntAmp == 0:
print("No Amplitudes found", file=sys.stderr)
if self.objects is not None and "amplitude" not in self.objects:
print(
f"Skipping amplitudes. Supported objects: {self.objects}",
file=sys.stderr,
)
cntAmp = 0
for i in range(cntAmp):
amp = ep.amplitude(i)
if self.authors is not None:
try:
if (
amp.creationInfo().author() not in self.authors
and not self.printList
):
print(
f"Skipping amplitude {amp.publicID()}: "
f"{amp.creationInfo().author()} not in author list",
file=sys.stderr,
)
continue
except ValueError:
if not self.printList:
print(
f"Skipping amplitude {amp.publicID()}: "
f"author is not available",
file=sys.stderr,
)
continue
if self.timing == "creationTime":
try:
amp.creationInfo().creationTime()
except Exception:
print(
f"Skipping amplitude {amp.publicID()}: no creation time",
file=sys.stderr,
)
continue
objects.append((amp, ampGroup))
# read origins and add to objects
cntOrgs = ep.originCount()
if cntOrgs == 0:
print("No Origins found", file=sys.stderr)
if self.objects is not None and "origin" not in self.objects:
print(
f"Skipping origins. Supported objects: {self.objects}",
file=sys.stderr,
)
cntOrgs = 0
for i in range(cntOrgs):
oo = ep.origin(i)
if self.authors is not None:
try:
if (
oo.creationInfo().author() not in self.authors
and not self.printList
):
print(
f"Skipping origin {oo.publicID()}: "
f"{oo.creationInfo().author()} not in author list",
file=sys.stderr,
)
continue
except ValueError:
if not self.printList:
print(
f"Skipping origin {oo.publicID()}: "
f"author is not available",
file=sys.stderr,
)
continue
if self.timing == "creationTime":
try:
oo.creationInfo().creationTime()
except Exception:
try:
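                            # Fall back to parsing the timestamp embedded in the
                            # publicID, e.g. a hypothetical
                            # "Origin/20240101120000.123456.42" would yield the
                            # creation time 2024-01-01T12:00:00.123456.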
string = oo.publicID().split("/")[1].split(".")[:2]
timeString = string[0] + "." + string[1]
timeFormat = "%Y%m%d%H%M%S.%f"
t = seiscomp.core.Time()
t.fromString(str(timeString), timeFormat)
ci = seiscomp.datamodel.CreationInfo()
ci.setCreationTime(t)
oo.setCreationInfo(ci)
print(
f"creation time not found in origin {oo.publicID()}: "
f"assuming {oo.creationInfo().creationTime()} from "
"originID ",
file=sys.stderr,
)
except Exception:
if not self.printList:
print(
f"Skipping origin {oo.publicID()}: no creation time",
file=sys.stderr,
)
continue
objects.append((oo, orgGroup))
print(
f" + considering {cntPick} picks, {cntAmp} amplitudes, {cntOrgs} origins",
file=sys.stdout,
)
if self.print or self.printList:
print(" + do not send objects to messaging")
else:
print(
f""" + sending objects to groups
+ picks: {group}
+ amplitudes: {ampGroup}
+ origins: {orgGroup}""",
file=sys.stdout,
)
if self.timing == "pickTime":
try:
objects.sort(key=timing_pickTime)
except ValueError:
print("Time value not set in at least 1 object", file=sys.stderr)
if not self.printList:
return False
elif self.timing == "creationTime":
try:
objects.sort(key=timing_creationTime)
except ValueError:
print("Creation time not set in at least 1 object", file=sys.stderr)
if not self.printList:
return False
else:
print(f"Unknown timing: {self.timing}", file=sys.stderr)
return False
print("Setup:", file=sys.stdout)
print(f" + author filter: {self.authors}", file=sys.stdout)
print(f" + timing/sorting: {self.timing}", file=sys.stdout)
if self.printList:
            listPicks(objects)
return True
seiscomp.datamodel.Notifier.Enable()
firstTime = None
lastTime = None
refTime = None
addSeconds = 0.0
sys.stdout.flush()
for obj, group in objects:
po = seiscomp.datamodel.Pick.Cast(obj)
ao = seiscomp.datamodel.Amplitude.Cast(obj)
oo = seiscomp.datamodel.Origin.Cast(obj)
if self.isExitRequested():
break
if self.timing == "pickTime":
if ao:
refTime = obj.timeWindow().reference()
elif po:
refTime = obj.time().value()
elif oo:
refTime = obj.time().value()
else:
print(
"Object neither pick nor amplitude or origin- ignoring",
file=sys.stderr,
)
return False
else:
refTime = obj.creationInfo().creationTime()
if not firstTime:
firstTime = refTime
print(f" + first time: {firstTime}", file=sys.stderr)
print(f" + playback mode: {self.mode}", file=sys.stderr)
print(f" + speed factor: {self.speed}", file=sys.stderr)
if self.mode == "realTime":
now = seiscomp.core.Time.GMT()
addSeconds = (now - firstTime).toDouble()
print(
f" + adding {addSeconds: .3f} s to: pick time, amplitude "
"reference time, origin time, creation time",
file=sys.stderr,
)
print("Playback progress:", file=sys.stderr)
objectType = "pick"
if ao:
objectType = "amplitude"
if oo:
objectType = "origin"
print(
f" + {obj.publicID()} {objectType}: {group} - reference time: {refTime}",
end="",
file=sys.stderr,
)
# add addSeconds to all times in real-time mode
if self.mode == "realTime":
objectInfo = obj.creationInfo()
creationTime = objectInfo.creationTime() + seiscomp.core.TimeSpan(
addSeconds
)
obj.creationInfo().setCreationTime(creationTime)
if ao:
objectInfo = obj.timeWindow()
amplitudeTime = objectInfo.reference() + seiscomp.core.TimeSpan(
addSeconds
)
obj.timeWindow().setReference(amplitudeTime)
print(
"\n + real-time mode - using modified reference time: "
f"{obj.timeWindow().reference()}, creation time: {creationTime}",
end="",
file=sys.stderr,
)
elif po or oo:
objectTime = obj.time()
objectTime.setValue(
objectTime.value() + seiscomp.core.TimeSpan(addSeconds)
)
obj.setTime(objectTime)
print(
f"\n + real-time mode - using modified {objectType} time: "
f"{obj.time().value()}, creation time: {creationTime}",
end="",
file=sys.stderr,
)
else:
print(
"\n + object not pick, amplitude or origin - ignoring",
file=sys.stderr,
)
return False
delay = 0
if lastTime:
delay = (refTime - lastTime).toDouble() / self.speed
if (refTime - firstTime).toDouble() / 60.0 >= self.jump:
delay = max(delay, 0)
print(f" - time to sending: {delay:.4f} s", file=sys.stderr)
time.sleep(delay)
lastTime = refTime
nc = seiscomp.datamodel.NotifierCreator(seiscomp.datamodel.OP_ADD)
obj.accept(nc)
msg = seiscomp.datamodel.Notifier.GetMessage()
self.connection().send(group, msg)
else:
print(" - skipping", file=sys.stderr)
sys.stdout.flush()
print("")
return True
def main(argv):
app = PickPlayback(len(argv), argv)
return app()
if __name__ == "__main__":
sys.exit(main(sys.argv))

BIN
bin/ql2sc Executable file

Binary file not shown.

BIN
bin/run_with_lock Executable file

Binary file not shown.

226
bin/sc2pa Executable file
View File

@ -0,0 +1,226 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import os
import time
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.logging
from seiscomp.scbulletin import Bulletin, stationCount
class ProcAlert(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.maxAgeDays = 1.0
self.minPickCount = 25
self.procAlertScript = ""
ep = seiscomp.datamodel.EventParameters()
def createCommandLineDescription(self):
try:
self.commandline().addGroup("Publishing")
self.commandline().addIntOption(
"Publishing",
"min-arr",
"Minimum arrival count of a published origin",
self.minPickCount,
)
self.commandline().addDoubleOption(
"Publishing",
"max-age",
"Maximum age in days of published origins",
self.maxAgeDays,
)
self.commandline().addStringOption(
"Publishing",
"procalert-script",
"Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2",
)
self.commandline().addOption(
"Publishing", "test", "Test mode, no messages are sent"
)
        except Exception:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
def initConfiguration(self):
if not seiscomp.client.Application.initConfiguration(self):
return False
try:
self.procAlertScript = self.configGetString("scripts.procAlert")
        except RuntimeError:
pass
try:
self.minPickCount = self.configGetInt("minArrivals")
        except RuntimeError:
pass
try:
self.maxAgeDays = self.configGetDouble("maxAgeDays")
        except RuntimeError:
pass
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self.procAlertScript = self.commandline().optionString("procalert-script")
        except RuntimeError:
pass
try:
self.minPickCount = self.commandline().optionInt("min-arr")
        except RuntimeError:
pass
try:
self.maxAgeDays = self.commandline().optionDouble("max-age")
        except RuntimeError:
pass
self.bulletin = Bulletin(self.query(), "autoloc1")
self.cache = seiscomp.datamodel.PublicObjectRingBuffer(self.query(), 100)
if not self.procAlertScript:
seiscomp.logging.warning("No procalert script given")
else:
seiscomp.logging.info(f"Using procalert script: {self.procAlertScript}")
return True
def addObject(self, parentID, obj):
org = seiscomp.datamodel.Origin.Cast(obj)
if org:
self.cache.feed(org)
seiscomp.logging.info(f"Received origin {org.publicID()}")
return
self.updateObject(parentID, obj)
def updateObject(self, parentID, obj):
try:
evt = seiscomp.datamodel.Event.Cast(obj)
if evt:
orid = evt.preferredOriginID()
org = self.cache.get(seiscomp.datamodel.Origin, orid)
if not org:
seiscomp.logging.error(f"Unable to fetch origin {orid}")
return
if org.arrivalCount() == 0:
self.query().loadArrivals(org)
if org.stationMagnitudeCount() == 0:
self.query().loadStationMagnitudes(org)
if org.magnitudeCount() == 0:
self.query().loadMagnitudes(org)
if not self.originMeetsCriteria(org, evt):
seiscomp.logging.warning(f"Origin {orid} not published")
return
txt = self.bulletin.printEvent(evt)
for line in txt.split("\n"):
line = line.rstrip()
seiscomp.logging.info(line)
seiscomp.logging.info("")
if not self.commandline().hasOption("test"):
self.send_procalert(txt, evt.publicID())
return
        except Exception:
sys.stderr.write(f"{sys.exc_info()}\n")
def hasValidNetworkMagnitude(self, org, evt):
nmag = org.magnitudeCount()
for imag in range(nmag):
mag = org.magnitude(imag)
if mag.publicID() == evt.preferredMagnitudeID():
return True
return False
def send_procalert(self, txt, evid):
if self.procAlertScript:
tmp = f"/tmp/yyy{evid.replace('/', '_').replace(':', '-')}"
            with open(tmp, "w") as f:
                f.write(txt)
os.system(self.procAlertScript + " " + tmp + " " + evid)
def coordinates(self, org):
return org.latitude().value(), org.longitude().value(), org.depth().value()
def originMeetsCriteria(self, org, evt):
publish = True
lat, lon, dep = self.coordinates(org)
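        # Reject events located roughly within Europe (43-70 N, -10-60 E)
        # but deeper than 200 km as implausible.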
if 43 < lat < 70 and -10 < lon < 60 and dep > 200:
seiscomp.logging.error("suspicious region/depth - ignored")
publish = False
if stationCount(org) < self.minPickCount:
seiscomp.logging.error("too few picks - ignored")
publish = False
now = seiscomp.core.Time.GMT()
if (now - org.time().value()).seconds() / 86400.0 > self.maxAgeDays:
seiscomp.logging.error("origin too old - ignored")
publish = False
try:
if org.evaluationMode() == seiscomp.datamodel.MANUAL:
publish = True
        except ValueError:
pass
try:
if org.evaluationStatus() == seiscomp.datamodel.CONFIRMED:
publish = True
        except ValueError:
pass
if not self.hasValidNetworkMagnitude(org, evt):
seiscomp.logging.error("no network magnitude - ignored")
publish = False
return publish
app = ProcAlert(len(sys.argv), sys.argv)
sys.exit(app())

1
bin/sc32inv Symbolic link
View File

@ -0,0 +1 @@
scml2inv

843
bin/scalert Executable file
View File

@ -0,0 +1,843 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import os
import sys
import re
import subprocess
import traceback
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.math
import seiscomp.logging
import seiscomp.seismology
import seiscomp.system
class ObjectAlert(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setMessagingUsername("")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
self.setLoadCitiesEnabled(True)
self.setLoadRegionsEnabled(True)
self._ampType = "snr"
self._citiesMaxDist = 20
self._citiesMinPopulation = 50000
self._eventDescriptionPattern = None
self._pickScript = None
self._ampScript = None
self._alertScript = None
self._eventScript = None
self._pickProc = None
self._ampProc = None
self._alertProc = None
self._eventProc = None
self._newWhenFirstSeen = False
self._oldEvents = []
self._agencyIDs = []
self._authors = []
self._phaseHints = []
self._phaseStreams = []
self._phaseNumber = 1
self._phaseInterval = 1
self._cache = None
self._pickCache = seiscomp.datamodel.PublicObjectTimeSpanBuffer()
def createCommandLineDescription(self):
self.commandline().addOption(
"Generic",
"first-new",
"calls an event a new event when it is seen the first time",
)
self.commandline().addGroup("Alert")
self.commandline().addStringOption(
"Alert", "amp-type", "amplitude type to listen to", self._ampType
)
self.commandline().addStringOption(
"Alert",
"pick-script",
"script to be called when a pick arrived, network-, station code pick "
"publicID are passed as parameters $1, $2, $3 and $4",
)
self.commandline().addStringOption(
"Alert",
"amp-script",
"script to be called when a station amplitude arrived, network-, station "
"code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4",
)
self.commandline().addStringOption(
"Alert",
"alert-script",
"script to be called when a preliminary origin arrived, latitude and "
"longitude are passed as parameters $1 and $2",
)
self.commandline().addStringOption(
"Alert",
"event-script",
"script to be called when an event has been declared; the message string, a "
"flag (1=new event, 0=update event), the EventID, the arrival count and the "
"magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5",
)
self.commandline().addGroup("Cities")
self.commandline().addStringOption(
"Cities",
"max-dist",
"maximum distance for using the distance from a city to the earthquake",
)
self.commandline().addStringOption(
"Cities",
"min-population",
"minimum population for a city to become a point of interest",
)
self.commandline().addGroup("Debug")
self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
foundScript = False
        # module configuration parameters
try:
self._newWhenFirstSeen = self.configGetBool("firstNew")
except RuntimeError:
pass
try:
self._agencyIDs = [self.configGetString("agencyID")]
except RuntimeError:
pass
try:
agencyIDs = self.configGetStrings("agencyIDs")
self._agencyIDs = []
for item in agencyIDs:
item = item.strip()
if item and item not in self._agencyIDs:
self._agencyIDs.append(item)
except RuntimeError:
pass
try:
authors = self.configGetStrings("authors")
self._authors = []
for item in authors:
item = item.strip()
if item not in self._authors:
self._authors.append(item)
except RuntimeError:
pass
self._phaseHints = ["P", "S"]
try:
phaseHints = self.configGetStrings("constraints.phaseHints")
self._phaseHints = []
for item in phaseHints:
item = item.strip()
if item not in self._phaseHints:
self._phaseHints.append(item)
except RuntimeError:
pass
self._phaseStreams = []
try:
phaseStreams = self.configGetStrings("constraints.phaseStreams")
for item in phaseStreams:
rule = item.strip()
                # rule is NET.STA.LOC.CHA and the special characters ? * | ( ) are allowed
if not re.fullmatch(r"[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+", rule):
seiscomp.logging.error(
f"Wrong stream ID format in `constraints.phaseStreams`: {item}"
)
return False
# convert rule to a valid regular expression
rule = re.sub(r"\.", r"\.", rule)
rule = re.sub(r"\?", ".", rule)
rule = re.sub(r"\*", ".*", rule)
if rule not in self._phaseStreams:
self._phaseStreams.append(rule)
except RuntimeError:
pass
try:
self._phaseNumber = self.configGetInt("constraints.phaseNumber")
except RuntimeError:
pass
try:
self._phaseInterval = self.configGetInt("constraints.phaseInterval")
except RuntimeError:
pass
if self._phaseNumber > 1:
self._pickCache.setTimeSpan(seiscomp.core.TimeSpan(self._phaseInterval))
self.enableTimer(1)
try:
self._eventDescriptionPattern = self.configGetString("poi.message")
except RuntimeError:
pass
try:
self._citiesMaxDist = self.configGetDouble("poi.maxDist")
except RuntimeError:
pass
try:
self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
except RuntimeError:
pass
# mostly command-line options
try:
self._citiesMaxDist = self.commandline().optionDouble("max-dist")
except RuntimeError:
pass
try:
if self.commandline().hasOption("first-new"):
self._newWhenFirstSeen = True
except RuntimeError:
pass
try:
self._citiesMinPopulation = self.commandline().optionInt("min-population")
except RuntimeError:
pass
try:
self._ampType = self.commandline().optionString("amp-type")
except RuntimeError:
pass
try:
self._pickScript = self.commandline().optionString("pick-script")
except RuntimeError:
try:
self._pickScript = self.configGetString("scripts.pick")
except RuntimeError:
seiscomp.logging.warning("No pick script defined")
if self._pickScript:
self._pickScript = seiscomp.system.Environment.Instance().absolutePath(
self._pickScript
)
seiscomp.logging.info(f"Using pick script {self._pickScript}")
if not os.path.isfile(self._pickScript):
seiscomp.logging.error(" + not exising")
return False
if not os.access(self._pickScript, os.X_OK):
seiscomp.logging.error(" + not executable")
return False
foundScript = True
try:
self._ampScript = self.commandline().optionString("amp-script")
except RuntimeError:
try:
self._ampScript = self.configGetString("scripts.amplitude")
except RuntimeError:
seiscomp.logging.warning("No amplitude script defined")
if self._ampScript:
self._ampScript = seiscomp.system.Environment.Instance().absolutePath(
self._ampScript
)
seiscomp.logging.info(f"Using amplitude script {self._ampScript}")
if not os.path.isfile(self._ampScript):
seiscomp.logging.error(" + not exising")
return False
if not os.access(self._ampScript, os.X_OK):
seiscomp.logging.error(" + not executable")
return False
foundScript = True
try:
self._alertScript = self.commandline().optionString("alert-script")
except RuntimeError:
try:
self._alertScript = self.configGetString("scripts.alert")
except RuntimeError:
seiscomp.logging.warning("No alert script defined")
if self._alertScript:
self._alertScript = seiscomp.system.Environment.Instance().absolutePath(
self._alertScript
)
seiscomp.logging.info(f"Using alert script {self._alertScript}")
if not os.path.isfile(self._alertScript):
seiscomp.logging.error(" + not exising")
return False
if not os.access(self._alertScript, os.X_OK):
seiscomp.logging.error(" + not executable")
return False
foundScript = True
try:
self._eventScript = self.commandline().optionString("event-script")
except RuntimeError:
try:
self._eventScript = self.configGetString("scripts.event")
except RuntimeError:
seiscomp.logging.warning("No event script defined")
if self._eventScript:
self._eventScript = seiscomp.system.Environment.Instance().absolutePath(
self._eventScript
)
seiscomp.logging.info(f"Using event script {self._eventScript}")
if not os.path.isfile(self._eventScript):
seiscomp.logging.error(" + not exising")
return False
if not os.access(self._eventScript, os.X_OK):
seiscomp.logging.error(" + not executable")
return False
foundScript = True
if not foundScript:
seiscomp.logging.error("Found no valid script in configuration")
return False
seiscomp.logging.info("Creating ringbuffer for 100 objects")
if not self.query():
seiscomp.logging.warning("No valid database interface to read from")
self._cache = seiscomp.datamodel.PublicObjectRingBuffer(self.query(), 100)
if self._ampScript and self.connection():
seiscomp.logging.info(
"Amplitude script defined: subscribing to AMPLITUDE message group"
)
self.connection().subscribe("AMPLITUDE")
if self._pickScript and self.connection():
seiscomp.logging.info(
"Pick script defined: subscribing to PICK message group"
)
self.connection().subscribe("PICK")
if self._newWhenFirstSeen:
seiscomp.logging.info(
"A new event is declared when it is seen for the first time"
)
seiscomp.logging.info("Filtering:")
if self._agencyIDs:
agencies = " ".join(self._agencyIDs)
seiscomp.logging.info(
f" + agencyIDs filter for events and picks: {agencies}"
)
else:
seiscomp.logging.info(" + agencyIDs: no filter is applied")
if " ".join(self._authors):
authors = " ".join(self._authors)
seiscomp.logging.info(f" + Authors filter for events and picks: {authors}")
else:
seiscomp.logging.info(" + authors: no filter is applied")
if " ".join(self._phaseHints):
phaseHints = " ".join(self._phaseHints)
seiscomp.logging.info(f" + phase hint filter for picks: '{phaseHints}'")
else:
seiscomp.logging.info(" + phase hints: no filter is applied")
if " ".join(self._phaseStreams):
streams = " ".join(self._phaseStreams)
seiscomp.logging.info(f" + phase stream ID filter for picks: '{streams}'")
else:
seiscomp.logging.info(" + phase stream ID: no filter is applied")
return True
def run(self):
try:
try:
eventID = self.commandline().optionString("eventid")
event = self._cache.get(seiscomp.datamodel.Event, eventID)
if event:
self.notifyEvent(event)
except RuntimeError:
pass
return seiscomp.client.Application.run(self)
except Exception:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
return False
def done(self):
self._cache = None
seiscomp.client.Application.done(self)
def runPickScript(self, pickObjectList):
if not self._pickScript:
return
for pickObject in pickObjectList:
# parse values
try:
net = pickObject.waveformID().networkCode()
except Exception:
net = "unknown"
try:
sta = pickObject.waveformID().stationCode()
except Exception:
sta = "unknown"
pickID = pickObject.publicID()
try:
phaseHint = pickObject.phaseHint().code()
except Exception:
phaseHint = "unknown"
print(net, sta, pickID, phaseHint, file=sys.stderr)
if self._pickProc is not None:
if self._pickProc.poll() is None:
seiscomp.logging.info(
"Pick script still in progress -> wait one second"
)
try:
    self._pickProc.wait(1)
except subprocess.TimeoutExpired:
    pass
if self._pickProc.poll() is None:
seiscomp.logging.warning(
"Pick script still in progress -> skipping message"
)
return
try:
self._pickProc = subprocess.Popen(
[self._pickScript, net, sta, pickID, phaseHint]
)
seiscomp.logging.info(
f"Started pick script with pid {self._pickProc.pid}"
)
except Exception:
seiscomp.logging.error(
f"Failed to start pick script '{self._pickScript}'"
)
def runAmpScript(self, ampObject):
if not self._ampScript:
return
# parse values
net = ampObject.waveformID().networkCode()
sta = ampObject.waveformID().stationCode()
amp = ampObject.amplitude().value()
ampID = ampObject.publicID()
if self._ampProc is not None:
if self._ampProc.poll() is None:
seiscomp.logging.warning(
"Amplitude script still in progress -> skipping message"
)
return
try:
self._ampProc = subprocess.Popen(
[self._ampScript, net, sta, f"{amp:.2f}", ampID]
)
seiscomp.logging.info(
f"Started amplitude script with pid {self._ampProc.pid}"
)
except Exception:
seiscomp.logging.error(
f"Failed to start amplitude script '{self._ampScript}'"
)
def runAlert(self, lat, lon):
if not self._alertScript:
return
if self._alertProc is not None:
if self._alertProc.poll() is None:
seiscomp.logging.warning(
"AlertScript still in progress -> skipping message"
)
return
try:
self._alertProc = subprocess.Popen(
[self._alertScript, f"{lat:.1f}", f"{lon:.1f}"]
)
seiscomp.logging.info(
f"Started alert script with pid {self._alertProc.pid}"
)
except Exception:
seiscomp.logging.error(
f"Failed to start alert script '{self._alertScript}'"
)
def handleMessage(self, msg):
try:
dm = seiscomp.core.DataMessage.Cast(msg)
if dm:
for att in dm:
org = seiscomp.datamodel.Origin.Cast(att)
if org:
try:
if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
self.runAlert(
org.latitude().value(), org.longitude().value()
)
except Exception:
pass
# ao = seiscomp.datamodel.ArtificialOriginMessage.Cast(msg)
# if ao:
# org = ao.origin()
# if org:
# self.runAlert(org.latitude().value(), org.longitude().value())
# return
seiscomp.client.Application.handleMessage(self, msg)
except Exception:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def addObject(self, parentID, scObject):
try:
# pick
obj = seiscomp.datamodel.Pick.Cast(scObject)
if obj:
self._cache.feed(obj)
seiscomp.logging.debug(f"got new pick '{obj.publicID()}'")
agencyID = obj.creationInfo().agencyID()
author = obj.creationInfo().author()
phaseHint = obj.phaseHint().code()
if self._phaseStreams:
waveformID = "%s.%s.%s.%s" % (
obj.waveformID().networkCode(),
obj.waveformID().stationCode(),
obj.waveformID().locationCode(),
obj.waveformID().channelCode(),
)
matched = False
for rule in self._phaseStreams:
if re.fullmatch(rule, waveformID):
matched = True
break
if not matched:
seiscomp.logging.debug(
f" + stream ID {waveformID} does not match constraints.phaseStreams rules"
)
return
if not self._agencyIDs or agencyID in self._agencyIDs:
if not self._phaseHints or phaseHint in self._phaseHints:
self.notifyPick(obj)
else:
seiscomp.logging.debug(
f" + phase hint {phaseHint} does not match '{self._phaseHints}'"
)
else:
seiscomp.logging.debug(
f" + agencyID {agencyID} does not match '{self._agencyIDs}'"
)
return
# amplitude
obj = seiscomp.datamodel.Amplitude.Cast(scObject)
if obj:
if obj.type() == self._ampType:
seiscomp.logging.debug(
f"got new {self._ampType} amplitude '{obj.publicID()}'"
)
self.notifyAmplitude(obj)
return
# origin
obj = seiscomp.datamodel.Origin.Cast(scObject)
if obj:
self._cache.feed(obj)
seiscomp.logging.debug(f"got new origin '{obj.publicID()}'")
try:
if obj.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
self.runAlert(obj.latitude().value(), obj.longitude().value())
except Exception:
pass
return
# magnitude
obj = seiscomp.datamodel.Magnitude.Cast(scObject)
if obj:
self._cache.feed(obj)
seiscomp.logging.debug(f"got new magnitude '{obj.publicID()}'")
return
# event
obj = seiscomp.datamodel.Event.Cast(scObject)
if obj:
org = self._cache.get(
seiscomp.datamodel.Origin, obj.preferredOriginID()
)
seiscomp.logging.debug(f"got new event '{obj.publicID()}'")
if org is None:
    seiscomp.logging.warning(
        f"unable to get origin {obj.preferredOriginID()}, ignoring event"
    )
    return
agencyID = org.creationInfo().agencyID()
author = org.creationInfo().author()
if not self._agencyIDs or agencyID in self._agencyIDs:
if not self._authors or author in self._authors:
self.notifyEvent(obj, True)
return
except Exception:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def updateObject(self, parentID, scObject):
try:
obj = seiscomp.datamodel.Event.Cast(scObject)
if obj:
org = self._cache.get(
seiscomp.datamodel.Origin, obj.preferredOriginID()
)
seiscomp.logging.debug(f"update event '{obj.publicID()}'")
if org is None:
    seiscomp.logging.warning(
        f"unable to get origin {obj.preferredOriginID()}, ignoring event update"
    )
    return
agencyID = org.creationInfo().agencyID()
author = org.creationInfo().author()
if not self._agencyIDs or agencyID in self._agencyIDs:
if not self._authors or author in self._authors:
self.notifyEvent(obj, False)
except Exception:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def handleTimeout(self):
self.checkEnoughPicks()
def checkEnoughPicks(self):
if self._pickCache.size() >= self._phaseNumber:
# wait until self._phaseInterval has elapsed before calling the
# script (more picks might come)
timeWindowLength = (
seiscomp.core.Time.GMT() - self._pickCache.oldest()
).length()
if timeWindowLength >= self._phaseInterval:
picks = [seiscomp.datamodel.Pick.Cast(o) for o in self._pickCache]
self.runPickScript(picks)
self._pickCache.clear()
def notifyPick(self, pick):
if self._phaseNumber <= 1:
self.runPickScript([pick])
else:
self.checkEnoughPicks()
self._pickCache.feed(pick)
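# Timing sketch (assumed values for illustration): with phaseNumber=5 and
# phaseInterval=60, picks are collected in self._pickCache; once at least
# 5 picks are buffered AND the oldest one is more than 60 seconds old, the
# timer-driven checkEnoughPicks() hands the whole batch to runPickScript()
# and clears the cache.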
def notifyAmplitude(self, amp):
self.runAmpScript(amp)
def notifyEvent(self, evt, newEvent=True, dtmax=3600):
try:
org = self._cache.get(seiscomp.datamodel.Origin, evt.preferredOriginID())
if not org:
seiscomp.logging.warning(
f"unable to get origin {evt.preferredOriginID()}, ignoring event message"
)
return
preliminary = False
try:
if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
preliminary = True
except Exception:
pass
if preliminary is False:
nmag = self._cache.get(
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()
)
if nmag:
mag = nmag.magnitude().value()
mag = f"magnitude {mag:.1f}"
else:
if len(evt.preferredMagnitudeID()) > 0:
seiscomp.logging.warning(
f"unable to get magnitude {evt.preferredMagnitudeID()}, "
"ignoring event message"
)
else:
seiscomp.logging.warning(
"no preferred magnitude yet, ignoring event message"
)
return
# keep track of old events
if self._newWhenFirstSeen:
if evt.publicID() in self._oldEvents:
newEvent = False
else:
newEvent = True
self._oldEvents.append(evt.publicID())
dsc = seiscomp.seismology.Regions.getRegionName(
org.latitude().value(), org.longitude().value()
)
if self._eventDescriptionPattern:
try:
city, dist, azi = self.nearestCity(
org.latitude().value(),
org.longitude().value(),
self._citiesMaxDist,
self._citiesMinPopulation,
)
if city:
dsc = self._eventDescriptionPattern
region = seiscomp.seismology.Regions.getRegionName(
org.latitude().value(), org.longitude().value()
)
distStr = str(int(seiscomp.math.deg2km(dist)))
dsc = (
dsc.replace("@region@", region)
.replace("@dist@", distStr)
.replace("@poi@", city.name())
)
except Exception:
pass
seiscomp.logging.debug(f"desc: {dsc}")
dep = org.depth().value()
now = seiscomp.core.Time.GMT()
otm = org.time().value()
dt = (now - otm).seconds()
# if dt > dtmax:
# return
if dt > 3600:
dt = f"{int(dt / 3600)} hours {int((dt % 3600) / 60)} minutes ago"
elif dt > 120:
dt = f"{int(dt / 60)} minutes ago"
else:
dt = f"{int(dt)} seconds ago"
if preliminary:
message = f"earthquake, XXL, preliminary, {dt}, {dsc}"
else:
message = "earthquake, %s, %s, %s, depth %d kilometers" % (
dt,
dsc,
mag,
int(dep + 0.5),
)
seiscomp.logging.info(message)
if not self._eventScript:
return
if self._eventProc is not None:
if self._eventProc.poll() is None:
seiscomp.logging.warning(
"EventScript still in progress -> skipping message"
)
return
try:
param2 = 0
param3 = 0
param4 = ""
if newEvent:
param2 = 1
org = self._cache.get(
seiscomp.datamodel.Origin, evt.preferredOriginID()
)
if org:
try:
param3 = org.quality().associatedPhaseCount()
except Exception:
pass
nmag = self._cache.get(
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()
)
if nmag:
param4 = f"{nmag.magnitude().value():.1f}"
self._eventProc = subprocess.Popen(
[
self._eventScript,
message,
"%d" % param2,
evt.publicID(),
"%d" % param3,
param4,
]
)
seiscomp.logging.info(
f"Started event script with pid {self._eventProc.pid}"
)
except Exception:
seiscomp.logging.error(
f"Failed to start event script '{self._eventScript} {message} "
f"{param2} {param3} {param4}'"
)
except Exception:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def printUsage(self):
print(
"""Usage:
scalert [options]
Execute custom scripts upon arrival of objects or updates"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Execute scalert on command line with debug output
scalert --debug
"""
)
app = ObjectAlert(len(sys.argv), sys.argv)
sys.exit(app())
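# --------------------------------------------------------------------------
# Illustration only (not part of scalert): a minimal sketch of an event
# script that could be wired up via scripts.event. The function name and
# output format are hypothetical; the five positional parameters follow the
# description above: message, new-event flag, eventID, arrival count and
# magnitude (possibly empty when no preferred magnitude exists yet).
def example_event_script(argv):
    message = argv[1]
    state = "NEW" if argv[2] == "1" else "UPDATE"
    eventID = argv[3]
    arrivals = argv[4]
    magnitude = argv[5] if len(argv) > 5 and argv[5] else "n/a"
    print(f"[{state}] {eventID}: {message} (arrivals={arrivals}, M={magnitude})")
# --------------------------------------------------------------------------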

BIN
bin/scamp Executable file

Binary file not shown.

BIN
bin/scanloc Executable file

Binary file not shown.

BIN
bin/scardac Executable file

Binary file not shown.

1669
bin/scart Executable file

File diff suppressed because it is too large Load Diff

BIN
bin/scautoloc Executable file

Binary file not shown.

BIN
bin/scautopick Executable file

Binary file not shown.

19
bin/scbulletin Executable file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import seiscomp.scbulletin
if __name__ == "__main__":
seiscomp.scbulletin.main()

BIN
bin/scchkcfg Executable file

Binary file not shown.

BIN
bin/sccnv Executable file

Binary file not shown.

BIN
bin/scconfig Executable file

Binary file not shown.

BIN
bin/scdb Executable file

Binary file not shown.

1320
bin/scdbstrip Executable file

File diff suppressed because it is too large Load Diff

BIN
bin/scdispatch Executable file

Binary file not shown.

292
bin/scdumpcfg Executable file
View File

@ -0,0 +1,292 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import os
import seiscomp.client
import seiscomp.datamodel
import seiscomp.config
import seiscomp.system
def readParams(sc_params):
if sc_params.baseID():
sc_params_base = seiscomp.datamodel.ParameterSet.Find(sc_params.baseID())
if sc_params_base is None:
print(
f"Warning: {sc_params.baseID()}: base parameter set for "
f"{sc_params.publicID()} not found",
file=sys.stderr,
)
params = {}
else:
params = readParams(sc_params_base)
else:
params = {}
for i in range(sc_params.parameterCount()):
p = sc_params.parameter(i)
params[p.name()] = p.value()
return params
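# Resolution sketch (hypothetical values): if parameter set B has baseID A,
# readParams(B) first collects A's parameters recursively and then overwrites
# them with B's own, so A:{detecStream: BH, minSNR: 3} and B:{minSNR: 5}
# resolve to {detecStream: BH, minSNR: 5}; derived sets override their base.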
class DumpCfg(seiscomp.client.Application):
def __init__(self, argc, argv):
if argc < 2:
print("scdumpcfg {modname} [options]", file=sys.stderr)
raise RuntimeError
self.appName = argv[1]
self.config = seiscomp.config.Config()
# Remove first parameter to replace appname with passed module name
# argc = argc - 1
# argv = argv[1:]
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setMessagingUsername("")
self.setDatabaseEnabled(True, True)
self.setLoadConfigModuleEnabled(True)
self.setDaemonEnabled(False)
self.dumpBindings = False
self.allowGlobal = False
self.formatCfg = False
self.nslc = False
self.param = None
def createCommandLineDescription(self):
self.commandline().addGroup("Dump")
self.commandline().addOption(
"Dump", "bindings,B", "Dump bindings instead of module configuration."
)
self.commandline().addOption(
"Dump",
"allow-global,G",
"Print global bindings if no module binding is avaible.",
)
self.commandline().addStringOption(
"Dump",
"param,P",
"Specify the parameter name(s) to filter for. Use comma sepration for "
"multiple parameters.",
)
self.commandline().addOption(
"Dump", "cfg", "Print output in .cfg format. Does not work along with -B."
)
self.commandline().addOption(
"Dump",
"nslc",
"Print the list of channels which have bindings of the given module. "
"Requires to set -B. Can be used by other modules, e.g., invextr, scart, "
"scmssort, scevtstreams.",
)
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
self.dumpBindings = self.commandline().hasOption("bindings")
try:
param = self.commandline().optionString("param")
self.param = param.split(",")
except RuntimeError:
pass
self.allowGlobal = self.commandline().hasOption("allow-global")
self.formatCfg = self.commandline().hasOption("cfg")
self.nslc = self.commandline().hasOption("nslc")
if self.dumpBindings and self.databaseURI() != "":
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, False)
if not self.dumpBindings:
self.setMessagingEnabled(False)
self.setDatabaseEnabled(False, False)
self.setLoadConfigModuleEnabled(False)
return True
def initConfiguration(self):
if self.appName in ("-h", "--help"):
self.printUsage()
return False
if not seiscomp.client.Application.initConfiguration(self):
return False
seiscomp.system.Environment.Instance().initConfig(self.config, self.appName)
return True
def initSubscriptions(self):
# Do nothing.
return True
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
Dump bindings or module configurations used by a specific module or global for \
particular stations.""",
file=sys.stderr,
)
seiscomp.client.Application.printUsage(self)
print(
f"""Examples:
Dump scautopick bindings configuration including global for all stations
{os.path.basename(__file__)} scautopick -d localhost -BG
Connect to messaging for the database connection and dump scautopick bindings \
configuration including global for all stations
{os.path.basename(__file__)} scautopick -H localhost -BG
Dump scautopick module configuration including global parameters
{os.path.basename(__file__)} scautopick --cfg
Dump global bindings configuration considered by scmv
{os.path.basename(__file__)} scmv -d localhost -BG
Dump the list of streams configured with scautopick bindings
{os.path.basename(__file__)} scautopick -d localhost -B --nslc
Dump specific parameters configured with scautopick bindings
{os.path.basename(__file__)} scautopick -B -d localhost \
-P spicker.AIC.minSNR,spicker.AIC.minCnt
""",
file=sys.stderr,
)
def run(self):
cfg = self.config
if self.nslc:
nslc = set()
if not self.dumpBindings:
symtab = cfg.symbolTable()
names = cfg.names()
count = 0
for name in names:
if self.param and name not in self.param:
continue
sym = symtab.get(name)
if self.formatCfg:
if sym.comment:
if count > 0:
print("")
print(f"{sym.comment}")
print(f"{cfg.escapeIdentifier(sym.name)} = {sym.content}")
else:
print(f"{sym.name}")
print(f" value(s) : {', '.join(sym.values)}")
print(f" source : {sym.uri}")
count = count + 1
if self.param and count == 0:
print(f"{self.param}: definition not found", file=sys.stderr)
else:
cfg = self.configModule()
if cfg is None:
print("No config module read", file=sys.stderr)
return False
tmp = {}
for i in range(cfg.configStationCount()):
cfg_sta = cfg.configStation(i)
tmp[(cfg_sta.networkCode(), cfg_sta.stationCode())] = cfg_sta
name = self.appName
# For backward compatibility rename global to default
if name == "global":
name = "default"
for item in sorted(tmp.keys()):
cfg_sta = tmp[item]
sta_enabled = cfg_sta.enabled()
cfg_setup = seiscomp.datamodel.findSetup(
cfg_sta, name, self.allowGlobal
)
if cfg_setup is not None:
suffix = ""
if sta_enabled and cfg_setup.enabled():
out = "+ "
else:
suffix = " ("
if not sta_enabled:
suffix += "station disabled"
if not cfg_setup.enabled():
if suffix:
suffix += ", "
suffix += "setup disabled"
suffix += ")"
out = "- "
out += f"{cfg_sta.networkCode()}.{cfg_sta.stationCode()}{suffix}\n"
params = seiscomp.datamodel.ParameterSet.Find(
cfg_setup.parameterSetID()
)
if params is None:
print(
f"ERROR: {cfg_setup.parameterSetID()}: ParameterSet not found",
file=sys.stderr,
)
return False
params = readParams(params)
if self.nslc:
try:
sensorLocation = params["detecLocid"]
except KeyError:
sensorLocation = ""
try:
detecStream = params["detecStream"]
except KeyError:
detecStream = ""
stream = f"{cfg_sta.networkCode()}.{cfg_sta.stationCode()}.{sensorLocation}.{detecStream}"
nslc.add(stream)
count = 0
for param_name in sorted(params.keys()):
if self.param and param_name not in self.param:
continue
out += f" {param_name}: {params[param_name]}\n"
count = count + 1
if not self.nslc and count > 0:
print(out)
if self.nslc:
for stream in sorted(nslc):
print(stream, file=sys.stdout)
return True
try:
app = DumpCfg(len(sys.argv), sys.argv)
except Exception:
sys.exit(1)
sys.exit(app())

84
bin/scdumpobject Executable file
View File

@ -0,0 +1,84 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import seiscomp.client, seiscomp.datamodel, seiscomp.io
class ObjectDumper(seiscomp.client.Application):
def __init__(self):
seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, False)
self.setMessagingUsername("")
def createCommandLineDescription(self):
seiscomp.client.Application.createCommandLineDescription(self)
self.commandline().addGroup("Dump")
self.commandline().addStringOption("Dump", "public-id,P", "publicID")
def loadEventParametersObject(self, publicID):
for tp in (
seiscomp.datamodel.Pick,
seiscomp.datamodel.Amplitude,
seiscomp.datamodel.Origin,
seiscomp.datamodel.Event,
seiscomp.datamodel.FocalMechanism,
seiscomp.datamodel.Magnitude,
seiscomp.datamodel.StationMagnitude,
):
obj = self.query().loadObject(tp.TypeInfo(), publicID)
obj = tp.Cast(obj)
if obj:
ep = seiscomp.datamodel.EventParameters()
ep.add(obj)
return ep
def loadInventoryObject(self, publicID):
for tp in (
seiscomp.datamodel.Network,
seiscomp.datamodel.Station,
seiscomp.datamodel.Sensor,
seiscomp.datamodel.SensorLocation,
seiscomp.datamodel.Stream,
):
obj = self.query().loadObject(tp.TypeInfo(), publicID)
obj = tp.Cast(obj)
if obj:
return obj
def run(self):
publicID = self.commandline().optionString("public-id")
obj = self.loadEventParametersObject(publicID)
if obj is None:
obj = self.loadInventoryObject(publicID)
if obj is None:
raise ValueError("unknown object '" + publicID + "'")
# dump formatted XML archive to stdout
ar = seiscomp.io.XMLArchive()
ar.setFormattedOutput(True)
ar.create("-")
ar.writeObject(obj)
ar.close()
return True
if __name__ == "__main__":
app = ObjectDumper()
app()
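# Usage sketch (hypothetical publicID and database URL):
#   scdumpobject -d mysql://sysop:sysop@localhost/seiscomp -P Origin/20240101.123456
# looks the ID up first among the event parameter types, then among the
# inventory types, and prints the first match as formatted XML to stdout.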

79
bin/sceplog Executable file
View File

@ -0,0 +1,79 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import os
import seiscomp.client
import seiscomp.datamodel
import seiscomp.io
class EventParameterLog(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(False, False)
self.setMessagingUsername("")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.addMessagingSubscription("AMPLITUDE")
self.addMessagingSubscription("PICK")
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
# EventParameter object
self._eventParameters = seiscomp.datamodel.EventParameters()
def printUsage(self):
print(
"""Usage:
sceplog [options]
Receive event parameters from messaging and write them to stdout in SCML"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Execute sceplog with debug output
sceplog --debug
"""
)
def run(self):
if not seiscomp.client.Application.run(self):
return False
ar = seiscomp.io.XMLArchive()
ar.setFormattedOutput(True)
if ar.create("-"):
ar.writeObject(self._eventParameters)
ar.close()
# Hack to avoid the "close failed in file object destructor"
# exception
# print ""
sys.stdout.write("\n")
return True
app = EventParameterLog(len(sys.argv), sys.argv)
sys.exit(app())

BIN
bin/scesv Executable file

Binary file not shown.

BIN
bin/scevent Executable file

Binary file not shown.

924
bin/scevtlog Executable file
View File

@ -0,0 +1,924 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import os
import traceback
import re
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.io
import seiscomp.logging
import seiscomp.system
import seiscomp.scbulletin
def time2str(time):
"""
Convert a seiscomp.core.Time to a string
"""
return time.toString("%Y-%m-%d %H:%M:%S.%f000000")[:23]
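# Example: a time of 2024-05-01 12:34:56.789 is rendered as
# "2024-05-01 12:34:56.789"; the [:23] slice keeps millisecond precision.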
def createDirectory(dir):
if os.access(dir, os.W_OK):
return True
try:
os.makedirs(dir)
return True
except:
return False
def originStatusToChar(org):
# Manual origins are always tagged as M
try:
if org.evaluationMode() == seiscomp.datamodel.MANUAL:
return "M"
except:
pass
try:
if org.evaluationStatus() == seiscomp.datamodel.PRELIMINARY:
return "P"
elif (
org.evaluationStatus() == seiscomp.datamodel.CONFIRMED
or org.evaluationStatus() == seiscomp.datamodel.REVIEWED
or org.evaluationStatus() == seiscomp.datamodel.FINAL
):
return "C"
elif org.evaluationStatus() == seiscomp.datamodel.REJECTED:
return "X"
elif org.evaluationStatus() == seiscomp.datamodel.REPORTED:
return "R"
except:
pass
return "A"
class CachePopCallback(seiscomp.datamodel.CachePopCallback):
def __init__(self, target):
seiscomp.datamodel.CachePopCallback.__init__(self)
self.target = target
def handle(self, obj):
self.target.objectAboutToPop(obj)
class EventHistory(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
seiscomp.datamodel.Notifier.SetEnabled(False)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setMessagingUsername("scevtlog")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
# Create a callback object that gets called when an object
# is going to be removed from the cache
self._popCallback = CachePopCallback(self)
# Create an object cache of half an hour
self._cache = seiscomp.datamodel.PublicObjectTimeSpanBuffer(
self.query(), seiscomp.core.TimeSpan(30.0 * 60.0)
)
self._cache.setPopCallback(self._popCallback)
# Event progress counter
self._eventProgress = dict()
# Event-Origin mapping
self._eventToOrg = dict()
self._orgToEvent = dict()
# Event-Magnitude mapping
self._eventToMag = dict()
self._magToEvent = dict()
self._directory = "@LOGDIR@/events"
self._format = "xml"
self._currentDirectory = ""
self._revisionFileExt = ".zip"
self._useGZIP = False
def createCommandLineDescription(self):
try:
self.commandline().addGroup("Storage")
self.commandline().addStringOption(
"Storage",
"directory,o",
"Specify the storage directory. " "Default: @LOGDIR@/events.",
)
self.commandline().addStringOption(
"Storage",
"format,f",
"Specify storage format (autoloc1, autoloc3, xml [default])",
)
except:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
return True
def initConfiguration(self):
if not seiscomp.client.Application.initConfiguration(self):
return False
try:
self._directory = self.configGetString("directory")
except:
pass
try:
self._format = self.configGetString("format")
except:
pass
try:
if self.configGetBool("gzip"):
self._useGZIP = True
self._revisionFileExt = ".gz"
except:
pass
return True
def printUsage(self):
print(
"""Usage:
scevtlog [options]
Save event history into files"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Execute on command line with debug output
scevtlog --debug
"""
)
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self._directory = self.commandline().optionString("directory")
except:
pass
try:
self._format = self.commandline().optionString("format")
except:
pass
if (
self._format != "autoloc1"
and self._format != "autoloc3"
and self._format != "xml"
):
self._format = "xml"
try:
if self._directory[-1] != "/":
self._directory = self._directory + "/"
except:
pass
if self._directory:
self._directory = seiscomp.system.Environment.Instance().absolutePath(
self._directory
)
sys.stderr.write(f"Logging events to {self._directory}\n")
self._cache.setDatabaseArchive(self.query())
return True
# def run(self):
# obj = self._cache.get(seiscomp.datamodel.Magnitude, "or080221153929#16#netMag.mb")
# self.updateObject(obj)
# return True
def done(self):
seiscomp.client.Application.done(self)
self._cache.setDatabaseArchive(None)
def printEvent(self, evt, newEvent):
if self._format != "xml":
self.printEventProcAlert(evt, newEvent)
else:
self.printEventXML(evt, newEvent)
self.advanceEventProgress(evt.publicID())
def getSummary(self, time, org, mag):
strTime = time.toString("%Y-%m-%d %H:%M:%S")
summary = [strTime, "", "", "", "", "", "", "", "", ""]
if org:
tim = org.time().value()
latency = time - tim
summary[1] = "%5d.%02d" % (
latency.seconds() / 60,
(latency.seconds() % 60) * 100 / 60,
)
lat = org.latitude().value()
lon = org.longitude().value()
dep = "%7s" % "---"
try:
dep = f"{org.depth().value():7.0f}"
summary[4] = dep
except:
summary[4] = "%7s" % ""
phases = "%5s" % "---"
try:
phases = "%5d" % org.quality().usedPhaseCount()
summary[5] = phases
except:
summary[5] = "%5s" % ""
summary[2] = f"{lat:7.2f}"
summary[3] = f"{lon:7.2f}"
try:
summary[9] = originStatusToChar(org)
except:
summary[9] = "-"
if mag:
summary[6] = "%12s" % mag.type()
summary[7] = f"{mag.magnitude().value():5.2f}"
try:
summary[8] = "%5d" % mag.stationCount()
except:
summary[8] = " "
else:
summary[6] = "%12s" % ""
summary[7] = " "
summary[8] = " "
return summary
def printEventProcAlert(self, evt, newEvent):
now = seiscomp.core.Time.GMT()
org = self._cache.get(seiscomp.datamodel.Origin, evt.preferredOriginID())
prefmag = self._cache.get(
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()
)
summary = self.getSummary(now, org, prefmag)
# Load arrivals
if org.arrivalCount() == 0:
self.query().loadArrivals(org)
# Load station magnitudes
if org.stationMagnitudeCount() == 0:
self.query().loadStationMagnitudes(org)
# Load magnitudes
if org.magnitudeCount() == 0:
self.query().loadMagnitudes(org)
picks = []
amps = []
if org:
narr = org.arrivalCount()
for i in range(narr):
picks.append(
self._cache.get(seiscomp.datamodel.Pick, org.arrival(i).pickID())
)
nstamags = org.stationMagnitudeCount()
for i in range(nstamags):
amps.append(
self._cache.get(
seiscomp.datamodel.Amplitude,
org.stationMagnitude(i).amplitudeID(),
)
)
netmag = {}
nmag = org.magnitudeCount()
bulletin = seiscomp.scbulletin.Bulletin(None, self._format)
try:
txt = bulletin.printEvent(evt)
except:
txt = ""
if self._directory is None:
sys.stdout.write("%s" % ("#<\n" + txt + "#>\n"))
sys.stdout.flush()
else:
# Use created time to look up the proper directory
try:
arNow = evt.creationInfo().creationTime().get()
# Otherwise use now (in case the event creation time has not been set,
# which is always valid within the SC3 distribution)
except:
arNow = now.get()
seiscomp.logging.error(
"directory is "
+ self._directory
+ "/".join(["%.2d" % i for i in arNow[1:4]])
+ "/"
+ evt.publicID()
+ "/"
)
directory = (
self._directory
+ "/".join(["%.2d" % i for i in arNow[1:4]])
+ "/"
+ evt.publicID()
+ "/"
)
if directory != self._currentDirectory:
if not createDirectory(directory):
seiscomp.logging.error(f"Unable to create directory {directory}")
return
self._currentDirectory = directory
self.writeLog(
self._currentDirectory
+ self.convertID(evt.publicID())
+ "."
+ ("%06d" % self.eventProgress(evt.publicID(), directory)),
txt,
"w",
)
self.writeLog(
self._currentDirectory + self.convertID(evt.publicID()) + ".last",
txt,
"w",
)
self.writeLog(self._directory + "last", txt, "w")
self.writeLog(
self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
"|".join(summary),
"a",
"# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount",
)
seiscomp.logging.info("cache size = %d" % self._cache.size())
def printEventXML(self, evt, newEvent):
now = seiscomp.core.Time.GMT()
# Load comments
if evt.commentCount() == 0:
self.query().loadComments(evt)
# Load origin references
if evt.originReferenceCount() == 0:
self.query().loadOriginReferences(evt)
# Load event descriptions
if evt.eventDescriptionCount() == 0:
self.query().loadEventDescriptions(evt)
org = self._cache.get(seiscomp.datamodel.Origin, evt.preferredOriginID())
if evt.preferredFocalMechanismID():
fm = self._cache.get(
seiscomp.datamodel.FocalMechanism, evt.preferredFocalMechanismID()
)
else:
fm = None
# Load comments
if org.commentCount() == 0:
self.query().loadComments(org)
# Load arrivals
if org.arrivalCount() == 0:
self.query().loadArrivals(org)
prefmag = self._cache.get(
seiscomp.datamodel.Magnitude, evt.preferredMagnitudeID()
)
wasEnabled = seiscomp.datamodel.PublicObject.IsRegistrationEnabled()
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
ep = seiscomp.datamodel.EventParameters()
evt_cloned = seiscomp.datamodel.Event.Cast(evt.clone())
ep.add(evt_cloned)
summary = self.getSummary(now, org, prefmag)
if fm:
ep.add(fm)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
# Load focal mechanism references
if evt.focalMechanismReferenceCount() == 0:
self.query().loadFocalMechanismReferences(evt)
# Load moment tensors
if fm.momentTensorCount() == 0:
self.query().loadMomentTensors(fm)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
# Copy focal mechanism reference
fm_ref = evt.focalMechanismReference(
seiscomp.datamodel.FocalMechanismReferenceIndex(fm.publicID())
)
if fm_ref:
fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference.Cast(
fm_ref.clone()
)
if fm_ref_cloned is None:
fm_ref_cloned = seiscomp.datamodel.FocalMechanismReference(
fm.publicID()
)
evt_cloned.add(fm_ref_cloned)
nmt = fm.momentTensorCount()
for i in range(nmt):
mt = fm.momentTensor(i)
if not mt.derivedOriginID():
continue
# Origin already added
if ep.findOrigin(mt.derivedOriginID()) is not None:
continue
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
derivedOrigin = self._cache.get(
seiscomp.datamodel.Origin, mt.derivedOriginID()
)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
if derivedOrigin is None:
seiscomp.logging.warning(
f"derived origin for MT {mt.derivedOriginID()} not found"
)
continue
# Origin has been read from database -> read all children
if not self._cache.cached():
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
self.query().load(derivedOrigin)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
# Add it to the event parameters
ep.add(derivedOrigin)
if org:
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
# Load magnitudes
if org.magnitudeCount() == 0:
self.query().loadMagnitudes(org)
if org.stationMagnitudeCount() == 0:
self.query().loadStationMagnitudes(org)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
# Copy event comments
ncmts = evt.commentCount()
for i in range(ncmts):
cmt_cloned = seiscomp.datamodel.Comment.Cast(evt.comment(i).clone())
evt_cloned.add(cmt_cloned)
# Copy origin references
org_ref = evt.originReference(
seiscomp.datamodel.OriginReferenceIndex(org.publicID())
)
if org_ref:
org_ref_cloned = seiscomp.datamodel.OriginReference.Cast(
org_ref.clone()
)
if org_ref_cloned is None:
org_ref_cloned = seiscomp.datamodel.OriginReference(org.publicID())
evt_cloned.add(org_ref_cloned)
# Copy event descriptions
for i in range(evt.eventDescriptionCount()):
ed_cloned = seiscomp.datamodel.EventDescription.Cast(
evt.eventDescription(i).clone()
)
evt_cloned.add(ed_cloned)
org_cloned = seiscomp.datamodel.Origin.Cast(org.clone())
ep.add(org_cloned)
# Copy origin comments
ncmts = org.commentCount()
for i in range(ncmts):
cmt_cloned = seiscomp.datamodel.Comment.Cast(org.comment(i).clone())
org_cloned.add(cmt_cloned)
# Copy arrivals
narr = org.arrivalCount()
for i in range(narr):
arr_cloned = seiscomp.datamodel.Arrival.Cast(org.arrival(i).clone())
org_cloned.add(arr_cloned)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
pick = self._cache.get(seiscomp.datamodel.Pick, arr_cloned.pickID())
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
if pick:
pick_cloned = seiscomp.datamodel.Pick.Cast(pick.clone())
# Load comments
if pick.commentCount() == 0:
self.query().loadComments(pick)
# Copy pick comments
ncmts = pick.commentCount()
for i in range(ncmts):
cmt_cloned = seiscomp.datamodel.Comment.Cast(
pick.comment(i).clone()
)
pick_cloned.add(cmt_cloned)
ep.add(pick_cloned)
# Copy network magnitudes
nmag = org.magnitudeCount()
for i in range(nmag):
mag = org.magnitude(i)
mag_cloned = seiscomp.datamodel.Magnitude.Cast(mag.clone())
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
if mag.stationMagnitudeContributionCount() == 0:
self.query().loadStationMagnitudeContributions(mag)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
# Copy magnitude references
nmagref = mag.stationMagnitudeContributionCount()
for j in range(nmagref):
mag_ref_cloned = (
seiscomp.datamodel.StationMagnitudeContribution.Cast(
mag.stationMagnitudeContribution(j).clone()
)
)
mag_cloned.add(mag_ref_cloned)
org_cloned.add(mag_cloned)
# Copy station magnitudes and station amplitudes
smag = org.stationMagnitudeCount()
amp_map = dict()
for i in range(smag):
mag_cloned = seiscomp.datamodel.StationMagnitude.Cast(
org.stationMagnitude(i).clone()
)
org_cloned.add(mag_cloned)
if mag_cloned.amplitudeID() not in amp_map:
amp_map[mag_cloned.amplitudeID()] = True
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
amp = self._cache.get(
seiscomp.datamodel.Amplitude, mag_cloned.amplitudeID()
)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(False)
if amp:
amp_cloned = seiscomp.datamodel.Amplitude.Cast(amp.clone())
ep.add(amp_cloned)
seiscomp.datamodel.PublicObject.SetRegistrationEnabled(wasEnabled)
# archive.create(event.publicID() + )
ar = seiscomp.io.XMLArchive()
ar.setFormattedOutput(True)
if self._directory is None:
sys.stdout.write("#<\n")
ar.create("-")
ar.writeObject(ep)
ar.close()
sys.stdout.write("#>\n")
sys.stdout.flush()
else:
# Use created time to look up the proper directory
try:
arNow = evt.creationInfo().creationTime().get()
# Otherwise use now (in case the event creation time has not been set,
# which is always valid within the SC3 distribution)
except:
arNow = now.get()
directory = (
self._directory
+ "/".join(["%.2d" % i for i in arNow[1:4]])
+ "/"
+ evt.publicID()
+ "/"
)
if directory != self._currentDirectory:
if not createDirectory(directory):
seiscomp.logging.error(f"Unable to create directory {directory}")
return
self._currentDirectory = directory
# self.writeLog(self._currentDirectory + evt.publicID(), "#<\n" + txt + "#>\n")
# self.writeLog(self._currentDirectory + evt.publicID() + ".last", txt, "w")
ar.create(
self._currentDirectory
+ self.convertID(evt.publicID())
+ "."
+ ("%06d" % self.eventProgress(evt.publicID(), directory))
+ ".xml"
+ self._revisionFileExt
)
ar.setCompression(True)
if self._useGZIP:
ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
ar.writeObject(ep)
ar.close()
# Write last file to root
ar.create(self._directory + "last.xml" + self._revisionFileExt)
ar.setCompression(True)
if self._useGZIP:
ar.setCompressionMethod(seiscomp.io.XMLArchive.GZIP)
ar.writeObject(ep)
ar.close()
# Write last xml
ar.create(
self._currentDirectory + self.convertID(evt.publicID()) + ".last.xml"
)
ar.setCompression(False)
ar.writeObject(ep)
ar.close()
self.writeLog(
self._currentDirectory + self.convertID(evt.publicID()) + ".summary",
"|".join(summary),
"a",
"# Layout: Timestamp, +OT (minutes, decimal), Latitude, Longitude, Depth, PhaseCount, MagType, Magnitude, MagCount",
)
del ep
def convertID(self, id):
"""Converts an ID containing slashes to one without slashes"""
p = re.compile("/")
return p.sub("_", id)
def writeLog(self, file, text, mode="a", header=None):
of = open(file, mode)
if of:
if of.tell() == 0 and header is not None:
of.write(header + "\n")
of.write(text + "\n")
of.close()
else:
seiscomp.logging.error(f"Unable to write file: {file}")
def objectAboutToPop(self, obj):
try:
evt = seiscomp.datamodel.Event.Cast(obj)
if evt:
try:
self._orgToEvent.pop(evt.preferredOriginID())
self._eventToOrg.pop(evt.publicID())
self._magToEvent.pop(evt.preferredMagnitudeID())
self._eventToMag.pop(evt.publicID())
self._eventProgress.pop(evt.publicID())
return
except:
pass
org = seiscomp.datamodel.Origin.Cast(obj)
if org:
try:
self._orgToEvent.pop(org.publicID())
except:
pass
return
mag = seiscomp.datamodel.Magnitude.Cast(obj)
if mag:
try:
self._magToEvent.pop(mag.publicID())
except:
pass
return
except:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
sys.exit(-1)
def eventProgress(self, evtID, directory):
# The progress is already stored
if evtID in self._eventProgress:
return self._eventProgress[evtID]
# Find the maximum file counter
maxid = -1
files = os.listdir(directory)
for file in files:
if not os.path.isfile(directory + file):
continue
fid = file[len(evtID + ".") : len(file)]
sep = fid.find(".")
if sep == -1:
sep = len(fid)
fid = fid[0:sep]
try:
nid = int(fid)
except:
continue
if nid > maxid:
maxid = nid
maxid = maxid + 1
self._eventProgress[evtID] = maxid
return maxid
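# Worked example (hypothetical directory content): for event "gfz2024abcd"
# with existing files gfz2024abcd.000000.xml.zip and gfz2024abcd.000001.xml.zip,
# the maximum counter found is 1, so eventProgress() caches and returns 2
# as the counter for the next revision file.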
def advanceEventProgress(self, evtID):
try:
self._eventProgress[evtID] = self._eventProgress[evtID] + 1
except:
pass
def addObject(self, parentID, object):
try:
obj = seiscomp.datamodel.Event.Cast(object)
if obj:
self._cache.feed(obj)
self._eventProgress[obj.publicID()] = 0
self.printEvent(obj, True)
self.updateCache(obj)
return
# New Magnitudes or Origins are not important for
# the history update but we feed it into the cache to
# access them faster later on in case they will become
# preferred entities
obj = seiscomp.datamodel.Magnitude.Cast(object)
if obj:
self._cache.feed(obj)
return
obj = seiscomp.datamodel.Origin.Cast(object)
if obj:
self._cache.feed(obj)
return
obj = seiscomp.datamodel.Pick.Cast(object)
if obj:
self._cache.feed(obj)
return
obj = seiscomp.datamodel.Amplitude.Cast(object)
if obj:
self._cache.feed(obj)
return
except:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
sys.exit(-1)
def updateObject(self, parentID, object):
try:
obj = seiscomp.datamodel.Event.Cast(object)
if obj:
self._cache.feed(obj)
self.printEvent(obj, False)
self.updateCache(obj)
return
# Updates of a Magnitude are only imported when it is
# the preferred one.
obj = seiscomp.datamodel.Magnitude.Cast(object)
if obj:
try:
evtID = self._magToEvent[obj.publicID()]
if evtID:
self._cache.feed(obj)
evt = self._cache.get(seiscomp.datamodel.Event, evtID)
if evt:
self.printEvent(evt, False)
else:
sys.stderr.write(
"Unable to fetch event for ID '%s' while update of magnitude '%s'\n"
% (evtID, obj.publicID())
)
else:
# Magnitude has not been associated to an event yet
pass
except:
# Search the corresponding event from the database
evt = self.query().getEventByPreferredMagnitudeID(obj.publicID())
# Associate the event (even if None) with the magnitude ID
if evt:
self._magToEvent[obj.publicID()] = evt.publicID()
self._cache.feed(obj)
self.printEvent(evt, False)
else:
self._magToEvent[obj.publicID()] = None
return
# Usually we do not update origins. To have it complete,
# this case will be supported as well
obj = seiscomp.datamodel.Origin.Cast(object)
if obj:
try:
evtID = self._orgToEvent[obj.publicID()]
if evtID:
self._cache.feed(obj)
evt = self._cache.get(seiscomp.datamodel.Event, evtID)
if evt:
self.printEvent(evt, False)
else:
sys.stderr.write(
"Unable to fetch event for ID '%s' while update of origin '%s'\n"
% (evtID, obj.publicID())
)
else:
# Origin has not been associated to an event yet
pass
except:
# Search the corresponding event from the database
evt = self.query().getEvent(obj.publicID())
if evt:
if evt.preferredOriginID() != obj.publicID():
evt = None
# Associate the event (even if None) with the origin ID
if evt:
self._orgToEvent[obj.publicID()] = evt.publicID()
self._cache.feed(obj)
self.printEvent(evt, False)
else:
self._orgToEvent[obj.publicID()] = None
return
return
except:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
sys.exit(-1)
def updateCache(self, evt):
# Event-Origin update
try:
orgID = self._eventToOrg[evt.publicID()]
if orgID != evt.preferredOriginID():
self._orgToEvent.pop(orgID)
except:
# origin not yet registered
pass
# Bind the current preferred origin ID to the event and
# vice versa
self._orgToEvent[evt.preferredOriginID()] = evt.publicID()
self._eventToOrg[evt.publicID()] = evt.preferredOriginID()
# Event-Magnitude update
try:
magID = self._eventToMag[evt.publicID()]
if magID != evt.preferredMagnitudeID():
self._magToEvent.pop(magID)
except:
# not yet registered
pass
# Bind the current preferred magnitude ID to the event and
# vice versa
self._magToEvent[evt.preferredMagnitudeID()] = evt.publicID()
self._eventToMag[evt.publicID()] = evt.preferredMagnitudeID()
app = EventHistory(len(sys.argv), sys.argv)
sys.exit(app())

332
bin/scevtls Executable file
View File

@ -0,0 +1,332 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import os
import sys
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.io
import seiscomp.logging
def readXML(self):
ar = seiscomp.io.XMLArchive()
if not ar.open(self._inputFile):
print(f"Unable to open input file {self._inputFile}")
return []
obj = ar.readObject()
if obj is None:
raise TypeError("invalid input file format")
ep = seiscomp.datamodel.EventParameters.Cast(obj)
if ep is None:
raise ValueError("no event parameters found in input file")
eventIDs = []
for i in range(ep.eventCount()):
evt = ep.event(i)
if self._modifiedAfterTime is not None:
try:
if evt.creationInfo().modificationTime() < self._modifiedAfterTime:
continue
except ValueError:
try:
if evt.creationInfo().creationTime() < self._modifiedAfterTime:
continue
except ValueError:
continue
if self._eventType:
try:
eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type())
if eventType != self._eventType:
continue
except ValueError:
continue
prefOrgID = evt.preferredOriginID()
# filter by origin time
org = ep.findOrigin(prefOrgID)
orgTime = org.time().value()
if orgTime < self._startTime:
continue
if orgTime > self._endTime:
continue
outputString = evt.publicID()
if self._preferredOrigin:
try:
outputString += " " + evt.preferredOriginID()
except ValueError:
outputString += " none"
eventIDs.append(outputString)
return eventIDs
class EventList(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, False)
self.setDaemonEnabled(False)
self._startTime = None
self._endTime = None
self.hours = None
self._delimiter = None
self._modifiedAfterTime = None
self._preferredOrigin = False
self._inputFile = None
self._eventType = None
def createCommandLineDescription(self):
self.commandline().addGroup("Input")
self.commandline().addStringOption(
"Input",
"input,i",
"Name of input XML file. Read from stdin if '-' is given. Deactivates "
"reading events from database",
)
self.commandline().addGroup("Events")
self.commandline().addStringOption(
"Events", "begin", "Specify the lower bound of the time interval."
)
self.commandline().addStringOption(
"Events", "end", "Specify the upper bound of the time interval."
)
self.commandline().addStringOption(
"Events",
"hours",
"Start searching given hours before"
" now. If set, --begin and --end "
"are ignored.",
)
self.commandline().addStringOption(
"Events",
"modified-after",
"Select events modified after the specified time.",
)
self.commandline().addStringOption(
"Events",
"event-type",
"Select events whith specified " "event type.",
)
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output",
"delimiter,D",
"Specify the delimiter of the resulting event IDs. Default: '\\n')",
)
self.commandline().addOption(
"Output",
"preferred-origin,p",
"Print the ID of the preferred origin along with the event ID.",
)
return True
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
try:
self._inputFile = self.commandline().optionString("input")
except RuntimeError:
pass
if self._inputFile:
self.setDatabaseEnabled(False, False)
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self.hours = float(self.commandline().optionString("hours"))
except RuntimeError:
pass
end = "2500-01-01T00:00:00Z"
if self.hours is None:
try:
start = self.commandline().optionString("begin")
except RuntimeError:
start = "1900-01-01T00:00:00Z"
self._startTime = seiscomp.core.Time.FromString(start)
if self._startTime is None:
seiscomp.logging.error(f"Wrong 'begin' format '{start}'")
return False
seiscomp.logging.debug(
f"Setting start to {self._startTime.toString('%FT%TZ')}"
)
try:
end = self.commandline().optionString("end")
except RuntimeError:
pass
self._endTime = seiscomp.core.Time.FromString(end)
if self._endTime is None:
seiscomp.logging.error(f"Wrong 'end' format '{end}'")
return False
seiscomp.logging.debug(f"Setting end to {self._endTime.toString('%FT%TZ')}")
else:
seiscomp.logging.debug(
"Time window set by hours option: ignoring all other time parameters"
)
secs = self.hours * 3600
maxSecs = 596523 * 3600
if secs > maxSecs:
seiscomp.logging.error(
f"Maximum hours exceeeded. Maximum is {int(maxSecs / 3600)}"
)
return False
self._startTime = seiscomp.core.Time.UTC() - seiscomp.core.TimeSpan(secs)
self._endTime = seiscomp.core.Time.FromString(end)
try:
self._delimiter = self.commandline().optionString("delimiter")
except RuntimeError:
self._delimiter = "\n"
try:
modifiedAfter = self.commandline().optionString("modified-after")
self._modifiedAfterTime = seiscomp.core.Time.FromString(modifiedAfter)
if self._modifiedAfterTime is None:
seiscomp.logging.error(
f"Wrong 'modified-after' format '{modifiedAfter}'"
)
return False
seiscomp.logging.debug(
f"Setting 'modified-after' time to {self._modifiedAfterTime.toString('%FT%TZ')}"
)
except RuntimeError:
pass
try:
self._preferredOrigin = self.commandline().hasOption("preferred-origin")
except RuntimeError:
pass
try:
self._eventType = self.commandline().optionString("event-type")
except RuntimeError:
pass
if self._eventType:
flagEvent = False
for i in range(seiscomp.datamodel.EEventTypeQuantity):
if self._eventType == seiscomp.datamodel.EEventTypeNames.name(i):
flagEvent = True
break
if not flagEvent:
seiscomp.logging.error(
f"'{self._eventType}' is not a valid SeisComP event type"
)
return False
return True
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
List event IDs available in a given time range and print to stdout."""
)
seiscomp.client.Application.printUsage(self)
print(
f"""Examples:
Print all event IDs from year 2022 and thereafter
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \
--begin "2022-01-01 00:00:00"
Print all event IDs with event type 'quarry blast'
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp --event-type 'quarry blast'
Print IDs of all events in XML file
{os.path.basename(__file__)} -i events.xml
"""
)
def run(self):
out = []
seiscomp.logging.debug(f"Search interval: {self._startTime} - {self._endTime}")
if self._inputFile:
out = readXML(self)
sys.stdout.write(f"{self._delimiter.join(out)}\n")
return True
for obj in self.query().getEvents(self._startTime, self._endTime):
evt = seiscomp.datamodel.Event.Cast(obj)
if not evt:
continue
if self._modifiedAfterTime is not None:
try:
if evt.creationInfo().modificationTime() < self._modifiedAfterTime:
continue
except ValueError:
try:
if evt.creationInfo().creationTime() < self._modifiedAfterTime:
continue
except ValueError:
continue
if self._eventType:
try:
eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type())
if eventType != self._eventType:
continue
except ValueError:
continue
outputString = evt.publicID()
if self._preferredOrigin:
try:
outputString += " " + evt.preferredOriginID()
except ValueError:
outputString += " none"
out.append(outputString)
sys.stdout.write(f"{self._delimiter.join(out)}\n")
return True
def main():
app = EventList(len(sys.argv), sys.argv)
app()
if __name__ == "__main__":
main()

576
bin/scevtstreams Executable file
View File

@ -0,0 +1,576 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import re
from seiscomp import client, core, datamodel, io
def readStreamList(listFile):
"""
Read list of streams from file
Parameters
----------
listFile : str
Input list file, one line per stream,
format: NET.STA.LOC.CHA
Returns
-------
list
streams.
"""
streams = []
try:
if listFile == "-":
f = sys.stdin
listFile = "stdin"
else:
f = open(listFile, "r", encoding="utf8")
except Exception:
print(f"error: unable to open '{listFile}'", file=sys.stderr)
return []
lineNumber = 0
for line in f:
lineNumber = lineNumber + 1
line = line.strip()
# ignore comments
if len(line) > 0 and line[0] == "#":
continue
if len(line) == 0:
continue
if len(line.split(".")) != 4:
f.close()
print(
f"error: {listFile} in line {lineNumber} has invalid line format, "
"expecting NET.STA.LOC.CHA - 1 line per stream",
file=sys.stderr,
)
return []
streams.append(line)
f.close()
if len(streams) == 0:
return []
return streams
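# Illustrative list file accepted by readStreamList() (made-up stream codes):
#   # comments and blank lines are skipped
#   CX.PB01..BHZ
#   GE.UGM.00.BHN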
class EventStreams(client.Application):
def __init__(self, argc, argv):
client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, False)
self.setDaemonEnabled(False)
self.eventID = None
self.inputFile = None
self.inputFormat = "xml"
self.margin = [300]
self.allNetworks = True
self.allStations = True
self.allLocations = True
self.allStreams = True
self.allComponents = True
# filter
self.network = None
self.station = None
self.streams = []
self.streamFilter = None
# output format
self.caps = False
self.fdsnws = False
def createCommandLineDescription(self):
self.commandline().addGroup("Input")
self.commandline().addStringOption(
"Input",
"input,i",
"Input XML file name. Reads event from the XML file instead of database. "
"Use '-' to read from stdin.",
)
self.commandline().addStringOption(
"Input",
"format,f",
"Input format to use (xml [default], zxml (zipped xml), binary). "
"Only relevant with --input.",
)
self.commandline().addGroup("Dump")
self.commandline().addStringOption(
"Dump", "event,E", "The ID of the event to consider."
)
self.commandline().addStringOption(
"Dump",
"net-sta",
"Filter read picks by network code or network and station code. Format: "
"NET or NET.STA.",
)
self.commandline().addStringOption(
"Dump",
"nslc",
"Stream list file to be used for filtering read picks by stream code. "
"'--net-sta' will be ignored. One line per stream, line format: "
"NET.STA.LOC.CHA.",
)
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output",
"margin,m",
"Time margin around the picked time window, default is 300. Added "
"before the first and after the last pick, respectively. Use 2 "
"comma-separted values (before,after) for asymmetric margins, e.g. "
"-m 120,300.",
)
self.commandline().addStringOption(
"Output",
"streams,S",
"Comma-separated list of streams per station to add, e.g. BH,SH,HH.",
)
self.commandline().addOption(
"Output",
"all-streams",
"Dump all streams. If unused, just streams with picks are dumped.",
)
self.commandline().addIntOption(
"Output",
"all-components,C",
"All components or just the picked ones (0). Default is 1",
)
self.commandline().addIntOption(
"Output",
"all-locations,L",
"All locations or just the picked ones (0). Default is 1",
)
self.commandline().addOption(
"Output",
"all-stations",
"Dump all stations from the same network. If unused, just stations "
"with picks are dumped.",
)
self.commandline().addOption(
"Output",
"all-networks",
"Dump all networks. If unused, just networks with picks are dumped."
" This option implies --all-stations, --all-locations, --all-streams, "
"--all-components and will only provide the time window.",
)
self.commandline().addOption(
"Output",
"resolve-wildcards,R",
"If all components are used, use inventory to resolve stream "
"components instead of using '?' (important when Arclink should be "
"used).",
)
self.commandline().addOption(
"Output",
"caps",
"Output in capstool format (Common Acquisition Protocol Server by "
"gempa GmbH).",
)
self.commandline().addOption(
"Output", "fdsnws", "Output in FDSN dataselect webservice POST format."
)
return True
def validateParameters(self):
if not client.Application.validateParameters(self):
return False
if self.commandline().hasOption("resolve-wildcards"):
self.setLoadStationsEnabled(True)
try:
self.inputFile = self.commandline().optionString("input")
self.setDatabaseEnabled(False, False)
except BaseException:
pass
return True
def init(self):
if not client.Application.init(self):
return False
try:
self.inputFormat = self.commandline().optionString("format")
except BaseException:
pass
try:
self.eventID = self.commandline().optionString("event")
except BaseException as exc:
if not self.inputFile:
raise ValueError(
"An eventID is mandatory if no input file is specified"
) from exc
try:
self.margin = self.commandline().optionString("margin").split(",")
except BaseException:
pass
try:
self.streams = self.commandline().optionString("streams").split(",")
except BaseException:
pass
try:
self.allComponents = self.commandline().optionInt("all-components") != 0
except BaseException:
pass
try:
self.allLocations = self.commandline().optionInt("all-locations") != 0
except BaseException:
pass
self.allStreams = self.commandline().hasOption("all-streams")
self.allStations = self.commandline().hasOption("all-stations")
self.allNetworks = self.commandline().hasOption("all-networks")
try:
networkStation = self.commandline().optionString("net-sta")
except RuntimeError:
networkStation = None
try:
nslcFile = self.commandline().optionString("nslc")
except RuntimeError:
nslcFile = None
if nslcFile:
networkStation = None
self.streamFilter = readStreamList(nslcFile)
if networkStation:
try:
self.network = networkStation.split(".")[0]
except IndexError:
print(
f"Error in network code '{networkStation}': Use '--net-sta' with "
"format NET or NET.STA",
file=sys.stderr,
)
return False
try:
self.station = networkStation.split(".")[1]
except IndexError:
pass
self.caps = self.commandline().hasOption("caps")
self.fdsnws = self.commandline().hasOption("fdsnws")
return True
def printUsage(self):
print(
"""Usage:
scevtstreams [options]
Extract stream information and time windows from an event"""
)
client.Application.printUsage(self)
print(
"""Examples:
Get the time windows for an event in the database:
scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp
Create lists compatible with fdsnws:
scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws
"""
)
def run(self):
resolveWildcards = self.commandline().hasOption("resolve-wildcards")
picks = []
# read picks from input file
if self.inputFile:
picks = self.readXML()
if not picks:
raise ValueError("Could not find picks in input file")
# read picks from database
else:
for obj in self.query().getEventPicks(self.eventID):
pick = datamodel.Pick.Cast(obj)
if pick is None:
continue
picks.append(pick)
if not picks:
raise ValueError(
f"Could not find picks for event {self.eventID} in database"
)
# filter picks
if self.streamFilter:
# filter channels by the --nslc option
channels = self.streamFilter
channelsRe = []
for channel in channels:
channel = re.sub(r"\.", r"\.", channel) # . becomes \.
channel = re.sub(r"\?", ".", channel) # ? becomes .
channel = re.sub(r"\*", ".*", channel) # * becomes.*
channel = re.compile(channel)
channelsRe.append(channel)
if self.streamFilter or self.network:
pickFiltered = []
for pick in picks:
net = pick.waveformID().networkCode()
sta = pick.waveformID().stationCode()
loc = pick.waveformID().locationCode()
cha = pick.waveformID().channelCode()
filtered = False
if self.streamFilter:
stream = f"{net}.{sta}.{loc}.{cha}"
for chaRe in channelsRe:
if chaRe.match(stream):
filtered = True
break
elif self.network:
if net != self.network:
continue
if self.station and sta != self.station:
continue
filtered = True
if filtered:
pickFiltered.append(pick)
else:
print(
f"Ignoring channel {stream}: not considered by configuration",
file=sys.stderr,
)
picks = pickFiltered
if not picks:
raise ValueError("Info: All picks are filtered out")
# calculate minimum and maximum pick time
minTime = None
maxTime = None
for pick in picks:
if minTime is None or minTime > pick.time().value():
minTime = pick.time().value()
if maxTime is None or maxTime < pick.time().value():
maxTime = pick.time().value()
# add time margin(s); no None check needed since pick time is
# mandatory and at least one pick exists
minTime = minTime - core.TimeSpan(float(self.margin[0]))
maxTime = maxTime + core.TimeSpan(float(self.margin[-1]))
# convert times to string depending on the requested output format
if self.caps:
timeFMT = "%Y,%m,%d,%H,%M,%S"
elif self.fdsnws:
timeFMT = "%FT%T"
else:
timeFMT = "%F %T"
minTime = minTime.toString(timeFMT)
maxTime = maxTime.toString(timeFMT)
inv = client.Inventory.Instance().inventory()
lines = set()
for pick in picks:
net = pick.waveformID().networkCode()
station = pick.waveformID().stationCode()
loc = pick.waveformID().locationCode()
streams = [pick.waveformID().channelCode()]
rawStream = streams[0][:2]
if self.allComponents:
if resolveWildcards:
iloc = datamodel.getSensorLocation(inv, pick)
if iloc:
tc = datamodel.ThreeComponents()
datamodel.getThreeComponents(
tc, iloc, rawStream, pick.time().value()
)
streams = []
if tc.vertical():
streams.append(tc.vertical().code())
if tc.firstHorizontal():
streams.append(tc.firstHorizontal().code())
if tc.secondHorizontal():
streams.append(tc.secondHorizontal().code())
else:
streams = [rawStream + "?"]
if self.allLocations:
loc = "*"
if self.allStations:
station = "*"
if self.allNetworks:
net = "*"
station = "*"
loc = "*"
# FDSNWS requires empty location to be encoded by 2 dashes
if not loc and self.fdsnws:
loc = "--"
# line format
if self.caps:
lineFMT = "{0} {1} {2} {3} {4} {5}"
elif self.fdsnws:
lineFMT = "{2} {3} {4} {5} {0} {1}"
else:
lineFMT = "{0};{1};{2}.{3}.{4}.{5}"
for s in streams:
if self.allStreams or self.allNetworks:
s = "*"
lines.add(lineFMT.format(minTime, maxTime, net, station, loc, s))
for s in self.streams:
if s == rawStream:
continue
if self.allStreams or self.allNetworks:
s = "*"
lines.add(
lineFMT.format(
minTime, maxTime, net, station, loc, s + streams[0][2]
)
)
for line in sorted(lines):
print(line, file=sys.stdout)
return True
def readXML(self):
if self.inputFormat == "xml":
ar = io.XMLArchive()
elif self.inputFormat == "zxml":
ar = io.XMLArchive()
ar.setCompression(True)
elif self.inputFormat == "binary":
ar = io.VBinaryArchive()
else:
raise TypeError(f"unknown input format '{self.inputFormat}'")
if not ar.open(self.inputFile):
raise IOError("unable to open input file")
obj = ar.readObject()
if obj is None:
raise TypeError("invalid input file format")
ep = datamodel.EventParameters.Cast(obj)
if ep is None:
raise ValueError("no event parameters found in input file")
# we require at least one origin which references picks via arrivals
if ep.originCount() == 0:
raise ValueError("no origin found in input file")
originIDs = []
# search for a specific event id
if self.eventID:
ev = datamodel.Event.Find(self.eventID)
if ev:
originIDs = [
ev.originReference(i).originID()
for i in range(ev.originReferenceCount())
]
else:
raise ValueError(f"Event ID {self.eventID} not found in input file")
# use first event/origin if no id was specified
else:
# no event, use first available origin
if ep.eventCount() == 0:
if ep.originCount() > 1:
print(
"WARNING: Input file contains no event but more than "
"1 origin. Considering only first origin",
file=sys.stderr,
)
originIDs.append(ep.origin(0).publicID())
# use origin references of first available event
else:
if ep.eventCount() > 1:
print(
"WARNING: Input file contains more than 1 event. "
"Considering only first event",
file=sys.stderr,
)
ev = ep.event(0)
originIDs = [
ev.originReference(i).originID()
for i in range(ev.originReferenceCount())
]
# collect pickIDs
pickIDs = set()
for oID in originIDs:
o = datamodel.Origin.Find(oID)
if o is None:
continue
for i in range(o.arrivalCount()):
pickIDs.add(o.arrival(i).pickID())
# lookup picks
picks = []
for pickID in pickIDs:
pick = datamodel.Pick.Find(pickID)
if pick:
picks.append(pick)
return picks
if __name__ == "__main__":
try:
app = EventStreams(len(sys.argv), sys.argv)
sys.exit(app())
except (ValueError, TypeError) as e:
print(f"ERROR: {e}", file=sys.stderr)
sys.exit(1)

38
bin/scgitinit Executable file
View File

@ -0,0 +1,38 @@
#!/bin/bash
# Initializes a GIT repository in $SEISCOMP_ROOT and adds important
# configuration files from 'etc' and 'share' directory
#
# Author: Stephan Herrnkind <herrnkind@gempa.de>
# search for SeisComP path
if [ x"$SEISCOMP_ROOT" = x ]; then
echo "SEISCOMP_ROOT not set"
exit 1
fi
# search git binary
which git > /dev/null
if [ $? -ne 0 ]; then
echo "git binary not found"
exit 2
fi
cd "$SEISCOMP_ROOT" || exit 3
# initialize git if necessary
[ -d .git ] || git rev-parse --git-dir > /dev/null 2>&1
if [ $? -eq 0 ]; then
echo "GIT repository in $SEISCOMP_ROOT already initialized"
else
git init || exit 4
fi
# add files
git add etc
find share -type f -regex \
".*\.\(bna\|cfg\|conf\|htaccess\|kml\|py\|sh\|tpl\|tvel\|txt\|xml\)" \
-execdir git add {} +
echo "files added to GIT, use 'git status' to get an overview and " \
"'git commit' to commit them"

BIN
bin/scheli Executable file

Binary file not shown.

BIN
bin/scimex Executable file

Binary file not shown.

BIN
bin/scimport Executable file

Binary file not shown.

BIN
bin/scinv Executable file

Binary file not shown.

BIN
bin/scm Executable file

Binary file not shown.

BIN
bin/scmag Executable file

Binary file not shown.

BIN
bin/scmapcut Executable file

Binary file not shown.

BIN
bin/scmaster Executable file

Binary file not shown.

84
bin/scml2inv Executable file
View File

@ -0,0 +1,84 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import getopt
import seiscomp.io
import seiscomp.datamodel
usage = """scml2inv [options] input output=stdout
Options:
-h [ --help ] Produce help message
-f Enable formatted XML output
"""
def main(argv):
formatted = False
# parse command line options
try:
opts, args = getopt.getopt(argv[1:], "hf", ["help"])
except getopt.error as msg:
sys.stderr.write(f"{msg}\n")
sys.stderr.write("for help use --help\n")
return 1
for o, a in opts:
if o in ["-h", "--help"]:
sys.stderr.write(f"{usage}\n")
return 1
elif o in ["-f"]:
formatted = True
argv = args
if len(argv) < 1:
sys.stderr.write("Missing input file\n")
return 1
ar = seiscomp.io.XMLArchive()
if not ar.open(argv[0]):
sys.stderr.write(f"Unable to parse input file: {argv[0]}\n")
return 2
obj = ar.readObject()
ar.close()
if obj is None:
sys.stderr.write(f"Empty document in {argv[0]}\n")
return 3
inv = seiscomp.datamodel.Inventory.Cast(obj)
if inv is None:
sys.stderr.write(f"No inventory found in {argv[0]}\n")
return 4
if len(argv) < 2:
output_file = "-"
else:
output_file = argv[1]
ar.create(output_file)
ar.setFormattedOutput(formatted)
ar.writeObject(inv)
ar.close()
return 0
if __name__ == "__main__":
sys.exit(main(sys.argv))

BIN
bin/scmm Executable file

Binary file not shown.

532
bin/scmssort Executable file
View File

@ -0,0 +1,532 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import argparse
import os
import re
import sys
import traceback
from seiscomp import core, io
VERBOSITY = 0
INFO = 1
DEBUG = 2
TRACE = 3
def log(level, msg):
print(f"[{level}] {msg}", file=sys.stderr)
def info_enabled():
return VERBOSITY >= INFO
def debug_enabled():
return VERBOSITY >= DEBUG
def trace_enabled():
return VERBOSITY >= TRACE
def error(msg):
log("error", msg)
def warning(msg):
log("warning", msg)
def info(msg):
if info_enabled():
log("info", msg)
def debug(msg):
if debug_enabled():
log("debug", msg)
def trace(msg):
if trace_enabled():
log("trace", msg)
def parse_args():
description = (
"Read unsorted and possibly multiplexed miniSEED files. Sort data by time "
"(multiplexing) and filter the individual records by time and/or streams. "
"Apply this before playbacks and waveform archiving."
)
epilog = """Examples:
Read data from multiple files, extract streams by time, sort records by start time, \
ignore duplicated and empty records
cat f1.mseed f2.mseed f3.mseed | \
scmssort -v -t '2007-03-28T15:48~2007-03-28T16:18' -ui > sorted.mseed
Extract streams by time, stream code and sort records by end time
echo CX.PB01..BH? | \
scmssort -v -E -t '2007-03-28T15:48~2007-03-28T16:18' \
-u -l - test.mseed > sorted.mseed
"""
p = argparse.ArgumentParser(
description=description,
epilog=epilog,
formatter_class=argparse.RawDescriptionHelpFormatter,
)
p.add_argument(
"file",
nargs="*",
default="-",
help="miniSEED file(s) to sort. If no file name or '-' is specified then "
"standard input is used.",
)
p.add_argument(
"-E",
"--sort-by-end-time",
action="store_true",
help="Sort according to record end time; default is start time.",
)
p.add_argument(
"-i",
"--ignore",
action="store_true",
help="Ignore all records which have no data samples.",
)
p.add_argument(
"-l",
"--list",
action="store",
help="Filter records by a list of stream codes specified in a file or on stdin "
"(-). One stream per line of format: NET.STA.LOC.CHA - wildcards and regular "
"expressions are considered. Example: CX.*..BH?.",
)
p.add_argument(
"-o",
"--output",
action="store",
help="Name of output file for miniSEED data (default is stdout).",
)
p.add_argument(
"-r",
"--rm",
action="store_true",
help="Remove all traces in stream list given by '--list' instead of keeping "
"them.",
)
p.add_argument(
"-t",
"--time-window",
action="store",
help="Time window to filter the records, format: <START TIME> ~ <END TIME>. "
"Time values are in UTC, must start with an ISO date and may include time "
"components starting on the hour down to milliseconds. Example: "
"2023-01-15T12:15",
)
p.add_argument(
"-u",
"--uniqueness",
action="store_true",
help="Ensure uniqueness of output by skipping duplicate records.",
)
p.add_argument(
"-v",
"--verbose",
action="count",
default=0,
help="Run in verbose mode. This option may be repeated several time to "
"increase the level of verbosity. Example: -vvv.",
)
opt = p.parse_args()
global VERBOSITY
VERBOSITY += int(opt.verbose)
if opt.rm and not opt.list:
error("The '--rm' requires the '--list' option to be present as well.")
sys.exit(1)
return opt
def rec2id(record):
return (
f"{record.networkCode()}.{record.stationCode()}."
f"{record.locationCode()}.{record.channelCode()}"
)
def str2time(timeString):
return core.Time.FromString(timeString)
def time2str(time):
"""
Convert a seiscomp.core.Time to a string
"""
if not time:
return ""
return time.toString("%Y-%m-%dT%H:%M:%S.%f000")[:23]
def read_time_window(opt):
if not opt.time_window:
return None, None
toks = opt.time_window.split("~")
if len(toks) != 2:
if len(toks) < 2:
raise ValueError(
"Time window has wrong format: Use (~) for separating start and end time"
)
raise ValueError("Time window has wrong format: Too many tildes (~) found")
start = core.Time.FromString(toks[0])
end = core.Time.FromString(toks[1])
if start is None or end is None:
error(f"Could not read time window: {toks}")
if debug_enabled():
debug(traceback.format_exc())
sys.exit(1)
return start, end
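# Illustrative: "--time-window '2007-03-28T15:48~2007-03-28T16:18'" yields the
# two corresponding core.Time bounds; without the option, (None, None) is returned.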
def read_lines(file):
# read from stdin
if file == "-":
yield from sys.stdin
return
# read from file
with open(file, "r", encoding="utf-8") as f:
yield from f
return
def compile_stream_pattern(opt):
if not opt.list:
return None
streams = []
pattern = None
try:
line_number = 0
for line in map(str.strip, read_lines(opt.list)):
line_number += 1
# ignore empty lines and comments
if not line or line.startswith("#"):
continue
toks = line.split(".")
if len(toks) != 4:
raise ValueError(
f"Invalid stream definition at line {line_number}. Expected the 4 "
"stream components NET.STA.LOC.CHA separated by a dot, "
"got: {line}."
)
streams.append(line)
if not streams:
raise ValueError("No stream definition found.")
pattern = re.compile("|".join(streams))
except Exception as e:
error(f"Could not compile pattern from stream list file '{opt.list}': {e}")
if debug_enabled():
debug(traceback.format_exc())
sys.exit(1)
info(
f"Using stream id {'DENY' if opt.rm else 'ALLOW'} list with {len(streams)} "
"stream masks"
)
if debug_enabled():
masks = "\n + ".join(streams)
debug(f"Stream masks:\n + {masks}")
return pattern
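# Illustrative: a list file with the two masks "CX.*..BH?" and "GE.UGM..*"
# compiles to the alternation re.compile("CX.*..BH?|GE.UGM..*"); each line
# is used verbatim as a regular expression.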
def record_input(file, datatype=core.Array.INT):
"""
Simple record iterator that reads from a file (or stdin in case of '-')
"""
stream = io.RecordStream.Create("file")
if not stream:
raise IOError("Failed to create a RecordStream")
if file != "-" and not os.path.exists(file):
raise FileNotFoundError("Could not find file")
if not stream.setSource(file):
raise ValueError("Could not set record stream source")
it = io.RecordInput(stream, datatype, core.Record.SAVE_RAW)
if trace_enabled():
while True:
record = it.next()
if not record:
return
trace(
f" + {time2str(record.startTime())}~{time2str(record.endTime())} "
f"{rec2id(record)}"
)
yield record
else:
while True:
record = it.next()
if not record:
return
yield record
def unique(sequence):
seen = set()
return [x for x in sequence if not (x in seen or seen.add(x))]
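# e.g. unique([3, 1, 3, 2, 1]) -> [3, 1, 2]: keeps the first occurrence of
# each item and preserves the input order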
def main():
# parse commandline
opt = parse_args()
# time window
t_min, t_max = read_time_window(opt)
if t_max and t_min and t_max <= t_min:
error(
f"Invalid time window: {time2str(t_min)}~{time2str(t_max)}\n"
" + end time must be greater than start time"
)
return False
info(f"Filtering records by time window: {time2str(t_min)}~{time2str(t_max)}")
# stream filter
pattern = compile_stream_pattern(opt)
outputFile = None
if opt.output:
outputFile = opt.output
# record buffer to be sorted later on, each item is a tuple of
# (delta_time, raw_binary_record_data)
rec_buf = []
# statistics
records_read = 0
records_window = 0
records_empty = 0
# statistics (info mode)
networks = set()
stations = set()
streams = set()
buf_min = None
buf_max = None
# make sure to read from stdin only once
files = [x for x in opt.file if x != "-"]
if len(files) == len(opt.file):
info(f"Reading data from {len(opt.file)} file(s)")
elif not files:
files = "-"
info("Reading data from stdin. Use Ctrl + C to interrupt.")
else:
info(
f"Reading data from stdin and {len(files)} files. Use Ctrl + C to "
"interrupt."
)
files.insert(opt.file.index("-"), "-")
# time or first valid record use as reference for sorting
ref_time = None
# read records from input file
for file in files:
records_file = 0
records_empty_file = 0
try:
for rec in record_input(file):
records_file += 1
stream_id = ""
# skip record if outside time window
if (t_min and rec.endTime() < t_min) or (
t_max and rec.startTime() > t_max
):
continue
if pattern or info_enabled():
records_window += 1
stream_id = rec2id(rec)
if pattern and bool(pattern.match(stream_id)) == bool(opt.rm):
continue
if not rec.sampleCount():
trace(
f" + found empty record staring at {time2str(rec.startTime())} "
f"{rec2id(rec)}"
)
records_empty_file += 1
if opt.ignore:
trace(" + ignored")
continue
# record time reference set to start or end time depending on sort
# option
t = rec.endTime() if opt.sort_by_end_time else rec.startTime()
if ref_time is None:
ref_time = core.Time(t)
t = 0
else:
t = float(t - ref_time) # float needs less memory
# buffer tuple of (time delta, binary record data)
rec_buf.append((t, rec.raw().str()))
# collect statistics for debug mode
if info_enabled():
networks.add(rec.networkCode())
stations.add(f"{rec.networkCode()}.{rec.stationCode()}")
streams.add(stream_id)
# copy of time object is required because record may be freed before
if not buf_min or rec.startTime() < buf_min:
buf_min = core.Time(rec.startTime())
if not buf_max or rec.startTime() > buf_max:
buf_max = core.Time(rec.endTime())
name = "<stdin>" if file == "-" else file
empty = f", empty: {records_empty_file}" if records_empty_file else ""
debug(f" + {name}: {records_file} records{empty}")
except Exception as e:
error(f"Could not read file '{file}: {e}")
if debug_enabled():
debug(traceback.format_exc())
return 1
records_read += records_file
records_empty += records_empty_file
# stop if no records have been read
if not records_read:
warning("No records found in input file(s).")
return 0
buf_len = len(rec_buf)
# statistics about records read and filtered
if info_enabled() and buf_len != records_read:
info(
f"""{records_read-buf_len}/{records_read} records filtered:
+ by time window: {records_read-records_window}
+ by stream id {'DENY' if opt.rm else 'ALLOW'} list: {records_window-buf_len}"""
)
# stop if no record passed the filter
if not buf_len:
warning("All records filtered, nothing to write.")
return 0
# network, station and stream information
if info_enabled():
info(
f"Found data for {len(networks)} networks, {len(stations)} stations "
f"and {len(streams)} streams",
)
if debug_enabled() and streams:
streamList = "\n + ".join(streams)
debug(f"streams:\n + {streamList}")
# sort records by time only
if buf_len > 1:
info(f"Sorting {buf_len} records")
rec_buf.sort()
# write sorted records, count duplicates and optional remove them
info(f"Writing {buf_len} records")
prev_rec = None
duplicates = 0
if outputFile:
print(f"Output data to file: {outputFile}", file=sys.stderr)
try:
out = open(outputFile, "wb")
except Exception:
print("Cannot create output file {outputFile}", file=sys.stderr)
return -1
else:
out = sys.stdout.buffer
for _t, rec in rec_buf:
if rec == prev_rec:
duplicates += 1
if opt.uniqueness:
continue
else:
prev_rec = rec
out.write(rec)
# statistics about records written
if info_enabled():
records_written = buf_len - duplicates if opt.uniqueness else buf_len
msg = f"""Wrote {records_written} records
+ time window: {time2str(buf_min)}~{time2str(buf_max)}"""
if opt.uniqueness:
msg += f"""
+ found and removed {duplicates} duplicate records"""
elif not duplicates:
msg += """
+ no duplicate records found"""
if opt.ignore:
msg += f"""
+ {records_empty} empty records found and ignored"""
info(msg)
# additional warning output
if records_empty and not opt.ignore:
warning(f"Found {records_empty} empty records - remove with: scmssort -i")
# This is an important hint which should always be printed
if duplicates > 0 and not opt.uniqueness:
warning(f"Found {duplicates} duplicate records - remove with: scmssort -u")
return 0
if __name__ == "__main__":
sys.exit(main())

BIN
bin/scmv Executable file

Binary file not shown.

BIN
bin/scolv Executable file

Binary file not shown.

BIN
bin/scorg2nll Executable file

Binary file not shown.

231
bin/scorgls Executable file
View File

@ -0,0 +1,231 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import os
import sys
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.io
import seiscomp.logging
def readXML(self):
ar = seiscomp.io.XMLArchive()
if not ar.open(self._inputFile):
print(f"Unable to open input file {self._inputFile}")
return []
obj = ar.readObject()
if obj is None:
raise TypeError("invalid input file format")
ep = seiscomp.datamodel.EventParameters.Cast(obj)
if ep is None:
raise ValueError("no event parameters found in input file")
originIDs = []
for i in range(ep.originCount()):
org = ep.origin(i)
# check time requirements
orgTime = org.time().value()
if orgTime < self._startTime:
continue
if orgTime > self._endTime:
continue
# check author requirements
if self.author:
try:
author = org.creationInfo().author()
except Exception:
continue
if author != self.author:
continue
try:
originIDs.append(org.publicID())
except Exception:
continue
return originIDs
class OriginList(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, False)
self.setDaemonEnabled(False)
self._startTime = seiscomp.core.Time()
self._endTime = seiscomp.core.Time.GMT()
self._delimiter = None
self._inputFile = None
def createCommandLineDescription(self):
self.commandline().addGroup("Input")
self.commandline().addStringOption(
"Input",
"input,i",
"Name of input XML file. Read from stdin if '-' is given. Deactivates "
"reading events from database",
)
self.commandline().addGroup("Origins")
self.commandline().addStringOption(
"Origins",
"begin",
"The lower bound of the time interval. Format: '1970-01-01 00:00:00'.",
)
self.commandline().addStringOption(
"Origins",
"end",
"The upper bound of the time interval. Format: '1970-01-01 00:00:00'.",
)
self.commandline().addStringOption(
"Origins", "author", "The author of the origins."
)
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output",
"delimiter,D",
"The delimiter of the resulting origin IDs. Default: '\\n')",
)
return True
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
try:
self._inputFile = self.commandline().optionString("input")
except RuntimeError:
pass
if self._inputFile:
self.setDatabaseEnabled(False, False)
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
start = self.commandline().optionString("begin")
except RuntimeError:
start = "1900-01-01T00:00:00Z"
self._startTime = seiscomp.core.Time.FromString(start)
if self._startTime is None:
seiscomp.logging.error(f"Wrong 'begin' format '{start}'")
return False
try:
end = self.commandline().optionString("end")
except RuntimeError:
end = "2500-01-01T00:00:00Z"
self._endTime = seiscomp.core.Time.FromString(end)
if self._endTime is None:
seiscomp.logging.error(f"Wrong 'end' format '{end}'")
return False
if self._endTime <= self._startTime:
seiscomp.logging.error(
f"Invalid search interval: {self._startTime} - {self._endTime}"
)
return False
try:
self.author = self.commandline().optionString("author")
seiscomp.logging.debug(f"Filtering origins by author {self.author}")
except RuntimeError:
self.author = False
try:
self._delimiter = self.commandline().optionString("delimiter")
except RuntimeError:
self._delimiter = "\n"
return True
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
List origin IDs available in a given time range and print to stdout."""
)
seiscomp.client.Application.printUsage(self)
print(
f"""Examples:
Print all origin IDs from year 2022 and thereafter
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \
--begin "2022-01-01 00:00:00"
Print IDs of all origins in XML file
{os.path.basename(__file__)} -i origins.xml
"""
)
def run(self):
if self._inputFile:
out = readXML(self)
print(f"{self._delimiter.join(out)}\n", file=sys.stdout)
return True
seiscomp.logging.debug(f"Search interval: {self._startTime} - {self._endTime}")
out = []
q = (
"select PublicObject.%s, Origin.* from Origin, PublicObject where Origin._oid=PublicObject._oid and Origin.%s >= '%s' and Origin.%s < '%s'"
% (
self.database().convertColumnName("publicID"),
self.database().convertColumnName("time_value"),
self.database().timeToString(self._startTime),
self.database().convertColumnName("time_value"),
self.database().timeToString(self._endTime),
)
)
if self.author:
q += " and Origin.%s = '%s' " % (
self.database().convertColumnName("creationInfo_author"),
self.query().toString(self.author),
)
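# Illustrative rendered query (MySQL-style column names, made-up times):
#   select PublicObject.publicID, Origin.* from Origin, PublicObject
#   where Origin._oid=PublicObject._oid
#   and Origin.time_value >= '2022-01-01 00:00:00'
#   and Origin.time_value < '2500-01-01 00:00:00'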
for obj in self.query().getObjectIterator(
q, seiscomp.datamodel.Origin.TypeInfo()
):
org = seiscomp.datamodel.Origin.Cast(obj)
if org:
out.append(org.publicID())
print(f"{self._delimiter.join(out)}\n", file=sys.stdout)
return True
def main():
app = OriginList(len(sys.argv), sys.argv)
app()
if __name__ == "__main__":
main()

BIN
bin/scplot Executable file

Binary file not shown.

380
bin/scproclat Executable file
View File

@ -0,0 +1,380 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import time, sys, os, traceback
import seiscomp.core, seiscomp.client, seiscomp.datamodel
import seiscomp.logging, seiscomp.system
def createDirectory(dir):
if os.access(dir, os.W_OK):
return True
try:
os.makedirs(dir)
return True
except:
return False
def timeToString(t):
return t.toString("%T.%6f")
def timeSpanToString(ts):
neg = ts.seconds() < 0 or ts.microseconds() < 0
secs = abs(ts.seconds())
days = secs // 86400
daySecs = secs % 86400
hours = daySecs // 3600
hourSecs = daySecs % 3600
mins = hourSecs // 60
secs = hourSecs % 60
usecs = abs(ts.microseconds())
if neg:
return "-%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
else:
return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
class ProcLatency(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(False, False)
self.setAutoApplyNotifierEnabled(False)
self.setInterpretNotifierEnabled(True)
self.addMessagingSubscription("PICK")
self.addMessagingSubscription("AMPLITUDE")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.addMessagingSubscription("EVENT")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self._directory = ""
self._nowDirectory = ""
self._triggeredDirectory = ""
self._logCreated = False
def createCommandLineDescription(self):
try:
self.commandline().addGroup("Storage")
self.commandline().addStringOption(
"Storage", "directory,o", "Specify the storage directory"
)
except:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
def initConfiguration(self):
if not seiscomp.client.Application.initConfiguration(self):
return False
try:
self._directory = self.configGetString("directory")
except:
pass
try:
self._logCreated = self.configGetBool("logMsgLatency")
except:
pass
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self._directory = self.commandline().optionString("directory")
except:
pass
try:
if self._directory[-1] != "/":
self._directory = self._directory + "/"
except:
pass
if self._directory:
self._directory = seiscomp.system.Environment.Instance().absolutePath(
self._directory
)
sys.stderr.write(f"Logging latencies to {self._directory}\n")
return True
def addObject(self, parentID, obj):
try:
self.logObject(parentID, obj, False)
except:
sys.stderr.write(f"{traceback.format_exc()}\n")
def updateObject(self, parentID, obj):
try:
self.logObject("", obj, True)
except:
sys.stderr.write(f"{traceback.format_exc()}\n")
def logObject(self, parentID, obj, update):
now = seiscomp.core.Time.GMT()
time = None
pick = seiscomp.datamodel.Pick.Cast(obj)
if pick:
phase = ""
try:
phase = pick.phaseHint().code()
except:
pass
created = None
if self._logCreated:
try:
created = pick.creationInfo().creationTime()
except:
pass
self.logStation(
now,
created,
pick.time().value(),
pick.publicID() + ";P;" + phase,
pick.waveformID(),
update,
)
return
amp = seiscomp.datamodel.Amplitude.Cast(obj)
if amp:
created = None
if self._logCreated:
try:
created = amp.creationInfo().creationTime()
except:
pass
try:
self.logStation(
now,
created,
amp.timeWindow().reference(),
amp.publicID()
+ ";A;"
+ amp.type()
+ ";"
+ f"{amp.amplitude().value():.2f}",
amp.waveformID(),
update,
)
except:
pass
return
org = seiscomp.datamodel.Origin.Cast(obj)
if org:
status = ""
lat = f"{org.latitude().value():.2f}"
lon = f"{org.longitude().value():.2f}"
try:
depth = "%d" % org.depth().value()
except:
pass
try:
status = seiscomp.datamodel.EOriginStatusNames.name(org.status())
except:
pass
self.logFile(
now,
org.time().value(),
org.publicID() + ";O;" + status + ";" + lat + ";" + lon + ";" + depth,
update,
)
return
mag = seiscomp.datamodel.Magnitude.Cast(obj)
if mag:
count = ""
try:
count = "%d" % mag.stationCount()
except:
pass
self.logFile(
now,
None,
mag.publicID()
+ ";M;"
+ mag.type()
+ ";"
+ f"{mag.magnitude().value():.4f}"
+ ";"
+ count,
update,
)
return
orgref = seiscomp.datamodel.OriginReference.Cast(obj)
if orgref:
self.logFile(now, None, parentID + ";OR;" + orgref.originID(), update)
return
evt = seiscomp.datamodel.Event.Cast(obj)
if evt:
self.logFile(
now,
None,
evt.publicID()
+ ";E;"
+ evt.preferredOriginID()
+ ";"
+ evt.preferredMagnitudeID(),
update,
)
return
def logStation(self, received, created, triggered, text, waveformID, update):
streamID = (
waveformID.networkCode()
+ "."
+ waveformID.stationCode()
+ "."
+ waveformID.locationCode()
+ "."
+ waveformID.channelCode()
)
aNow = received.get()
aTriggered = triggered.get()
nowDirectory = self._directory + "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
triggeredDirectory = (
self._directory + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"
)
logEntry = timeSpanToString(received - triggered) + ";"
if created is not None:
logEntry = logEntry + timeSpanToString(received - created) + ";"
else:
logEntry = logEntry + ";"
if update:
logEntry = logEntry + "U"
else:
logEntry = logEntry + "A"
logEntry = logEntry + ";" + text
sys.stdout.write(f"{timeToString(received)};{logEntry}\n")
if nowDirectory != self._nowDirectory:
if not createDirectory(nowDirectory):
seiscomp.logging.error(f"Unable to create directory {nowDirectory}")
return False
self._nowDirectory = nowDirectory
self.writeLog(
self._nowDirectory + streamID + ".rcv",
timeToString(received) + ";" + logEntry,
)
if triggeredDirectory != self._triggeredDirectory:
if not createDirectory(triggeredDirectory):
seiscomp.logging.error(
f"Unable to create directory {triggeredDirectory}"
)
return False
self._triggeredDirectory = triggeredDirectory
self.writeLog(
self._triggeredDirectory + streamID + ".trg",
timeToString(triggered) + ";" + logEntry,
)
return True
def logFile(self, received, triggered, text, update):
aNow = received.get()
nowDirectory = self._directory + "/".join(["%.2d" % i for i in aNow[1:4]]) + "/"
triggeredDirectory = None
# logEntry = timeToString(received)
logEntry = ""
if triggered is not None:
aTriggered = triggered.get()
triggeredDirectory = (
self._directory + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"
)
logEntry = logEntry + timeSpanToString(received - triggered)
logEntry = logEntry + ";"
if update:
logEntry = logEntry + "U"
else:
logEntry = logEntry + "A"
logEntry = logEntry + ";" + text
sys.stdout.write(f"{timeToString(received)};{logEntry}\n")
if nowDirectory != self._nowDirectory:
if not createDirectory(nowDirectory):
seiscomp.logging.error(f"Unable to create directory {nowDirectory}")
return False
self._nowDirectory = nowDirectory
self.writeLog(
self._nowDirectory + "objects.rcv", timeToString(received) + ";" + logEntry
)
if triggeredDirectory:
if triggeredDirectory != self._triggeredDirectory:
if not createDirectory(triggeredDirectory):
seiscomp.logging.error(
f"Unable to create directory {triggeredDirectory}"
)
return False
self._triggeredDirectory = triggeredDirectory
self.writeLog(
self._triggeredDirectory + "objects.trg",
timeToString(triggered) + ";" + logEntry,
)
return True
def writeLog(self, file, text):
with open(file, "a", encoding="utf-8") as of:
of.write(text)
of.write("\n")
app = ProcLatency(len(sys.argv), sys.argv)
sys.exit(app())

BIN
bin/scqc Executable file

Binary file not shown.

BIN
bin/scqcv Executable file

Binary file not shown.

BIN
bin/scquery Executable file

Binary file not shown.

292
bin/scqueryqc Executable file
View File

@ -0,0 +1,292 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) 2021 by gempa GmbH #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# adopted from scqcquery #
# Author: Dirk Roessler, gempa GmbH #
# Email: roessler@gempa.de #
############################################################################
import os
import sys
import re
import seiscomp.core
import seiscomp.client
import seiscomp.io
import seiscomp.logging
import seiscomp.datamodel
qcParamsDefault = (
"latency,delay,timing,offset,rms,availability,"
"'gaps count','gaps interval','gaps length',"
"'overlaps count','overlaps interval','overlaps length',"
"'spikes count','spikes interval','spikes amplitude'"
)
def getStreamsFromInventory(self):
try:
dbr = seiscomp.datamodel.DatabaseReader(self.database())
inv = seiscomp.datamodel.Inventory()
dbr.loadNetworks(inv)
streamList = set()
for inet in range(inv.networkCount()):
network = inv.network(inet)
dbr.load(network)
for ista in range(network.stationCount()):
station = network.station(ista)
try:
start = station.start()
except Exception:
continue
# convert the string query bounds for comparison with the epoch times
qStart = seiscomp.core.Time.FromString(self._start)
qEnd = seiscomp.core.Time.FromString(self._end)
if qEnd is not None and start > qEnd:
continue
try:
if qStart is not None and station.end() < qStart:
continue
except Exception:
pass
for iloc in range(station.sensorLocationCount()):
location = station.sensorLocation(iloc)
for istr in range(location.streamCount()):
stream = location.stream(istr)
streamID = (
network.code()
+ "."
+ station.code()
+ "."
+ location.code()
+ "."
+ stream.code()
)
streamList.add(streamID)
return list(streamList)
except Exception:
return False
class WfqQuery(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(False)
self.setDatabaseEnabled(True, False)
self.setLoggingToStdErr(True)
self.setDaemonEnabled(False)
self._streams = False
self._fromInventory = False
self._outfile = "-"
self._parameter = qcParamsDefault
self._start = "1900-01-01T00:00:00Z"
self._end = str(seiscomp.core.Time.GMT())
self._formatted = False
def createCommandLineDescription(self):
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output",
"output,o",
"output file name for XML. Writes to stdout if not given.",
)
self.commandline().addOption("Output", "formatted,f", "write formatted XML")
self.commandline().addGroup("Query")
self.commandline().addStringOption(
"Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'"
)
self.commandline().addStringOption(
"Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'"
)
self.commandline().addStringOption(
"Query",
"stream-id,i",
"Waveform stream ID to search for QC parameters: net.sta.loc.cha -"
" [networkCode].[stationCode].[sensorLocationCode].[channelCode]. "
"Provide a single ID or a comma-separated list. Overrides "
"--streams-from-inventory",
)
self.commandline().addStringOption(
"Query",
"parameter,p",
"QC parameter to output: (e.g. delay, rms, 'gaps count' ...). "
"Provide a single parameter or a comma-separated list. Defaults "
"apply if parameter is not given.",
)
self.commandline().addOption(
"Query",
"streams-from-inventory",
"Read streams from inventory. Superseded by stream-id.",
)
return True
def printUsage(self):
print(
f"""Usage:
{os.path.basename(__file__)} [options]
Query a database for waveform quality control (QC) parameters.""",
file=sys.stderr,
)
seiscomp.client.Application.printUsage(self)
print(
f"""Default QC parameters: {qcParamsDefault}\n""",
file=sys.stderr,
)
print(
f"""Examples:
Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from \
'2021-11-20 00:00:00' until current
{os.path.basename(__file__)} -d localhost -b '2021-11-20 00:00:00' -p rms,delay \
-i AU.AS18..SHZ,AU.AS19..SHZ""",
file=sys.stderr,
)
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
try:
self._streams = self.commandline().optionString("stream-id").split(",")
except RuntimeError:
pass
try:
self._fromInventory = self.commandline().hasOption("streams-from-inventory")
except RuntimeError:
pass
if not self._streams and not self._fromInventory:
print(
"Provide streamID(s): --stream-id or --streams-from-inventory",
file=sys.stderr,
)
return False
try:
self._outfile = self.commandline().optionString("output")
except RuntimeError:
print("No output file name given: Sending to stdout", file=sys.stderr)
try:
self._start = self.commandline().optionString("begin")
except RuntimeError:
print(
f"No begin time given, considering: {self._start}",
file=sys.stderr,
)
try:
self._end = self.commandline().optionString("end")
except RuntimeError:
print(
f"No end time given, considering 'now': {self._end}",
file=sys.stderr,
)
try:
self._parameter = self.commandline().optionString("parameter")
except RuntimeError:
print("No QC parameter given, using default", file=sys.stderr)
try:
self._formatted = self.commandline().hasOption("formatted")
except RuntimeError:
pass
return True
def run(self):
if not self.query():
print("No database connection!\n", file=sys.stderr)
return False
streams = self._streams
if not streams and self._fromInventory:
try:
streams = getStreamsFromInventory(self)
except RuntimeError:
print("No streams read from database!\n", file=sys.stderr)
return False
if not streams:
print("Empty stream list")
return False
for stream in streams:
if re.search("[*?]", stream):
print(
f"Wildcards in streamID are not supported: {stream}\n",
file=sys.stderr,
)
return False
print("Request:", file=sys.stderr)
print(f" streams: {str(streams)}", file=sys.stderr)
print(f" number of streams: {len(streams)}", file=sys.stderr)
print(f" begin time: {str(self._start)}", file=sys.stderr)
print(f" end time: {str(self._end)}", file=sys.stderr)
print(f" parameters: {str(self._parameter)}", file=sys.stderr)
print("Output:", file=sys.stderr)
print(f" file: {self._outfile}", file=sys.stderr)
print(f" formatted XML: {self._formatted}", file=sys.stderr)
# create archive
xarc = seiscomp.io.XMLArchive()
if not xarc.create(self._outfile, True, True):
print(f"Unable to write XML to {self._outfile}!\n", file=sys.stderr)
return False
xarc.setFormattedOutput(self._formatted)
qc = seiscomp.datamodel.QualityControl()
# write parameters
for parameter in self._parameter.split(","):
for stream in streams:
start = seiscomp.core.Time.FromString(self._start)
if start is None:
seiscomp.logging.error(f"Wrong 'start' format '{self._start}'")
return False
end = seiscomp.core.Time.FromString(self._end)
if end is None:
seiscomp.logging.error(f"Wrong 'end' format '{self._end}'")
return False
(net, sta, loc, cha) = stream.split(".")
it = self.query().getWaveformQuality(
seiscomp.datamodel.WaveformStreamID(net, sta, loc, cha, ""),
parameter,
start,
end,
)
while it.get():
try:
wfq = seiscomp.datamodel.WaveformQuality.Cast(it.get())
qc.add(wfq)
except Exception:
pass
it.step()
xarc.writeObject(qc)
xarc.close()
return True
app = WfqQuery(len(sys.argv), sys.argv)
sys.exit(app())

BIN
bin/screloc Executable file

Binary file not shown.

BIN
bin/screpick Executable file

Binary file not shown.

BIN
bin/scrttv Executable file

Binary file not shown.

126
bin/scsendjournal Executable file
View File

@ -0,0 +1,126 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
class SendJournal(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setDatabaseEnabled(False, False)
self.setMessagingEnabled(True)
self.setMessagingUsername("")
self.setPrimaryMessagingGroup("EVENT")
self.params = None
self.filename = None
def createCommandLineDescription(self):
self.commandline().addGroup("Input")
self.commandline().addStringOption(
"Input",
"input,i",
"Read parameters from given file instead of command line.",
)
def init(self):
if not seiscomp.client.Application.init(self):
return False
return True
def printUsage(self):
print(
"""Usage:
scsendjournal [options] {objectID} {action} [parameters]
Send journaling information to the messaging to manipulate SeisComP objects like events and origins."""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Set the type of the event with ID gempa2021abcd to 'earthquake'
scsendjournal -H localhost gempa2021abcd EvType "earthquake"
Set the type of the event with ID gempa2021abcd and read the type from file
scsendjournal -H localhost gempa2021abcd EvType -i input.txt
"""
)
def run(self):
msg = seiscomp.datamodel.NotifierMessage()
entry = seiscomp.datamodel.JournalEntry()
entry.setCreated(seiscomp.core.Time.GMT())
entry.setObjectID(self.params[0])
entry.setSender(self.author())
entry.setAction(self.params[1])
print(
f"Sending entry ({entry.objectID()},{entry.action()})",
file=sys.stderr,
)
if self.filename:
try:
with open(self.filename, "r") as f:
entry.setParameters(f.read().rstrip())
except Exception as err:
print(f"{str(err)}", file=sys.stderr)
return False
elif len(self.params) > 2:
entry.setParameters(self.params[2])
n = seiscomp.datamodel.Notifier(
seiscomp.datamodel.Journaling.ClassName(), seiscomp.datamodel.OP_ADD, entry
)
msg.attach(n)
self.connection().send(msg)
return True
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
try:
self.filename = self.commandline().optionString("input")
except RuntimeError:
pass
self.params = self.commandline().unrecognizedOptions()
if len(self.params) < 2:
print(
f"{self.name()} [opts] {{objectID}} {{action}} [parameters]",
file=sys.stderr,
)
return False
return True
def main(argc, argv):
app = SendJournal(argc, argv)
return app()
if __name__ == "__main__":
sys.exit(main(len(sys.argv), sys.argv))

109
bin/scsendorigin Executable file
View File

@ -0,0 +1,109 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import seiscomp.core
import seiscomp.datamodel
import seiscomp.client
import seiscomp.logging
class SendOrigin(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setDatabaseEnabled(False, False)
self.setMessagingEnabled(True)
self.setPrimaryMessagingGroup("GUI")
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
cstr = self.commandline().optionString("coord")
tstr = self.commandline().optionString("time")
except:
print(
"Must specify origin using '--coord lat,lon,dep --time time'",
file=sys.stderr,
)
return False
self.origin = seiscomp.datamodel.Origin.Create()
ci = seiscomp.datamodel.CreationInfo()
ci.setAgencyID(self.agencyID())
ci.setCreationTime(seiscomp.core.Time.GMT())
self.origin.setCreationInfo(ci)
lat, lon, dep = list(map(float, cstr.split(",")))
self.origin.setLongitude(seiscomp.datamodel.RealQuantity(lon))
self.origin.setLatitude(seiscomp.datamodel.RealQuantity(lat))
self.origin.setDepth(seiscomp.datamodel.RealQuantity(dep))
time = seiscomp.core.Time.FromString(tstr)
if time is None:
seiscomp.logging.error(f"Wrong time format: '{tstr}'")
return False
self.origin.setTime(seiscomp.datamodel.TimeQuantity(time))
return True
def createCommandLineDescription(self):
try:
self.commandline().addGroup("Parameters")
self.commandline().addStringOption(
"Parameters", "coord", "Latitude,longitude,depth of origin"
)
self.commandline().addStringOption("Parameters", "time", "time of origin")
except:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
def printUsage(self):
print(
"""Usage:
scsendorigin [options]
Create an artificial origin and send it to the messaging"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Send an artificial origin with hypocenter parameters to the messaging
scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10
"""
)
def run(self):
msg = seiscomp.datamodel.ArtificialOriginMessage(self.origin)
self.connection().send(msg)
seiscomp.logging.debug(
f"""Origin sent with
lat: {self.origin.latitude().value()}
lon: {self.origin.longitude().value()}
depth: {self.origin.depth().value()}
time: {self.origin.time().value().iso()}"""
)
return True
app = SendOrigin(len(sys.argv), sys.argv)
# app.setName("scsendorigin")
app.setMessagingUsername("scsendorg")
sys.exit(app())

BIN
bin/scshowevent Executable file

Binary file not shown.

414
bin/scsohlog Executable file
View File

@ -0,0 +1,414 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys, os, re
import seiscomp.core, seiscomp.client, seiscomp.logging, seiscomp.system
"""
Monitor application that connects to the messaging and collects all
information on the STATUS_GROUP to create an XML file every N seconds.
It can furthermore call a configured script to trigger processing of the
produced XML file.
"""
inputRegEx = re.compile(r"in\((?P<params>[^)]*)\)")
outputRegEx = re.compile(r"out\((?P<params>[^)]*)\)")
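# These match object log entries of the form (illustrative):
#   in(name:pick,chan:...,cnt:42,avg:0.5,tw:600,last:...)
# from which the comma-separated key:value pairs are extracted below.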
# Define all units of measure for available system SOH tags. Tags that are
# not given here are not processed.
Tests = {
"cpuusage": "%",
"clientmemoryusage": "kB",
"sentmessages": "cnt",
"receivedmessages": "cnt",
"messagequeuesize": "cnt",
"objectcount": "cnt",
"uptime": "s",
"dbadds": "row/s",
"dbupdates": "row/s",
"dbdeletes": "row/s",
}
# ----------------------------------------------------------------------------
# Class TestLog to hold the properties of a test. It also creates XML.
# ----------------------------------------------------------------------------
class TestLog:
def __init__(self):
self.value = None
self.uom = None
self.update = None
def toXML(self, f, name):
f.write(f'<test name="{name}"')
if self.value:
try:
# Try to convert to float
fvalue = float(self.value)
if fvalue % 1.0 >= 1e-6:
f.write(f' value="{fvalue:f}"')
else:
f.write(' value="%d"' % int(fvalue))
except (TypeError, ValueError):
f.write(f' value="{self.value}"')
if self.uom:
f.write(f' uom="{self.uom}"')
if self.update:
f.write(f' updateTime="{self.update}"')
f.write("/>")
# ----------------------------------------------------------------------------
# Class ObjectLog to hold the properties of an object log. It also creates
# XML.
# ----------------------------------------------------------------------------
class ObjectLog:
def __init__(self):
self.count = None
self.average = None
self.timeWindow = None
self.last = None
self.update = None
def toXML(self, f, name, channel):
f.write("<object")
if name:
f.write(f' name="{name}"')
if channel:
f.write(f' channel="{channel}"')
if self.count is not None:
f.write(f' count="{self.count}"')
if self.timeWindow is not None:
f.write(f' timeWindow="{self.timeWindow}"')
if self.average is not None:
f.write(f' average="{self.average}"')
if self.last:
f.write(f' lastTime="{self.last}"')
f.write(f' updateTime="{self.update}"')
f.write("/>")
# ----------------------------------------------------------------------------
# Class Client that holds all tests and object logs of a particular client
# (messaging user name).
# ----------------------------------------------------------------------------
class Client:
def __init__(self):
self.pid = None
self.progname = None
self.host = None
self.inputLogs = dict()
self.outputLogs = dict()
self.tests = dict()
# ----------------------------------------------------------------------------
# Update/add (system) tests based on the passed tests dictionary retrieved
# from a status message.
# ----------------------------------------------------------------------------
def updateTests(self, updateTime, tests):
for name, value in list(tests.items()):
if name == "pid":
self.pid = value
elif name == "programname":
self.progname = value
elif name == "hostname":
self.host = value
if name not in Tests:
continue
# Convert d:h:m:s to seconds
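# e.g. "1:02:03:04" -> 1*86400 + 2*3600 + 3*60 + 4 = 93784 seconds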
if name == "uptime":
try:
t = [int(v) for v in value.split(":")]
except ValueError:
continue
if len(t) != 4:
continue
value = str(t[0] * 86400 + t[1] * 3600 + t[2] * 60 + t[3])
if name not in self.tests:
log = TestLog()
log.uom = Tests[name]
self.tests[name] = log
else:
log = self.tests[name]
log.value = value
log.update = updateTime
# ----------------------------------------------------------------------------
# Update/add object logs based on the passed log text. The content is parsed.
# ----------------------------------------------------------------------------
def updateObjects(self, updateTime, log):
# Check input structure
v = inputRegEx.search(log)
if not v:
# Check out structure
v = outputRegEx.search(log)
if not v:
return
logs = self.outputLogs
else:
logs = self.inputLogs
try:
tmp = v.group("params").split(",")
except:
return
params = dict()
for p in tmp:
try:
param, value = p.split(":", 1)
except ValueError:
continue
params[param] = value
name = params.get("name", "")
channel = params.get("chan", "")
if (name, channel) not in logs:
logObj = ObjectLog()
logs[(name, channel)] = logObj
else:
logObj = logs[(name, channel)]
logObj.update = updateTime
logObj.count = params.get("cnt")
logObj.average = params.get("avg")
logObj.timeWindow = params.get("tw")
logObj.last = params.get("last")
def toXML(self, f, name):
f.write(f'<service name="{name}"')
if self.host:
f.write(f' host="{self.host}"')
if self.pid:
f.write(f' pid="{self.pid}"')
if self.progname:
f.write(f' prog="{self.progname}"')
f.write(">")
for name, log in list(self.tests.items()):
log.toXML(f, name)
if len(self.inputLogs) > 0:
f.write("<input>")
for id, log in list(self.inputLogs.items()):
log.toXML(f, id[0], id[1])
f.write("</input>")
if len(self.outputLogs) > 0:
f.write("<output>")
for id, log in list(self.outputLogs.items()):
log.toXML(f, id[0], id[1])
f.write("</output>")
f.write("</service>")
# ----------------------------------------------------------------------------
# SC3 application class Monitor
# ----------------------------------------------------------------------------
class Monitor(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setDatabaseEnabled(False, False)
self.setMembershipMessagesEnabled(True)
self.addMessagingSubscription(seiscomp.client.Protocol.STATUS_GROUP)
self.setMessagingUsername("")
self.setPrimaryMessagingGroup(seiscomp.client.Protocol.LISTENER_GROUP)
self._clients = dict()
self._outputScript = None
self._outputFile = "@LOGDIR@/server.xml"
self._outputInterval = 60
def createCommandLineDescription(self):
try:
self.commandline().addGroup("Output")
self.commandline().addStringOption(
"Output", "file,o", "Specify the output file to create"
)
self.commandline().addIntOption(
"Output",
"interval,i",
"Specify the output interval in seconds (default: 60)",
)
self.commandline().addStringOption(
"Output",
"script",
"Specify an output script to be called after the output file is generated",
)
except:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
return True
def initConfiguration(self):
if not seiscomp.client.Application.initConfiguration(self):
return False
try:
self._outputFile = self.configGetString("monitor.output.file")
except:
pass
try:
self._outputInterval = self.configGetInt("monitor.output.interval")
except:
pass
try:
self._outputScript = self.configGetString("monitor.output.script")
except:
pass
return True
def init(self):
if not seiscomp.client.Application.init(self):
return False
try:
self._outputFile = self.commandline().optionString("file")
except:
pass
try:
self._outputInterval = self.commandline().optionInt("interval")
except:
pass
try:
self._outputScript = self.commandline().optionString("script")
except:
pass
self._outputFile = seiscomp.system.Environment.Instance().absolutePath(
self._outputFile
)
seiscomp.logging.info(f"Output file: {self._outputFile}")
if self._outputScript:
self._outputScript = seiscomp.system.Environment.Instance().absolutePath(
self._outputScript
)
seiscomp.logging.info(f"Output script: {self._outputScript}")
self._monitor = self.addInputObjectLog(
"status", seiscomp.client.Protocol.STATUS_GROUP
)
self.enableTimer(self._outputInterval)
seiscomp.logging.info(
"Starting output timer with %d secs" % self._outputInterval
)
return True
def printUsage(self):
print(
"""Usage:
scsohlog [options]
Connect to the messaging and collect information sent from connected clients"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Create an output XML file every 60 seconds and execute a custom script to process the XML file
scsohlog -o stat.xml -i 60 --script process-stat.sh
"""
)
def handleNetworkMessage(self, msg):
# A state of health message
if msg.type == seiscomp.client.Packet.Status:
data = filter(None, msg.payload.split("&"))
self.updateStatus(msg.subject, data)
# If a client disconnected, remove it from the list
elif msg.type == seiscomp.client.Packet.Disconnected:
if msg.subject in self._clients:
del self._clients[msg.subject]
def handleDisconnect(self):
# If we got disconnected all client states are deleted
self._clients = dict()
# ----------------------------------------------------------------------------
# Timeout handler called by the Application class.
# Write XML to configured output file and trigger configured script.
# ----------------------------------------------------------------------------
def handleTimeout(self):
if self._outputFile == "-":
self.toXML(sys.stdout)
sys.stdout.write("\n")
return
try:
f = open(self._outputFile, "w")
except OSError:
seiscomp.logging.error(
f"Unable to create output file: {self._outputFile}"
)
return
self.toXML(f)
f.close()
if self._outputScript:
os.system(self._outputScript + " " + self._outputFile)
# ----------------------------------------------------------------------------
# Write XML to stream f
# ----------------------------------------------------------------------------
def toXML(self, f):
f.write('<?xml version="1.0" encoding="UTF-8"?>')
f.write(f'<server name="seiscomp" host="{self.messagingURL()}">')
for name, client in list(self._clients.items()):
client.toXML(f, name)
f.write("</server>")
def updateStatus(self, name, items):
if name not in self._clients:
self._clients[name] = Client()
now = seiscomp.core.Time.GMT()
client = self._clients[name]
self.logObject(self._monitor, now)
params = dict()
objs = []
for t in items:
try:
param, value = t.split("=", 1)
params[param] = value
except ValueError:
objs.append(t)
if "time" in params:
update = params["time"]
del params["time"]
else:
update = now.iso()
client.updateTests(update, params)
for o in objs:
client.updateObjects(update, o)
# client.toXML(sys.stdout, name)
app = Monitor(len(sys.argv), sys.argv)
sys.exit(app())

541
bin/scvoice Executable file
View File

@ -0,0 +1,541 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import subprocess
import traceback
from seiscomp import client, core, datamodel, logging, seismology, system, math
class VoiceAlert(client.Application):
def __init__(self, argc, argv):
client.Application.__init__(self, argc, argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setLoadRegionsEnabled(True)
self.setMessagingUsername("")
self.setPrimaryMessagingGroup(client.Protocol.LISTENER_GROUP)
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
self.setLoadCitiesEnabled(True)
self.setLoadRegionsEnabled(True)
self._ampType = "snr"
self._citiesMaxDist = 20
self._citiesMinPopulation = 50000
self._cache = None
self._eventDescriptionPattern = None
self._ampScript = None
self._alertScript = None
self._eventScript = None
self._ampProc = None
self._alertProc = None
self._eventProc = None
self._newWhenFirstSeen = False
self._prevMessage = {}
self._agencyIDs = []
def createCommandLineDescription(self):
self.commandline().addOption(
"Generic",
"first-new",
"calls an event a new event when it is " "seen the first time",
)
self.commandline().addGroup("Alert")
self.commandline().addStringOption(
"Alert",
"amp-type",
"specify the amplitude type to listen to",
self._ampType,
)
self.commandline().addStringOption(
"Alert",
"amp-script",
"specify the script to be called when a "
"stationamplitude arrived, network-, stationcode and amplitude are "
"passed as parameters $1, $2 and $3",
)
self.commandline().addStringOption(
"Alert",
"alert-script",
"specify the script to be called when a "
"preliminary origin arrived, latitude and longitude are passed as "
"parameters $1 and $2",
)
self.commandline().addStringOption(
"Alert",
"event-script",
"specify the script to be called when an "
"event has been declared; the message string, a flag (1=new event, "
"0=update event), the EventID, the arrival count and the magnitude "
"(optional when set) are passed as parameter $1, $2, $3, $4 and $5",
)
self.commandline().addGroup("Cities")
self.commandline().addStringOption(
"Cities",
"max-dist",
"maximum distance for using the distance " "from a city to the earthquake",
)
self.commandline().addStringOption(
"Cities",
"min-population",
"minimum population for a city to " "become a point of interest",
)
self.commandline().addGroup("Debug")
self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
return True
def init(self):
if not client.Application.init(self):
return False
try:
self._newWhenFirstSeen = self.configGetBool("firstNew")
except BaseException:
pass
try:
agencyIDs = self.configGetStrings("agencyIDs")
for item in agencyIDs:
item = item.strip()
if item not in self._agencyIDs:
self._agencyIDs.append(item)
except BaseException:
pass
try:
if self.commandline().hasOption("first-new"):
self._newWhenFirstSeen = True
except BaseException:
pass
try:
self._eventDescriptionPattern = self.configGetString("poi.message")
except BaseException:
pass
try:
self._citiesMaxDist = self.configGetDouble("poi.maxDist")
except BaseException:
pass
try:
self._citiesMaxDist = self.commandline().optionDouble("max-dist")
except BaseException:
pass
try:
self._citiesMinPopulation = self.configGetInt("poi.minPopulation")
except BaseException:
pass
try:
self._citiesMinPopulation = self.commandline().optionInt("min-population")
except BaseException:
pass
try:
self._ampType = self.commandline().optionString("amp-type")
except BaseException:
pass
try:
self._ampScript = self.commandline().optionString("amp-script")
except BaseException:
try:
self._ampScript = self.configGetString("scripts.amplitude")
except BaseException:
logging.warning("No amplitude script defined")
if self._ampScript:
self._ampScript = system.Environment.Instance().absolutePath(
self._ampScript
)
try:
self._alertScript = self.commandline().optionString("alert-script")
except BaseException:
try:
self._alertScript = self.configGetString("scripts.alert")
except BaseException:
logging.warning("No alert script defined")
if self._alertScript:
self._alertScript = system.Environment.Instance().absolutePath(
self._alertScript
)
try:
self._eventScript = self.commandline().optionString("event-script")
except BaseException:
try:
self._eventScript = self.configGetString("scripts.event")
logging.info(f"Using event script: {self._eventScript}")
except BaseException:
logging.warning("No event script defined")
if self._eventScript:
self._eventScript = system.Environment.Instance().absolutePath(
self._eventScript
)
logging.info("Creating ringbuffer for 100 objects")
if not self.query():
logging.warning("No valid database interface to read from")
self._cache = datamodel.PublicObjectRingBuffer(self.query(), 100)
if self._ampScript and self.connection():
self.connection().subscribe("AMPLITUDE")
if self._newWhenFirstSeen:
logging.info("A new event is declared when I see it the first time")
if not self._agencyIDs:
logging.info("agencyIDs: []")
else:
logging.info(f"agencyIDs: {' '.join(self._agencyIDs)}")
return True
def printUsage(self):
print(
"""Usage:
scvoice [options]
Alert the user acoustically in real time.
"""
)
client.Application.printUsage(self)
print(
"""Examples:
Execute scvoice on command line with debug output
scvoice --debug
"""
)
def run(self):
try:
try:
eventID = self.commandline().optionString("eventid")
event = self._cache.get(datamodel.Event, eventID)
if event:
self.notifyEvent(event)
except BaseException:
pass
return client.Application.run(self)
except BaseException:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
return False
def runAmpScript(self, net, sta, amp):
if not self._ampScript:
return
if self._ampProc is not None:
if self._ampProc.poll() is None:
logging.warning("AmplitudeScript still in progress -> skipping message")
return
try:
self._ampProc = subprocess.Popen([self._ampScript, net, sta, f"{amp:.2f}"])
logging.info("Started amplitude script with pid %d" % self._ampProc.pid)
except BaseException:
logging.error(f"Failed to start amplitude script '{self._ampScript}'")
def runAlert(self, lat, lon):
if not self._alertScript:
return
if self._alertProc is not None:
if self._alertProc.poll() is None:
logging.warning("AlertScript still in progress -> skipping message")
return
try:
self._alertProc = subprocess.Popen(
[self._alertScript, f"{lat:.1f}", f"{lon:.1f}"]
)
logging.info("Started alert script with pid %d" % self._alertProc.pid)
except BaseException:
logging.error(f"Failed to start alert script '{self._alertScript}'")
def done(self):
self._cache = None
client.Application.done(self)
def handleMessage(self, msg):
try:
dm = core.DataMessage.Cast(msg)
if dm:
for att in dm:
org = datamodel.Origin.Cast(att)
if not org:
continue
try:
if org.evaluationStatus() == datamodel.PRELIMINARY:
self.runAlert(
org.latitude().value(), org.longitude().value()
)
except BaseException:
pass
# ao = datamodel.ArtificialOriginMessage.Cast(msg)
# if ao:
# org = ao.origin()
# if org:
# self.runAlert(org.latitude().value(), org.longitude().value())
# return
client.Application.handleMessage(self, msg)
except BaseException:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def addObject(self, parentID, arg0):
# pylint: disable=W0622
try:
obj = datamodel.Amplitude.Cast(arg0)
if obj:
if obj.type() == self._ampType:
logging.debug(
f"got new {self._ampType} amplitude '{obj.publicID()}'"
)
self.notifyAmplitude(obj)
obj = datamodel.Origin.Cast(arg0)
if obj:
self._cache.feed(obj)
logging.debug(f"got new origin '{obj.publicID()}'")
try:
if obj.evaluationStatus() == datamodel.PRELIMINARY:
self.runAlert(obj.latitude().value(), obj.longitude().value())
except BaseException:
pass
return
obj = datamodel.Magnitude.Cast(arg0)
if obj:
self._cache.feed(obj)
logging.debug(f"got new magnitude '{obj.publicID()}'")
return
obj = datamodel.Event.Cast(arg0)
if obj:
org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
agencyID = org.creationInfo().agencyID()
logging.debug(f"got new event '{obj.publicID()}'")
if not self._agencyIDs or agencyID in self._agencyIDs:
self.notifyEvent(obj, True)
except BaseException:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def updateObject(self, parentID, arg0):
try:
obj = datamodel.Event.Cast(arg0)
if obj:
org = self._cache.get(datamodel.Origin, obj.preferredOriginID())
agencyID = org.creationInfo().agencyID()
logging.debug(f"update event '{obj.publicID()}'")
if not self._agencyIDs or agencyID in self._agencyIDs:
self.notifyEvent(obj, False)
except BaseException:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
def notifyAmplitude(self, amp):
self.runAmpScript(
amp.waveformID().networkCode(),
amp.waveformID().stationCode(),
amp.amplitude().value(),
)
def notifyEvent(self, evt, newEvent=True):
try:
org = self._cache.get(datamodel.Origin, evt.preferredOriginID())
if not org:
logging.warning(
"unable to get origin %s, ignoring event "
"message" % evt.preferredOriginID()
)
return
preliminary = False
try:
if org.evaluationStatus() == datamodel.PRELIMINARY:
preliminary = True
except BaseException:
pass
if not preliminary:
nmag = self._cache.get(datamodel.Magnitude, evt.preferredMagnitudeID())
if nmag:
mag = nmag.magnitude().value()
mag = f"magnitude {mag:.1f}"
else:
if len(evt.preferredMagnitudeID()) > 0:
logging.warning(
"unable to get magnitude %s, ignoring event "
"message" % evt.preferredMagnitudeID()
)
else:
logging.warning(
"no preferred magnitude yet, ignoring event message"
)
return
# keep track of old events
if self._newWhenFirstSeen:
if evt.publicID() in self._prevMessage:
newEvent = False
else:
newEvent = True
dsc = seismology.Regions.getRegionName(
org.latitude().value(), org.longitude().value()
)
if self._eventDescriptionPattern:
try:
city, dist, _ = self.nearestCity(
org.latitude().value(),
org.longitude().value(),
self._citiesMaxDist,
self._citiesMinPopulation,
)
if city:
dsc = self._eventDescriptionPattern
region = seismology.Regions.getRegionName(
org.latitude().value(), org.longitude().value()
)
distStr = str(int(math.deg2km(dist)))
dsc = (
dsc.replace("@region@", region)
.replace("@dist@", distStr)
.replace("@poi@", city.name())
)
except BaseException:
pass
logging.debug(f"desc: {dsc}")
dep = org.depth().value()
now = core.Time.GMT()
otm = org.time().value()
dt = (now - otm).seconds()
# if dt > dtmax:
# return
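# Humanize the origin age, e.g. 4000 s -> "1 hours 6 minutes ago",
# 300 s -> "5 minutes ago", 45 s -> "45 seconds ago"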
if dt > 3600:
dt = "%d hours %d minutes ago" % (int(dt / 3600), int((dt % 3600) / 60))
elif dt > 120:
dt = "%d minutes ago" % int(dt / 60)
else:
dt = "%d seconds ago" % int(dt)
if preliminary:
message = "earthquake, preliminary, %%s, %s" % dsc
else:
message = "earthquake, %%s, %s, %s, depth %d kilometers" % (
dsc,
mag,
int(dep + 0.5),
)
# at this point the message lacks the "ago" part
if (
evt.publicID() in self._prevMessage
and self._prevMessage[evt.publicID()] == message
):
logging.info(f"Suppressing repeated message '{message}'")
return
self._prevMessage[evt.publicID()] = message
message = message % dt # fill the "ago" part
logging.info(message)
if not self._eventScript:
return
if self._eventProc is not None:
if self._eventProc.poll() is None:
logging.warning("EventScript still in progress -> skipping message")
return
try:
param2 = 0
param3 = 0
param4 = ""
if newEvent:
param2 = 1
org = self._cache.get(datamodel.Origin, evt.preferredOriginID())
if org:
try:
param3 = org.quality().associatedPhaseCount()
except BaseException:
pass
nmag = self._cache.get(datamodel.Magnitude, evt.preferredMagnitudeID())
if nmag:
param4 = f"{nmag.magnitude().value():.1f}"
self._eventProc = subprocess.Popen(
[
self._eventScript,
message,
"%d" % param2,
evt.publicID(),
"%d" % param3,
param4,
]
)
logging.info("Started event script with pid %d" % self._eventProc.pid)
except BaseException:
logging.error(
"Failed to start event script '%s %s %d %d %s'"
% (self._eventScript, message, param2, param3, param4)
)
except BaseException:
info = traceback.format_exception(*sys.exc_info())
for i in info:
sys.stderr.write(i)
app = VoiceAlert(len(sys.argv), sys.argv)
sys.exit(app())

BIN
bin/scwfas Executable file

Binary file not shown.

BIN
bin/scwfparam Executable file

Binary file not shown.

BIN
bin/scxmldump Executable file

Binary file not shown.

BIN
bin/scxmlmerge Executable file

Binary file not shown.

BIN
bin/sczip Executable file

Binary file not shown.

55
bin/seiscomp Executable file
View File

@ -0,0 +1,55 @@
#!/bin/sh -e
# Resolve softlink to seiscomp executable first
if test -L "$0"
then
# $0 is a link
target="$(readlink "$0")"
case "$target" in
/*)
d="$target"
;;
*)
d="$(dirname "$0")/$target"
;;
esac
else
# $0 is NOT a link
case "$0" in
*/* | /*)
d="$0"
;;
*)
d="$(command -v "$0")"
;;
esac
fi
normalized_dirname() {
# Normalize directory name without following symlinks.
# Brute-force but portable.
cd "${1%/*}" && pwd || exit 1
}
# Determine the root directory of the 'seiscomp' utility.
d="$(normalized_dirname "$d")"
SEISCOMP_ROOT="$(realpath "${d%/bin}")"
export SEISCOMP_ROOT
export PATH="$SEISCOMP_ROOT/bin:$PATH"
export LD_LIBRARY_PATH="$SEISCOMP_ROOT/lib:$LD_LIBRARY_PATH"
export PYTHONPATH="$SEISCOMP_ROOT/lib/python:$PYTHONPATH"
export MANPATH="$SEISCOMP_ROOT/share/man:$MANPATH"
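# Source optional host-specific environment overrides from
# etc/env/by-hostname/<hostname>, if such a file exists.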
HOSTENV=$SEISCOMP_ROOT/etc/env/by-hostname/$(hostname)
test -f "$HOSTENV" && . "$HOSTENV"
case $1 in
exec)
shift
exec "$@"
;;
*)
exec "$SEISCOMP_ROOT/bin/seiscomp-python" "$SEISCOMP_ROOT/bin/seiscomp-control.py" "$@"
;;
esac

1641
bin/seiscomp-control.py Executable file

File diff suppressed because it is too large Load Diff

19
bin/seiscomp-python Executable file
View File

@ -0,0 +1,19 @@
#!/bin/sh
#
# This is a shell script that executes the Python interpreter as
# configured using cmake.
#
# In order to use this in your Python programs use this
# shebang line:
#!/usr/bin/env seiscomp-python
# Please note that this wrapper does *not* set the environment
# variables for you. To ensure that you run your script in the
# proper environment, please use 'seiscomp exec'. Alternatively
# you can also set your environment variables according to the
# output of 'seiscomp print env'.
python_executable="/usr/bin/python3"
exec "$python_executable" "$@"

962
bin/sh2proc Executable file
View File

@ -0,0 +1,962 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Author: Alexander Jaeger, Stephan Herrnkind, #
# Lukas Lehmann, Dirk Roessler #
# Email: herrnkind@gempa.de #
############################################################################
# from time import strptime
import sys
import traceback
import seiscomp.client
import seiscomp.core
import seiscomp.datamodel
import seiscomp.io
import seiscomp.logging
import seiscomp.math
TimeFormats = ["%d-%b-%Y_%H:%M:%S.%f", "%d-%b-%Y_%H:%M:%S"]
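# matches e.g. "05-May-2022_10:00:01.123" and "05-May-2022_10:00:01"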
# SC3 has more event types available in the datamodel
EventTypes = {
"teleseismic quake": seiscomp.datamodel.EARTHQUAKE,
"local quake": seiscomp.datamodel.EARTHQUAKE,
"regional quake": seiscomp.datamodel.EARTHQUAKE,
"quarry blast": seiscomp.datamodel.QUARRY_BLAST,
"nuclear explosion": seiscomp.datamodel.NUCLEAR_EXPLOSION,
"mining event": seiscomp.datamodel.MINING_EXPLOSION,
}
def wfs2Str(wfsID):
return f"{wfsID.networkCode()}.{wfsID.stationCode()}.{wfsID.locationCode()}.{wfsID.channelCode()}"
###############################################################################
class SH2Proc(seiscomp.client.Application):
###########################################################################
def __init__(self):
seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
self.setMessagingEnabled(True)
self.setDatabaseEnabled(True, True)
self.setLoadInventoryEnabled(True)
self.setLoadConfigModuleEnabled(True)
self.setDaemonEnabled(False)
self.inputFile = "-"
self.streams = None
###########################################################################
def initConfiguration(self):
if not seiscomp.client.Application.initConfiguration(self):
return False
# If the database connection is passed via command line or configuration
# file then messaging is disabled. Messaging is only used to get
# the configured database connection URI.
if self.databaseURI() != "":
self.setMessagingEnabled(False)
else:
# A database connection is not required if the inventory is loaded
# from file
if not self.isInventoryDatabaseEnabled():
self.setMessagingEnabled(False)
self.setDatabaseEnabled(False, False)
return True
##########################################################################
def printUsage(self):
print(
"""Usage:
sh2proc [options]
Convert Seismic Handler event data to SeisComP XML format"""
)
seiscomp.client.Application.printUsage(self)
print(
"""Examples:
Convert the Seismic Handler file shm.evt to SCML. Receive the database
connection to read inventory and configuration information from messaging
sh2proc shm.evt
Read Seismic Handler data from stdin. Provide inventory and configuration in XML
cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml
"""
)
##########################################################################
def validateParameters(self):
if not seiscomp.client.Application.validateParameters(self):
return False
for opt in self.commandline().unrecognizedOptions():
if len(opt) > 1 and opt.startswith("-"):
continue
self.inputFile = opt
break
return True
###########################################################################
def loadStreams(self):
now = seiscomp.core.Time.GMT()
inv = seiscomp.client.Inventory.Instance()
self.streams = {}
# try to load streams by detecLocid and detecStream
mod = self.configModule()
if mod is not None and mod.configStationCount() > 0:
seiscomp.logging.info("loading streams using detecLocid and detecStream")
for i in range(mod.configStationCount()):
cfg = mod.configStation(i)
net = cfg.networkCode()
sta = cfg.stationCode()
if sta in self.streams:
seiscomp.logging.warning(
f"ambiguous stream id found for station {net}.{sta}"
)
continue
setup = seiscomp.datamodel.findSetup(cfg, self.name(), True)
if not setup:
seiscomp.logging.warning(
f"could not find station setup for {net}.{sta}"
)
continue
params = seiscomp.datamodel.ParameterSet.Find(setup.parameterSetID())
if not params:
seiscomp.logging.warning(
f"could not find station parameters for {net}.{sta}"
)
continue
detecLocid = ""
detecStream = None
for j in range(params.parameterCount()):
param = params.parameter(j)
if param.name() == "detecStream":
detecStream = param.value()
elif param.name() == "detecLocid":
detecLocid = param.value()
if detecStream is None:
seiscomp.logging.warning(
f"could not find detecStream for {net}.{sta}"
)
continue
loc = inv.getSensorLocation(net, sta, detecLocid, now)
if loc is None:
seiscomp.logging.warning(
f"could not find preferred location for {net}.{sta}"
)
continue
components = {}
tc = seiscomp.datamodel.ThreeComponents()
seiscomp.datamodel.getThreeComponents(tc, loc, detecStream[:2], now)
if tc.vertical():
cha = tc.vertical()
wfsID = seiscomp.datamodel.WaveformStreamID(
net, sta, loc.code(), cha.code(), ""
)
components[cha.code()[-1]] = wfsID
seiscomp.logging.debug(f"add stream {wfs2Str(wfsID)} (vertical)")
if tc.firstHorizontal():
cha = tc.firstHorizontal()
wfsID = seiscomp.datamodel.WaveformStreamID(
net, sta, loc.code(), cha.code(), ""
)
components[cha.code()[-1]] = wfsID
seiscomp.logging.debug(
f"add stream {wfs2Str(wfsID)} (first horizontal)"
)
if tc.secondHorizontal():
cha = tc.secondHorizontal()
wfsID = seiscomp.datamodel.WaveformStreamID(
net, sta, loc.code(), cha.code(), ""
)
components[cha.code()[-1]] = wfsID
seiscomp.logging.debug(
f"add stream {wfs2Str(wfsID)} (second horizontal)"
)
if len(components) > 0:
self.streams[sta] = components
return
# fallback loading streams from inventory
seiscomp.logging.warning(
"no configuration module available, loading streams "
"from inventory and selecting first available stream "
"matching epoch"
)
for iNet in range(inv.inventory().networkCount()):
net = inv.inventory().network(iNet)
seiscomp.logging.debug(
f"network {net.code()}: loaded {net.stationCount()} stations"
)
for iSta in range(net.stationCount()):
sta = net.station(iSta)
try:
start = sta.start()
if not start <= now:
continue
except:
continue
try:
end = sta.end()
if not now <= end:
continue
except:
pass
for iLoc in range(sta.sensorLocationCount()):
loc = sta.sensorLocation(iLoc)
for iCha in range(loc.streamCount()):
cha = loc.stream(iCha)
wfsID = seiscomp.datamodel.WaveformStreamID(
net.code(), sta.code(), loc.code(), cha.code(), ""
)
comp = cha.code()[2]
if sta.code() not in self.streams:
components = {}
components[comp] = wfsID
self.streams[sta.code()] = components
else:
# Seismic Handler does not support network,
# location and channel code: make sure network and
# location codes match first item in station
# specific stream list
oldWfsID = list(self.streams[sta.code()].values())[0]
if (
net.code() != oldWfsID.networkCode()
or loc.code() != oldWfsID.locationCode()
or cha.code()[:2] != oldWfsID.channelCode()[:2]
):
seiscomp.logging.warning(
f"ambiguous stream id found for station\
{sta.code()}, ignoring {wfs2Str(wfsID)}"
)
continue
self.streams[sta.code()][comp] = wfsID
seiscomp.logging.debug(f"add stream {wfs2Str(wfsID)}")
###########################################################################
def parseTime(self, timeStr):
time = seiscomp.core.Time()
for fmt in TimeFormats:
if time.fromString(timeStr, fmt):
break
return time
###########################################################################
def parseMagType(self, value):
if value == "m":
return "M"
if value == "ml":
return "ML"
if value == "mb":
return "mb"
if value == "ms":
return "Ms(BB)"
if value == "mw":
return "Mw"
if value == "bb":
return "mB"
return ""
###########################################################################
def sh2proc(self, file):
ep = seiscomp.datamodel.EventParameters()
origin = seiscomp.datamodel.Origin.Create()
event = seiscomp.datamodel.Event.Create()
origin.setCreationInfo(seiscomp.datamodel.CreationInfo())
origin.creationInfo().setCreationTime(seiscomp.core.Time.GMT())
originQuality = None
originCE = None
latFound = False
lonFound = False
depthError = None
originComments = {}
# variables, reset after 'end of phase'
pick = None
stationMag = None
staCode = None
compCode = None
stationMagBB = None
ampPeriod = None
ampBBPeriod = None
amplitudeDisp = None
amplitudeVel = None
amplitudeSNR = None
amplitudeBB = None
magnitudeMB = None
magnitudeML = None
magnitudeMS = None
magnitudeBB = None
# To avoid undefined warning
arrival = None
phase = None
km2degFac = 1.0 / seiscomp.math.deg2km(1.0)
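# one degree of arc is roughly 111.2 km, so km2degFac is about 0.009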
# read file line by line, split key and value at colon
iLine = 0
for line in file:
iLine += 1
a = line.split(":", 1)
key = a[0].strip()
keyLower = key.lower()
value = None
# empty line
if len(keyLower) == 0:
continue
# end of phase
if keyLower == "--- end of phase ---":
if pick is None:
seiscomp.logging.warning(f"Line {iLine}: found empty phase block")
continue
if staCode is None or compCode is None:
seiscomp.logging.warning(
f"Line {iLine}: end of phase, stream code incomplete"
)
continue
if staCode not in self.streams:
seiscomp.logging.warning(
f"Line {iLine}: end of phase, station code {staCode} not found in inventory"
)
continue
if compCode not in self.streams[staCode]:
seiscomp.logging.warning(
f"Line {iLine}: end of phase, component\
{compCode} of station {staCode} not found in inventory"
)
continue
streamID = self.streams[staCode][compCode]
pick.setWaveformID(streamID)
ep.add(pick)
arrival.setPickID(pick.publicID())
arrival.setPhase(phase)
origin.add(arrival)
if amplitudeSNR is not None:
amplitudeSNR.setPickID(pick.publicID())
amplitudeSNR.setWaveformID(streamID)
ep.add(amplitudeSNR)
if amplitudeBB is not None:
amplitudeBB.setPickID(pick.publicID())
amplitudeBB.setWaveformID(streamID)
ep.add(amplitudeBB)
if stationMagBB is not None:
stationMagBB.setWaveformID(streamID)
origin.add(stationMagBB)
stationMagContrib = (
seiscomp.datamodel.StationMagnitudeContribution()
)
stationMagContrib.setStationMagnitudeID(stationMagBB.publicID())
if magnitudeBB is None:
magnitudeBB = seiscomp.datamodel.Magnitude.Create()
magnitudeBB.add(stationMagContrib)
if stationMag is not None:
if stationMag.type() in ["mb", "ML"] and amplitudeDisp is not None:
amplitudeDisp.setPickID(pick.publicID())
amplitudeDisp.setWaveformID(streamID)
amplitudeDisp.setPeriod(
seiscomp.datamodel.RealQuantity(ampPeriod)
)
amplitudeDisp.setType(stationMag.type())
ep.add(amplitudeDisp)
if stationMag.type() in ["Ms(BB)"] and amplitudeVel is not None:
amplitudeVel.setPickID(pick.publicID())
amplitudeVel.setWaveformID(streamID)
amplitudeVel.setPeriod(
seiscomp.datamodel.RealQuantity(ampPeriod)
)
amplitudeVel.setType(stationMag.type())
ep.add(amplitudeVel)
stationMag.setWaveformID(streamID)
origin.add(stationMag)
stationMagContrib = (
seiscomp.datamodel.StationMagnitudeContribution()
)
stationMagContrib.setStationMagnitudeID(stationMag.publicID())
magType = stationMag.type()
if magType == "ML":
if magnitudeML is None:
magnitudeML = seiscomp.datamodel.Magnitude.Create()
magnitudeML.add(stationMagContrib)
elif magType == "Ms(BB)":
if magnitudeMS is None:
magnitudeMS = seiscomp.datamodel.Magnitude.Create()
magnitudeMS.add(stationMagContrib)
elif magType == "mb":
if magnitudeMB is None:
magnitudeMB = seiscomp.datamodel.Magnitude.Create()
magnitudeMB.add(stationMagContrib)
pick = None
staCode = None
compCode = None
stationMag = None
stationMagBB = None
ampPeriod = None
ampBBPeriod = None
amplitudeDisp = None
amplitudeVel = None
amplitudeSNR = None
amplitudeBB = None
continue
# empty key
if len(a) == 1:
seiscomp.logging.warning(f"Line {iLine}: key without value")
continue
value = a[1].strip()
if pick is None:
pick = seiscomp.datamodel.Pick.Create()
arrival = seiscomp.datamodel.Arrival()
try:
##############################################################
# station parameters
# station code
if keyLower == "station code":
staCode = value
# pick time
elif keyLower == "onset time":
pick.setTime(seiscomp.datamodel.TimeQuantity(self.parseTime(value)))
# pick onset type
elif keyLower == "onset type":
found = False
for onset in [
seiscomp.datamodel.EMERGENT,
seiscomp.datamodel.IMPULSIVE,
seiscomp.datamodel.QUESTIONABLE,
]:
if value == seiscomp.datamodel.EPickOnsetNames_name(onset):
pick.setOnset(onset)
found = True
break
if not found:
raise Exception("Unsupported onset value")
# phase code
elif keyLower == "phase name":
phase = seiscomp.datamodel.Phase()
phase.setCode(value)
pick.setPhaseHint(phase)
# event type
elif keyLower == "event type":
evttype = EventTypes[value]
event.setType(evttype)
originComments[key] = value
# filter ID
elif keyLower == "applied filter":
pick.setFilterID(value)
# channel code, prepended by configured Channel prefix if only
# one character is found
elif keyLower == "component":
compCode = value
# pick evaluation mode
elif keyLower == "pick type":
found = False
for mode in [
seiscomp.datamodel.AUTOMATIC,
seiscomp.datamodel.MANUAL,
]:
if value == seiscomp.datamodel.EEvaluationModeNames_name(mode):
pick.setEvaluationMode(mode)
found = True
break
if not found:
raise Exception("Unsupported evaluation mode value")
# pick author
elif keyLower == "analyst":
creationInfo = seiscomp.datamodel.CreationInfo()
creationInfo.setAuthor(value)
pick.setCreationInfo(creationInfo)
# pick polarity
# isn't tested
elif keyLower == "sign":
if value == "positive":
sign = "0" # positive
elif value == "negative":
sign = "1" # negative
else:
sign = "2" # unknown
pick.setPolarity(float(sign))
# arrival weight
elif keyLower == "weight":
arrival.setWeight(float(value))
# arrival azimuth
elif keyLower == "theo. azimuth (deg)":
arrival.setAzimuth(float(value))
# pick theo backazimuth
elif keyLower == "theo. backazimuth (deg)":
if pick.slownessMethodID() == "corrected":
seiscomp.logging.debug(
f"Line {iLine}: ignoring parameter: {key}"
)
else:
pick.setBackazimuth(
seiscomp.datamodel.RealQuantity(float(value))
)
pick.setSlownessMethodID("theoretical")
# pick beam slowness
elif keyLower == "beam-slowness (sec/deg)":
if pick.slownessMethodID() == "corrected":
seiscomp.logging.debug(
f"Line {iLine}: ignoring parameter: {key}"
)
else:
pick.setHorizontalSlowness(
seiscomp.datamodel.RealQuantity(float(value))
)
pick.setSlownessMethodID("Array Beam")
# pick beam backazimuth
elif keyLower == "beam-azimuth (deg)":
if pick.slownessMethodID() == "corrected":
seiscomp.logging.debug(
f"Line {iLine}: ignoring parameter: {key}"
)
else:
pick.setBackazimuth(
seiscomp.datamodel.RealQuantity(float(value))
)
# pick epi slowness
elif keyLower == "epi-slowness (sec/deg)":
pick.setHorizontalSlowness(
seiscomp.datamodel.RealQuantity(float(value))
)
pick.setSlownessMethodID("corrected")
# pick epi backazimuth
elif keyLower == "epi-azimuth (deg)":
pick.setBackazimuth(seiscomp.datamodel.RealQuantity(float(value)))
# arrival distance degree
elif keyLower == "distance (deg)":
arrival.setDistance(float(value))
# arrival distance km, recalculates for degree
elif keyLower == "distance (km)":
if isinstance(arrival.distance(), float):
seiscomp.logging.debug(
f"Line {iLine - 1}: ignoring parameter: distance (deg)"
)
arrival.setDistance(float(value) * km2degFac)
# arrival time residual
elif keyLower == "residual time":
arrival.setTimeResidual(float(value))
# amplitude snr
elif keyLower == "signal/noise":
amplitudeSNR = seiscomp.datamodel.Amplitude.Create()
amplitudeSNR.setType("SNR")
amplitudeSNR.setAmplitude(
seiscomp.datamodel.RealQuantity(float(value))
)
# amplitude period
elif keyLower.startswith("period"):
ampPeriod = float(value)
# amplitude value for displacement
elif keyLower == "amplitude (nm)":
amplitudeDisp = seiscomp.datamodel.Amplitude.Create()
amplitudeDisp.setAmplitude(
seiscomp.datamodel.RealQuantity(float(value))
)
amplitudeDisp.setUnit("nm")
# amplitude value for velocity
elif keyLower.startswith("vel. amplitude"):
amplitudeVel = seiscomp.datamodel.Amplitude.Create()
amplitudeVel.setAmplitude(
seiscomp.datamodel.RealQuantity(float(value))
)
amplitudeVel.setUnit("nm/s")
elif keyLower == "bb amplitude (nm/sec)":
amplitudeBB = seiscomp.datamodel.Amplitude.Create()
amplitudeBB.setAmplitude(
seiscomp.datamodel.RealQuantity(float(value))
)
amplitudeBB.setType("mB")
amplitudeBB.setUnit("nm/s")
amplitudeBB.setPeriod(seiscomp.datamodel.RealQuantity(ampBBPeriod))
elif keyLower == "bb period (sec)":
ampBBPeriod = float(value)
elif keyLower == "broadband magnitude":
magType = self.parseMagType("bb")
stationMagBB = seiscomp.datamodel.StationMagnitude.Create()
stationMagBB.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
stationMagBB.setType(magType)
stationMagBB.setAmplitudeID(amplitudeBB.publicID())
# ignored
elif keyLower == "quality number":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
# station magnitude value and type
elif keyLower.startswith("magnitude "):
magType = self.parseMagType(key[10:])
stationMag = seiscomp.datamodel.StationMagnitude.Create()
stationMag.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
if len(magType) > 0:
stationMag.setType(magType)
if magType == "mb":
stationMag.setAmplitudeID(amplitudeDisp.publicID())
elif magType == "MS(BB)":
stationMag.setAmplitudeID(amplitudeVel.publicID())
else:
seiscomp.logging.debug(
f"Line {iLine}: Magnitude Type not known {magType}."
)
###############################################################
# origin parameters
# event ID, added as origin comment later on
elif keyLower == "event id":
originComments[key] = value
# magnitude value and type
elif keyLower == "mean bb magnitude":
magType = self.parseMagType("bb")
if magnitudeBB is None:
magnitudeBB = seiscomp.datamodel.Magnitude.Create()
magnitudeBB.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
magnitudeBB.setType(magType)
elif keyLower.startswith("mean magnitude "):
magType = self.parseMagType(key[15:])
if magType == "ML":
if magnitudeML is None:
magnitudeML = seiscomp.datamodel.Magnitude.Create()
magnitudeML.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
magnitudeML.setType(magType)
elif magType == "Ms(BB)":
if magnitudeMS is None:
magnitudeMS = seiscomp.datamodel.Magnitude.Create()
magnitudeMS.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
magnitudeMS.setType(magType)
elif magType == "mb":
if magnitudeMB is None:
magnitudeMB = seiscomp.datamodel.Magnitude.Create()
magnitudeMB.setMagnitude(
seiscomp.datamodel.RealQuantity(float(value))
)
magnitudeMB.setType(magType)
else:
seiscomp.logging.warning(
f"Line {iLine}: Magnitude type {magType} not defined yet."
)
# latitude
elif keyLower == "latitude":
origin.latitude().setValue(float(value))
latFound = True
elif keyLower == "error in latitude (km)":
origin.latitude().setUncertainty(float(value))
# longitude
elif keyLower == "longitude":
origin.longitude().setValue(float(value))
lonFound = True
elif keyLower == "error in longitude (km)":
origin.longitude().setUncertainty(float(value))
# depth
elif keyLower == "depth (km)":
origin.setDepth(seiscomp.datamodel.RealQuantity(float(value)))
if depthError is not None:
origin.depth().setUncertainty(depthError)
elif keyLower == "depth type":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
elif keyLower == "error in depth (km)":
depthError = float(value)
try:
origin.depth().setUncertainty(depthError)
except seiscomp.core.ValueException:
pass
# time
elif keyLower == "origin time":
origin.time().setValue(self.parseTime(value))
elif keyLower == "error in origin time":
origin.time().setUncertainty(float(value))
# location method
elif keyLower == "location method":
origin.setMethodID(str(value))
# region table, added as origin comment later on
elif keyLower == "region table":
originComments[key] = value
# region table, added as origin comment later on
elif keyLower == "region id":
originComments[key] = value
# source region, added as origin comment later on
elif keyLower == "source region":
originComments[key] = value
# used station count
elif keyLower == "no. of stations used":
if originQuality is None:
originQuality = seiscomp.datamodel.OriginQuality()
originQuality.setUsedStationCount(int(value))
# ignored
elif keyLower == "reference location name":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
# confidence ellipsoid major axis
elif keyLower == "error ellipse major":
if originCE is None:
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
originCE.setSemiMajorAxisLength(float(value))
# confidence ellipsoid minor axis
elif keyLower == "error ellipse minor":
if originCE is None:
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
originCE.setSemiMinorAxisLength(float(value))
# confidence ellipsoid rotation
elif keyLower == "error ellipse strike":
if originCE is None:
originCE = seiscomp.datamodel.ConfidenceEllipsoid()
originCE.setMajorAxisRotation(float(value))
# azimuthal gap
elif keyLower == "max azimuthal gap (deg)":
if originQuality is None:
originQuality = seiscomp.datamodel.OriginQuality()
originQuality.setAzimuthalGap(float(value))
# creation info author
elif keyLower == "author":
origin.creationInfo().setAuthor(value)
# creation info agency
elif keyLower == "source of information":
origin.creationInfo().setAgencyID(value)
# earth model id
elif keyLower == "velocity model":
origin.setEarthModelID(value)
# standard error
elif keyLower == "rms of residuals (sec)":
if originQuality is None:
originQuality = seiscomp.datamodel.OriginQuality()
originQuality.setStandardError(float(value))
# ignored
elif keyLower == "phase flags":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
# ignored
elif keyLower == "location input params":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
# missing keys
elif keyLower == "ampl&period source":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
elif keyLower == "location quality":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
elif keyLower == "reference latitude":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
elif keyLower == "reference longitude":
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
elif keyLower.startswith("amplitude time"):
seiscomp.logging.debug(f"Line {iLine}: ignoring parameter: {key}")
# unknown key
else:
seiscomp.logging.warning(
"Line {iLine}: ignoring unknown parameter: {key}"
)
except ValueError:
seiscomp.logging.warning(f"Line {iLine}: can not parse {key} value")
except Exception:
seiscomp.logging.error("Line {iLine}: {str(traceback.format_exc())}")
return None
# check
if not latFound:
seiscomp.logging.warning("could not add origin, missing latitude parameter")
elif not lonFound:
seiscomp.logging.warning(
"could not add origin, missing longitude parameter"
)
elif not origin.time().value().valid():
seiscomp.logging.warning(
"could not add origin, missing origin time parameter"
)
else:
if magnitudeMB is not None:
origin.add(magnitudeMB)
if magnitudeML is not None:
origin.add(magnitudeML)
if magnitudeMS is not None:
origin.add(magnitudeMS)
if magnitudeBB is not None:
origin.add(magnitudeBB)
ep.add(event)
ep.add(origin)
if originQuality is not None:
origin.setQuality(originQuality)
if originCE is not None:
uncertainty = seiscomp.datamodel.OriginUncertainty()
uncertainty.setConfidenceEllipsoid(originCE)
origin.setUncertainty(uncertainty)
for k, v in originComments.items():
comment = seiscomp.datamodel.Comment()
comment.setId(k)
comment.setText(v)
origin.add(comment)
return ep
###########################################################################
def run(self):
self.loadStreams()
try:
if self.inputFile == "-":
f = sys.stdin
else:
f = open(self.inputFile)
except IOError as e:
seiscomp.logging.error(str(e))
return False
ep = self.sh2proc(f)
if ep is None:
return False
ar = seiscomp.io.XMLArchive()
ar.create("-")
ar.setFormattedOutput(True)
ar.writeObject(ep)
ar.close()
return True
###############################################################################
def main():
try:
app = SH2Proc()
return app()
except:
sys.stderr.write(str(traceback.format_exc()))
return 1
if __name__ == "__main__":
sys.exit(main())
# vim: ts=4 et

BIN
bin/slarchive Executable file

Binary file not shown.

BIN
bin/slinktool Executable file

Binary file not shown.

486
bin/slmon Executable file
View File

@ -0,0 +1,486 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
from getopt import getopt, GetoptError
from time import time, gmtime
from datetime import datetime
import os, sys, signal, glob, re
from seiscomp.myconfig import MyConfig
import seiscomp.slclient
import seiscomp.kernel, seiscomp.config
usage_info = """
Usage:
slmon [options]
SeedLink monitor creating static web pages
Options:
-h, --help display this help message
-c arg use arg as setup config file (ini_setup)
-s arg use arg as stations config file (ini_stations)
-t arg set refresh interval to arg seconds (not yet used)
-v verbose output
Examples:
Start slmon from the command line
slmon -c $SEISCOMP_ROOT/var/lib/slmon/config.ini
Restart slmon in order to update the web pages. Use crontab entries for
automatic restart, e.g.:
*/3 * * * * /home/sysop/seiscomp/bin/seiscomp check slmon >/dev/null 2>&1
"""
def usage(exitcode=0):
sys.stderr.write(usage_info)
exit(exitcode)
try:
seiscompRoot = os.environ["SEISCOMP_ROOT"]
except KeyError:
print("\nSEISCOMP_ROOT must be defined - EXIT\n", file=sys.stderr)
usage(exitcode=2)
ini_stations = os.path.join(seiscompRoot, 'var/lib/slmon/stations.ini')
ini_setup = os.path.join(seiscompRoot, 'var/lib/slmon/config.ini')
regexStreams = re.compile("[SLBVEH][HNLG][ZNE123]")
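# matches typical SEED channel codes such as "BHZ", "HHN" or "LHE"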
verbose = 0
class Module(seiscomp.kernel.Module):
def __init__(self, env):
seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__))
def printCrontab(self):
print("3 * * * * %s/bin/seiscomp check slmon >/dev/null 2>&1" % (self.env.SEISCOMP_ROOT))
class Status:
def __repr__(self):
return "%2s %-5s %2s %3s %1s %s %s" % \
(self.net, self.sta, self.loc, self.cha, self.typ, \
str(self.last_data), str(self.last_feed))
class StatusDict(dict):
def __init__(self, source=None):
if source:
self.read(source)
def fromSlinkTool(self, server="", stations=("GE_MALT", "GE_MORC", "GE_IBBN")):
# later this shall use XML
cmd = "slinktool -nd 10 -nt 10 -Q %s" % server
print(cmd)
f = os.popen(cmd)
# regex = re.compile("[SLBVEH][HNLG][ZNE123]")
regex = regexStreams
for line in f:
net_sta = line[:2].strip() + "_" + line[3:8].strip()
if net_sta not in stations:
continue
typ = line[16]
if typ != "D":
continue
cha = line[12:15].strip()
if not regex.match(cha):
continue
d = Status()
d.net = line[ 0: 2].strip()
d.sta = line[ 3: 8].strip()
d.loc = line[ 9:11].strip()
d.cha = line[12:15]
d.typ = line[16]
d.last_data = seiscomp.slclient.timeparse(line[47:70])
d.last_feed = d.last_data
sec = "%s_%s" % (d.net, d.sta)
sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
self[sec] = d
def read(self, source):
if isinstance(source, str):
source = open(source)
if hasattr(source, "readlines"):
source = source.readlines()
if not isinstance(source, list):
raise TypeError('cannot read from %s' % str(type(source)))
for line in source:
d = Status()
d.net = line[ 0: 2]
d.sta = line[ 3: 8].strip()
d.loc = line[ 9:11].strip()
d.cha = line[12:15]
d.typ = line[16]
d.last_data = seiscomp.slclient.timeparse(line[18:41])
d.last_feed = seiscomp.slclient.timeparse(line[42:65])
if d.last_feed < d.last_data:
d.last_feed = d.last_data
sec = "%s_%s:%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ)
self[sec] = d
def write(self, f):
if isinstance(f, str):
f = open(f, "w")
lines = []
for key in list(self.keys()):
lines.append(str(self[key]))
lines.sort()
f.write('\n'.join(lines)+'\n')
def colorLegend(htmlfile):
htmlfile.write("<p><center>Latencies:<br>\n" \
"<table cellpadding='2' cellspacing='1' border='0'" \
" bgcolor='#000000'>\n<tr>\n" \
"<td bgcolor='#FFFFFF'><b>&le; 1 min&nbsp</b></td>\n" \
"<td bgcolor='#EBD6FF'><b>&gt; 1 min&nbsp</b></td>\n" \
"<td bgcolor='#9470BB'><font color='#FFFFFF'><b>&gt; 10 min&nbsp</b></font></td>\n" \
"<td bgcolor='#3399FF'><font color='#FFFFFF'><b>&gt; 30 min&nbsp</b></font></td>\n" \
"<td bgcolor='#00FF00'><b>&gt; 1 hour&nbsp</b></td>\n" \
"<td bgcolor='#FFFF00'><b>&gt; 2 hours&nbsp</b></td>\n" \
"<td bgcolor='#FF9966'><b>&gt; 6 hours&nbsp</b></td>\n" \
"<td bgcolor='#FF3333'><b>&gt; 1 day&nbsp</b></td>\n" \
"<td bgcolor='#FFB3B3'><b>&gt; 2 days&nbsp</b></td>\n" \
"<td bgcolor='#CCCCCC'><b>&gt; 3 days&nbsp</b></td>\n" \
"<td bgcolor='#999999'><font color='#FFFFFF'><b>&gt; 4 days&nbsp</b></font></td>\n" \
"<td bgcolor='#666666'><font color='#FFFFFF'><b>&gt; 5 days&nbsp</b></font></td>\n" \
"</tr>\n</table>\n</center></p>\n")
# encodes an email address so that it cannot (easily) be extracted
# from the web page. This is meant to be a spam protection.
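# e.g. encode("a@b") -> "&#97;&#64;&#98;"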
def encode(txt): return ''.join(["&#%d;" % ord(c) for c in txt])
def total_seconds(td): return td.seconds + (td.days*86400)
def pageTrailer(htmlfile, config):
htmlfile.write("<hr>\n" \
"<table width='99%%' cellpaddding='2' cellspacing='1' border='0'>\n" \
"<tr>\n<td>Last updated %04d/%02d/%02d %02d:%02d:%02d UTC</td>\n" \
" <td align='right'><a href='%s' " \
"target='_top'>%s</a></td>\n</tr>\n" \
"</table>\n</body></html>\n" % (gmtime()[:6] + (config['setup']['linkurl'],) + (config['setup']['linkname'],)) )
def getColor(delta):
if delta is None: return '#FFFFFF'
delay = total_seconds(delta)
if delay > 432000: return '#666666' # > 5 days
elif delay > 345600: return '#999999' # > 4 days
elif delay > 259200: return '#CCCCCC' # > 3 days
elif delay > 172800: return '#FFB3B3' # > 2 days
elif delay > 86400: return '#FF3333' # > 1 day
elif delay > 21600: return '#FF9966' # > 6 hours
elif delay > 7200: return '#FFFF00' # > 2 hours
elif delay > 3600: return '#00FF00' # > 1 hour
elif delay > 1800: return '#3399FF' # > 30 minutes
elif delay > 600: return '#9470BB' # > 10 minutes
elif delay > 60: return '#EBD6FF' # > 1 minute
else: return '#FFFFFF' # <= 1 minute
TDdummy = "<td align='center' bgcolor='%s'><tt>n/a</tt></td>"
def TDf(delta, col="#ffffff"):
if delta is None: return TDdummy % col
t = total_seconds(delta)
if t > 86400: x = "%.1f d" % (t/86400.)
elif t > 7200: x = "%.1f h" % (t/3600.)
elif t > 120: x = "%.1f m" % (t/60.)
else: x = "%.1f s" % (t)
return "<td align='right' bgcolor='%s'><tt> &nbsp;%s&nbsp;</tt></td>" % \
(col,x)
def TDt(t, col="#ffffff"):
if t is None: return TDdummy % col
x = t.strftime("%Y/%m/%d %H:%M:%S")
return "<td align='center' bgcolor='%s'><tt>&nbsp;%s&nbsp;</tt></td>" % \
(col,x)
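# e.g. TDf(timedelta(minutes=5)) renders "5.0 m" (300 s > 120), and
# TDt(datetime(2025, 1, 2, 3, 4, 5)) renders "2025/01/02 03:04:05",
# each wrapped in a colored <td>.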
def myrename(name1, name2):
# fault-tolerant rename that doesn't cause an exception if it fails, which
# may happen e.g. if the target is on a non-reachable NFS directory
try:
os.rename(name1, name2)
except OSError:
print("failed to rename(%s,%s)" % (name1, name2), file=sys.stderr)
def makeMainHTML(config):
global status
now = datetime.utcnow()
stations = []
streams = [ x for x in list(status.keys()) if regexStreams.search(x) ]
streams.sort()
tmp_rt = []
tmp_du = []
for label in streams:
lat1 = now - status[label].last_data # XXX
lat2 = now - status[label].last_feed # XXX
lat3 = lat1-lat2 # XXX
        if total_seconds(lat3) == 0: lat3 = lat2 = None
if label[-2]=='.' and label[-1] in "DE":
label = label[:-2]
n,s,x,x = label.split(".")
if s in stations: continue # avoid duplicates for different locations
stations.append(s)
net_sta = "%s_%s" % (n,s)
line = "<tr bgcolor='#ffffff'><td><tt>&nbsp;%s <a " \
"href='%s.html'>%s</a>&nbsp;</td>%s%s%s</tr>" \
% (n, net_sta, s, TDf(lat1, getColor(lat1)),
TDf(lat2, getColor(lat2)),
TDf(lat3, getColor(lat3)))
if config.station[net_sta]['type'][:4] == 'real':
tmp_rt.append(line)
else: tmp_du.append(line)
makeStatHTML(net_sta, config)
    os.makedirs(config['setup']['wwwdir'], exist_ok=True)
temp = "%s/tmp.html" % config['setup']['wwwdir']
dest = "%s/index.html" % config['setup']['wwwdir']
table_begin = """
    <table cellpadding='2' cellspacing='1' border='0' bgcolor='#000000'>
<tr>
<th bgcolor='#ffffff' rowspan='2' align='center'>Station</th>
<th bgcolor='#ffffff' colspan='3' align='center'>Latencies</th>
</tr>
<tr>
<th bgcolor='#ffffff' align='center'>Data</th>
<th bgcolor='#ffffff' align='center'>Feed</th>
<th bgcolor='#ffffff' align='center'>Diff.</th>
</tr>
"""
table_end = """
</table>
"""
htmlfile = open(temp, "w")
htmlfile.write("""<html>
<head>
<title>%s</title>
<meta http-equiv='refresh' content='%d'>
<link rel='SHORTCUT ICON' href='%s'>
</head>
<body bgcolor='#ffffff'>
<center><font size='+2'>%s</font></center>\n""" % \
( config['setup']['title'], int(config['setup']['refresh']),
config['setup']['icon'], config['setup']['title']))
htmlfile.write("<center><table cellpaddding='5' cellspacing='5'><tr>\n")
if len(tmp_rt):
htmlfile.write("<td valign='top' align='center'>\n" \
"<font size='+1'>Real-time stations<font>\n</td>\n")
if len(tmp_du):
htmlfile.write("<td valign='top' align='center'>\n" \
"<font size='+1'>Dial-up stations<font>\n</td>\n")
htmlfile.write("</tr><tr>")
if len(tmp_rt):
htmlfile.write("<td valign='top' align='center'>\n")
htmlfile.write(table_begin)
htmlfile.write("\n".join(tmp_rt))
htmlfile.write(table_end)
htmlfile.write("</td>\n")
if len(tmp_du):
htmlfile.write("<td valign='top' align='center'>\n")
htmlfile.write(table_begin)
htmlfile.write("\n".join(tmp_du))
htmlfile.write(table_end)
htmlfile.write("</td>\n")
htmlfile.write("</tr></table></center>\n")
colorLegend(htmlfile)
pageTrailer(htmlfile, config)
htmlfile.close()
myrename(temp, dest)
def makeStatHTML(net_sta, config):
global status
    os.makedirs(config['setup']['wwwdir'], exist_ok=True)
temp = "%s/tmp2.html" % config['setup']['wwwdir']
dest = "%s/%s.html" % ( config['setup']['wwwdir'], net_sta)
htmlfile = open(temp, "w")
htmlfile.write("""<html>
<head>
<title>%s - Station %s</title>
<meta http-equiv='refresh' content='%d'>
<link rel='SHORTCUT ICON' href='%s'>
</head>
<body bgcolor='#ffffff'>
<center><font size='+2'>%s - Station %s</font>\n""" % \
( config['setup']['title'], net_sta, int(config['setup']['refresh']),
config['setup']['icon'],
config['setup']['title'], net_sta.split("_")[-1]))
    try:
        name = config.station[net_sta]['info']
        htmlfile.write("<br><font size='+1'>%s</font>" % name)
    except KeyError:
        pass
htmlfile.write("</center>\n")
if 'text' in config.station[net_sta]:
htmlfile.write("<P>%s</P>\n" % config.station[net_sta]['text'])
htmlfile.write("""<p><center>
<table cellpadding='2' cellspacing='1' border='0' bgcolor='#000000'>
<tr>
<th bgcolor='#ffffff' align='center' rowspan='2'>Station/<br>Channel</th>
<th bgcolor='#ffffff' align='center' colspan='2'>Data</th>
<th bgcolor='#ffffff' align='center' colspan='2'>Feed</th>
<th bgcolor='#ffffff' align='center' rowspan='2'>Diff.</th>
</tr>
<tr>
<th bgcolor='#ffffff' align='center'>Last Sample</th>
<th bgcolor='#ffffff' align='center'>Latency</th>
<th bgcolor='#ffffff' align='center'>Last Received</th>
<th bgcolor='#ffffff' align='center'>Latency</th>
</tr>""")
now = datetime.utcnow()
netsta2=net_sta.replace("_",".")
streams = [ x for x in list(status.keys()) if x.find(netsta2)==0 ]
streams.sort()
for label in streams:
tim1 = status[label].last_data
tim2 = status[label].last_feed
lat1, lat2, lat3 = now-tim1, now-tim2, tim2-tim1
col1, col2, col3 = getColor(lat1), getColor(lat2), getColor(lat3)
if lat1==lat2: lat2 = lat3 = None
if label[-2]=='.' and label[-1] in "DE":
label = label[:-2]
n,s,loc,c = label.split(".")
c = ("%s.%s" % (loc,c)).strip(".")
htmlfile.write("<tr bgcolor='#ffffff'><td>" \
"<tt>&nbsp;%s %s&nbsp;</td>%s%s%s%s%s</tr>\n" \
% (s, c, TDt(tim1, col1), TDf(lat1, col1),
TDt(tim2, col2), TDf(lat2, col2),
TDf(lat3, col3)))
htmlfile.write("</table></p>\n")
colorLegend(htmlfile)
htmlfile.write("<p>\nHow to <a href='http://geofon.gfz-potsdam.de/waveform/status/latency.php' target='_blank'>interpret</a> " \
"these numbers?<br>\n")
if 'liveurl' in config['setup']:
# substitute '%s' in live_url by station name
url = config['setup']['liveurl'] % s
htmlfile.write("View a <a href='%s' target='_blank'>live seismogram</a> of "
"station %s</center>\n" % (url, s))
htmlfile.write("</p>\n")
pageTrailer(htmlfile, config)
htmlfile.close()
myrename(temp, dest)
def read_ini():
global config, ini_setup, ini_stations
print("\nreading setup config from '%s'" % ini_setup)
if not os.path.isfile(ini_setup):
print("[error] setup config '%s' does not exist" % ini_setup, file=sys.stderr)
usage(exitcode=2)
config = MyConfig(ini_setup)
print("reading station config from '%s'" % ini_stations)
if not os.path.isfile(ini_stations):
print("[error] station config '%s' does not exist" % ini_stations, file=sys.stderr)
usage(exitcode=2)
config.station = MyConfig(ini_stations)
def SIGINT_handler(signum, frame):
global status
print("received signal #%d => will write status file and exit" % signum)
# status.write("status.tab")
sys.exit(0)
try:
opts, args = getopt(sys.argv[1:], "c:s:t:hv")
except GetoptError:
print("\nUnknown option in "+str(sys.argv[1:])+" - EXIT.", file=sys.stderr)
usage(exitcode=2)
for flag, arg in opts:
if flag == "-c": ini_setup = arg
if flag == "-s": ini_stations = arg
if flag == "-t": refresh = float(arg) # XXX not yet used
if flag == "-h": usage(exitcode=0)
if flag == "-v": verbose = 1
signal.signal(signal.SIGHUP, SIGINT_handler)
signal.signal(signal.SIGINT, SIGINT_handler)
signal.signal(signal.SIGQUIT, SIGINT_handler)
signal.signal(signal.SIGTERM, SIGINT_handler)
read_ini()
cha = "???"
loc = ""
s = config.station
net_sta = ["%s_%s" % (s[k]['net'],s[k]['sta']) for k in s]
s_arg = ','.join(net_sta)
streams = [ (s[k]['net'],s[k]['sta'],loc,cha) for k in s ]
if 'server' in config['setup']:
server = config['setup']['server']
else: server = "localhost"
#def read_initial(config):
#
# for s in config.station:
# print s,glob.glob("/home/dcop/seedlink/%s/segments/*" % s)
# for f in glob.glob("/home/dcop/seedlink/%s/segments/*" % s):
# print f
#
#read_initial(config)
#print "reading initial time windows from file 'status.tab'"
#status = StatusDict("status.tab")
status = StatusDict()
#if verbose: status.write(sys.stderr)
print("generating output to '%s'" % config['setup']['wwwdir'])
print("getting initial time windows from SeedLink server '%s'" % server)
status.fromSlinkTool(server, stations=net_sta)
if verbose: status.write(sys.stderr)
nextTimeGenerateHTML = time()
print("setting up connection to SeedLink server '%s'" % server)
input = seiscomp.slclient.Input(server, streams)
for rec in input:
    rec_id = '.'.join([rec.net, rec.sta, rec.loc, rec.cha, rec.rectype])
    # if not rec_id in status: continue # XXX XXX XXX
    try:
        status[rec_id].last_data = rec.end_time
        status[rec_id].last_feed = datetime.utcnow()
    except KeyError:
        continue
if time() > nextTimeGenerateHTML:
makeMainHTML(config)
nextTimeGenerateHTML = time() + int(config['setup']['refresh'])

88
bin/tab2inv Executable file
View File

@ -0,0 +1,88 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
import sys
from optparse import OptionParser
from nettab.tab import Tab
import seiscomp.io
def main():
# Creating the parser
parser = OptionParser(usage="Tab to Inventory (sc3) converter", version="1.0", add_help_option=True)
parser.add_option("-i", "--ip", type="string",
help="Prefix to be added to each instrument generated.", dest="instrumentPrefix", default=None)
parser.add_option("-f", "--filterf", type="string",
help="Indicates a folder containing the filters coefficients files", dest="ffolder", default=None)
parser.add_option("-x", "--xmlf", type="string",
help="Indicates a folder containing the XML inventory files (needed for station group support)", dest="xfolder", default=None)
parser.add_option("-D", "--database", type="string",
help="Database URL for inventory (needed for station group support)", dest="database", default=None)
parser.add_option("", "--force", action="store_true",
help="Don't stop on error of individual files", dest="force", default=False)
parser.add_option("-g", "--generate", action="store_true",
help="Generate XML file at the end", dest="generate", default=False)
parser.add_option("-c", "--check", action="store_true",
help="Check the loaded files", dest="check", default=False)
parser.add_option("-d", "--default", type="string",
help="Indicates the default file", dest="defaultFile", default=None)
parser.add_option("-o", "--output", type="string",
help="Indicates the output file", dest="outFile", default="-")
# Parsing & Error check
(options, args) = parser.parse_args()
error = False
if len(args) < 1:
print("No input file(s) to digest", file=sys.stderr)
error = True
if error:
print("Use -h for help on usage", file=sys.stderr)
return 1
# Execution
try:
inv = None
t=Tab(options.instrumentPrefix, options.defaultFile, options.ffolder, options.xfolder, options.database)
for f in args:
try:
t.digest(f)
except Exception as e:
print("Error digesting %s:\n %s" % (f, e), file=sys.stderr)
if not options.force:
raise e
        if options.check:
            t.check()
            return 0
if options.generate:
inv = t.sc3Obj()
if inv:
ar = seiscomp.io.XMLArchive()
print("Generating file: %s" % options.outFile, file=sys.stderr)
ar.create(options.outFile)
ar.setFormattedOutput(True)
ar.setCompression(False)
ar.writeObject(inv)
ar.close()
except Exception as e:
print("Error: " + str(e), file=sys.stderr)
return 1
finally:
print("Ending.", file=sys.stderr)
return 0
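# Example invocation (file and folder names are assumptions):
#   tab2inv -g -f filters/ -o inventory.xml network.tab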
if __name__ == "__main__":
ret = main()
sys.exit(ret)

526
bin/tab2tab Executable file
View File

@ -0,0 +1,526 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
import os
import sys
from datetime import datetime
from nettab.convertUtils import StationAttributes, NetworkAttributes, StationMappings, parseDate, formatDate, quote, hummanStr
from nettab.tab import Tab
from optparse import OptionParser
from nettab.nodesi import Instruments
class TabConverter:
def __init__(self, networkCode):
self.__fmt__ = None
self.takeSugestions = None
self.filename = None
self.networkCode = networkCode
self.stationList = None
self.nat = None
self.sat = None
self.sma = None
self.inst = None
self.defaultEpoch = parseDate("1980/001")
self.start=0
self.code=0
self.description=0
self.datalogger=0
self.sensor=0
self.channel=0
self.gaind = 0
self.longitude=0
self.latitude=0
self.elevation=0
self.end=0
self.depth=0
self.orientation=0
## default dates
self.startDate = parseDate("1980/001")
self.endDate = parseDate(None)
    def loadStationMapping(self, filename):
        if self.networkCode is None:
            raise Exception("Cannot load station mappings without a network code")
        if self.stationList is None:
            raise Exception("Cannot load station mappings without a station list")
        self.sma = StationMappings(self.networkCode, self.stationList, filename)
    def loadStationAttribute(self, filename):
        if self.networkCode is None:
            raise Exception("Cannot load station attributes without a network code")
        if self.stationList is None:
            raise Exception("Cannot load station attributes without a station list")
        self.sat = StationAttributes(self.networkCode, self.stationList, filename)
    def loadNetworkAttribute(self, filename):
        if self.networkCode is None:
            raise Exception("Cannot load network attributes without a network code")
        if self.stationList is None:
            raise Exception("Cannot load network attributes without a station list")
        self.nat = NetworkAttributes(self.networkCode, filename)
def loadInstrumentsFile(self, filename, filterFolder):
tab = Tab(filterFolder=filterFolder)
tab.digest(filename)
if tab.i:
self.inst = tab.i
def __fmtline__(self):
if not self.__fmt__:
fmt = "Sl: "
fmt += "%%-%ds" % self.code
fmt += " %%-%ds" % self.description
fmt += " %%-%ds" % self.datalogger
fmt += " %%-%ds" % self.sensor
fmt += " %%-%ds" % self.channel
fmt += " %%-%ds" % self.orientation
fmt += " %%-%ds" % self.latitude
fmt += " %%-%ds" % self.longitude
fmt += " %%-%ds" % self.elevation
fmt += " %%-%ds" % self.depth
fmt += " %%-%ds" % self.start
fmt += " %%-%ds" % self.end
self.__fmt__ = fmt
return self.__fmt__
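    # Example: with self.code == 4 and self.description == 10 the format
    # starts "Sl: %-4s %-10s ...", i.e. every column is left-aligned to the
    # widest value collected during preload().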
def __analyseLine__(self, items):
inputLine = " ".join(items)
if len(items) < 4:
raise Exception("Invalid items count on line %s" % inputLine)
if len(items) <= 5:
netCode = items[2]
if netCode != self.networkCode:
raise Exception("Tab file (%s) doesn't match class (%s) -- %s" % (netCode,self.networkCode,inputLine))
return [None, None, None]
else:
if len(items) < 6:
raise Exception("Invalid Station line %s" % inputLine)
stationCode = items.pop(0)
code = len(stationCode)
self.code=max(self.code,code)
description = len(quote(hummanStr(items.pop(0))))
self.description=max(self.description, description)
datalogger = len(items.pop(0))
self.datalogger=max(self.datalogger, datalogger)
sensor = len(items.pop(0))
self.sensor=max(self.sensor, sensor)
# Gain
gaind = items.pop(0)
if float(gaind) != 1.0:
self.datalogger = max (self.datalogger, datalogger + len(gaind))
channel = len(items.pop(0))
self.channel=max(self.channel, channel)
latitude = len(items.pop(0))
self.latitude=max(self.latitude, latitude)
longitude = len(items.pop(0))
self.longitude=max(self.longitude, longitude)
elevation = len(items.pop(0))
self.elevation=max(self.elevation, elevation)
            # Orientation
            depth = items.pop(0)
            try:
                float(depth)
                orientation = "ZNE"
            except ValueError:
                orientation = "Z"
                (depth, a1, a2) = depth.split("/")
                a1n = float(a1)
                a2n = float(a2)
                orientation += "1"
                if a1n != 0.0: orientation += "(0.0,%s)" % a1
                orientation += "2"
                if a2n != 90.0: orientation += "(0.0,%s)" % a2
            orientation = len(orientation)
            self.orientation = max(self.orientation, orientation)
            depth = len(depth)
            self.depth = max(self.depth, depth)
            # Start
            try:
                start = parseDate(items.pop(0))
                self.start = max(self.start, len(formatDate(start)))
            except Exception:
                raise Exception("Invalid Station line start date %s" % inputLine)
            # End
            try:
                end = parseDate(items.pop(0))
            except Exception:
                end = parseDate("")
            self.end = max(self.end, len(formatDate(end)))
return [stationCode, start, end]
def preload(self, filename, takeSugestions):
self.takeSugestions = takeSugestions
sugestedStart = datetime.now()
sugestedEnd = self.defaultEpoch
stationList = []
error = []
# Some initialization
if self.filename is not None:
raise Exception("Cannot pre-load two different files (current one is %s)" % self.filename)
print("Analysing ... ", file=sys.stderr)
fd = open(filename)
for line in fd:
line = line.strip()
if not line or line[0] == "#": continue
try:
(stationCode, start, end) = self.__analyseLine__(line.split())
except Exception as e:
error.append(str(e))
continue
if not stationCode: continue
if stationCode not in stationList:
stationList.append(stationCode)
sugestedStart = min(sugestedStart, start)
if end and sugestedEnd:
sugestedEnd = max(sugestedEnd, end)
else:
sugestedEnd = None
fd.close()
if len(error):
raise Exception("\n".join(error))
print(" Loaded %d different stations" % len(stationList), file=sys.stderr)
    if takeSugestions:
        self.startDate = sugestedStart
        print(" Taking suggested start date of %s" % formatDate(self.startDate), file=sys.stderr)
        self.endDate = sugestedEnd
        print(" Taking suggested end date of %s" % formatDate(self.endDate), file=sys.stderr)
self.filename = filename
self.stationList = stationList
print("Done.", file=sys.stderr)
def __convertHeader__(self, line, fdo):
# Split line
items = line.split()
if not self.takeSugestions:
if self.nat.hasStart:
print(" Using start from attribute.", file=sys.stderr)
self.startDate = self.nat.startDate
if self.nat.hasEnd:
print(" Using end from attribute.", file=sys.stderr)
self.endDate = self.nat.endDate
nCode = items[2].strip()
if nCode != self.networkCode:
raise Exception("Wrong network code found: %s != %s" % (self.networkCode, nCode))
fdo.write("Nw: %s %s %s" % (nCode, formatDate(self.startDate), formatDate(self.endDate)) + "\n")
self.nat.dump(fdo)
def __convertLine__(self, line, fdo, atFront):
lnfmt = self.__fmtline__()
# Split line
items = line.split()
try:
code = items.pop(0)
except Exception as e:
raise Exception ("Missing Code on %s" % line)
        if code not in self.stationList:
            raise Exception("Unknown station code %s" % code)
        try:
            hummanStr(items.pop(0))
        except Exception:
            raise Exception("Missing Description on %s" % line)
try:
datalogger = items.pop(0)
except Exception as e:
raise Exception ("Missing Datalogger on %s" % line)
try:
sensor = items.pop(0)
except Exception as e:
raise Exception ("Missing Sensor on %s" % line)
try:
gaind = items.pop(0)
if float(gaind) != 1.0:
if not self.inst:
raise Exception("Instrument database needed to convert gain")
try:
dte = self.inst.dls[str(datalogger).split("%")[0]]
except Exception as e:
print(e, file=sys.stderr)
raise Exception("Datalogger %s not found" % str(datalogger).split("%")[0])
datalogger += "%%%s" % (float(dte.gain) * float(gaind))
print(" Converting gain multiplier to real gain using instrument DB on %s" % code, file=sys.stderr)
except Exception as e:
raise Exception ("Missing Gain on %s (%s)" % (line,str(e)))
try:
channel = items.pop(0)
except Exception as e:
raise Exception ("Missing Channel on %s" % line)
try:
latitude = items.pop(0)
except Exception as e:
raise Exception ("Missing Latitude on %s" % line)
try:
longitude = items.pop(0)
except Exception as e:
raise Exception ("Missing Longitude on %s" % line)
try:
elevation = items.pop(0)
except Exception as e:
raise Exception ("Missing Elevation on %s" % line)
try:
depth = items.pop(0)
except Exception as e:
raise Exception ("Missing Depth on %s" % line)
        # Orientation
        try:
            float(depth)
            orientation = "ZNE"
        except ValueError:
            orientation = "Z"
            (depth, a1, a2) = depth.split("/")
            a1n = float(a1)
            if a1n == 0.0:
                orientation += "1"
            else:
                orientation += "1(0.0,%s)" % a1
            a2n = float(a2)
            if a2n == 90.0:
                orientation += "2"
            else:
                orientation += "2(0.0,%s)" % a2
# Start
try:
start = items.pop(0)
except Exception:
raise Exception ("Missing Start on %s" % line)
try:
start = parseDate(start)
except Exception as e:
raise Exception("Invalide Start date: %s (%s) on %s" % (start, e, line))
#End
try:
end = items.pop(0)
except:
end = ""
try:
end = parseDate(end)
except Exception as e:
raise Exception("Invalide End date: %s (%s) on %s" % (end, e, line))
[place, country] = self.sat.parseStationLine(line.split())
description = "%s/%s" % (place, country)
## Prepare necessary output
if not atFront:
self.sma.dump(fdo, code)
self.sat.dump(fdo, code)
for (start, end) in self.sma.getMappings(code, start, end):
fdo.write(lnfmt % (code, quote(description), datalogger, sensor, channel, orientation, latitude, longitude, elevation, depth, formatDate(start), formatDate(end)) + "\n")
return code
def convert(self, fdo, keepcomments = False, atFront = True):
if self.filename is None:
raise Exception("You should pre-load a tab file before before converting.")
## Obtain additional attribute classes if needed
if not self.nat:
self.nat = NetworkAttributes(self.networkCode, None)
if not self.sat:
self.sat = StationAttributes(self.networkCode, self.stationList, None)
if not self.sma:
self.sma = StationMappings(self.networkCode, self.stationList, None)
# Parse in again the station lines and network header by the additional classes
print("Pre-Parsing Station/Network lines ... ", file=sys.stderr)
fd = open(self.filename)
for line in fd:
line = line.strip()
if not line or line[0] == "#":
continue
items = line.split()
if len(items) <= 5:
self.nat.parseNetworkLine(items)
elif len(items) <= 12:
self.sma.parseStationLine(items)
self.sat.parseStationLine(items)
fd.close()
fd = open(self.filename)
oldcode="" # Station code of the last printed line
last="" # Type of the last printed line
print("Converting ... ", file=sys.stderr)
for line in fd:
line = line.strip()
if not line or line[0] == "#":
if last == "l" or last == "a" or last == "h": fdo.write("\n")
if keepcomments: fdo.write(line + "\n")
last = "c"
continue
items = line.split()
if len(items) <= 5:
self.__convertHeader__(line, fdo)
last = "h"
if (atFront):
fdo.write("\n")
self.sma.dump(fdo, None)
self.sat.dump(fdo, None)
last = "a"
fdo.write("\n")
            elif len(items) <= 12:
                if (last == "l" and items[0].strip() != oldcode) or last == "h":
                    fdo.write("\n")
                oldcode = self.__convertLine__(line, fdo, atFront)
                last = "l"
            else:
                print("ignoring unexpected input at %s" % line, file=sys.stderr)
fd.close()
def main():
# Creating the parser
parser = OptionParser(usage="Old tab to New tab converter", version="1.0", add_help_option=True)
parser.add_option("", "--instdb", type="string",
help="Indicates the instrument databases file to use", dest="inst", default=None)
parser.add_option("", "--smap", type="string",
help="Indicates the station attribute file to use", dest="smap", default=None)
parser.add_option("", "--sat", type="string",
help="Indicates the station attribute file to use", dest="sat", default=None)
parser.add_option("", "--nat", type="string",
help="Indicates the station attribute file to use", dest="nat", default=None)
parser.add_option("-t", "--tab", type="string",
help="Indicates the tab file to convert", dest="tabFile", default=None)
parser.add_option("-f", "--filterf", type="string",
help="Indicates a folder containing the filters coefficients files", dest="ffolder", default=None)
parser.add_option("-n", "--net", type="string",
help="Indicates a two leter station code", dest="netCode", default=None)
parser.add_option("-g", "--globalsa", action="store_true",
help="Indicate that we should put a condensed version of the station attributes just below the network definition", dest="globalSa", default=False)
parser.add_option("-a", "--autotime", action="store_true",
help="Guess the start and end times for a network from the channel times", dest="autoTime", default=False)
parser.add_option("-c", "--clean", action="store_true",
help="Remove the comments and blank lines", dest="cleanFile", default=False)
# Parsing & Error check
(options, args) = parser.parse_args()
error = False
if len(args) != 1:
print("need an Output Filename or '-' for stdout", file=sys.stderr)
error = True
if not options.tabFile:
print("tab file name not supplied", file=sys.stderr)
error = True
if options.inst and not options.ffolder:
print("Filter folder not supplied.", file=sys.stderr)
error = True
if options.tabFile and not os.path.isfile(options.tabFile):
print("supplied tab file (%s) is not a file" % options.tabFile, file=sys.stderr)
error = True
if not options.netCode:
print("network code not supplied", file=sys.stderr)
error = True
#if options.autoTime and (options.netStart or options.netEnd):
# print >> sys.stderr, "options Auto Time and Network Start/End times are exclusive"
# return
if error:
print("use -h for getting a help on usage", file=sys.stderr)
return
if args[0] != "-":
fdo = open(args[0], "w")
else:
fdo = sys.stdout
# Execution
try:
cnv = TabConverter(options.netCode.upper())
cnv.preload(options.tabFile, options.autoTime)
if options.inst or options.smap or options.nat or options.sat:
print("Loading optional files: ", file=sys.stderr)
if options.inst and os.path.isfile(options.inst):
cnv.loadInstrumentsFile(options.inst, options.ffolder)
if options.smap and os.path.isfile(options.smap):
cnv.loadStationMapping(options.smap)
if options.nat and os.path.isfile(options.nat):
cnv.loadNetworkAttribute(options.nat)
if options.sat and os.path.isfile(options.sat):
cnv.loadStationAttribute(options.sat)
print("Done.", file=sys.stderr)
cnv.convert(fdo, not options.cleanFile, options.globalSa)
except Exception as e:
print("", file=sys.stderr)
print("Error on processing: %s" % e, file=sys.stderr)
fdo.close()
if __name__ == "__main__":
main()

380
bin/tabinvmodifier Executable file
View File

@ -0,0 +1,380 @@
#!/usr/bin/env seiscomp-python
################################################################################
# Copyright (C) 2012-2013, 2020 Helmholtz-Zentrum Potsdam - Deutsches GeoForschungsZentrum GFZ
#
# tabinvmodifier -- Tool for inventory modification using nettab files.
#
# This software is free software and comes with ABSOLUTELY NO WARRANTY.
#
# Author: Marcelo Bianchi
# Email: mbianchi@gfz-potsdam.de
################################################################################
from __future__ import print_function
import os
import sys
import datetime, time
from nettab.lineType import Nw, Sa, Na, Ia
from nettab.basesc3 import sc3
import seiscomp.datamodel, seiscomp.io, seiscomp.client, seiscomp.core, seiscomp.logging
class Rules(object):
def __init__(self, relaxed = False):
self.relaxed = relaxed
self.attributes = {}
self.iattributes = []
return
@staticmethod
def _overlaps(pstart, pend, cstart, cend):
if pend:
if pend > cstart:
if not cend or pstart < cend:
return True
else:
if not cend or pstart < cend:
return True
return False
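    # e.g. an open-ended epoch (pend is None) overlaps any epoch it starts
    # inside: _overlaps(2010, None, 2012, 2015) is True, while
    # _overlaps(2010, 2011, 2012, 2015) is False.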
def Nw(self, nw):
key = (nw.code, nw.start, nw.end)
if key in self.attributes:
raise Exception("Nw (%s/%s-%s) is already defined." % key)
self.attributes[key] = {}
self.attributes[key]["Sa"] = []
self.attributes[key]["Na"] = []
return key
def Sa(self, key, sa):
try:
items = self.attributes[key]["Sa"]
except KeyError:
raise Exception ("Nw %s/%s-%s not found in Ruleset" % key)
items.append(sa)
def Na(self, key, na):
try:
items = self.attributes[key]["Na"]
except KeyError:
raise Exception ("Nw %s/%s-%s not found in Ruleset" % key)
items.append(na)
    def Ia(self, ia):
        self.iattributes.append(ia)
def findKey(self, ncode, nstart, nend):
for (code, start, end) in self.attributes:
if code == ncode and self._overlaps(start, end, nstart, nend):
return (code, start, end)
return None
def getInstrumentsAttributes(self, elementId, elementType):
att = {}
for item in self.iattributes:
if item.match(elementId, elementType):
att[item.Key] = item.Value
return att
def getNetworkAttributes(self, key):
att = {}
for item in self.attributes[key]["Na"]:
att[item.Key] = item.Value
return att
def getStationAttributes(self, key, ncode, scode, lcode, ccode, start, end):
att = {}
for item in self.attributes[key]["Sa"]:
if item.match(scode, lcode, ccode, start, end, self.relaxed):
att[item.Key] = item.Value
return att
class InventoryModifier(seiscomp.client.Application):
def __init__(self, argc, argv):
seiscomp.client.Application.__init__(self, argc, argv)
self.setMessagingUsername("iModify")
self.rules = None
self.relaxed = False
self.outputFile = None
def _digest(self, tabFilename, rules = None):
if not tabFilename or not os.path.isfile(tabFilename):
raise Exception("Supplied filename is invalid.")
if not rules:
rules = Rules(self.relaxed)
        fd = None
        try:
            fd = open(tabFilename)
            for line in fd:
                line = line.strip()
                if not line or line[0] == "#": continue
                if ":" not in line:
                    raise Exception("Invalid line format '%s'" % line)
                (Type, Content) = line.split(":", 1)
                if Type == "Nw":
                    nw = Nw(Content)
                    key = rules.Nw(nw)
                elif Type == "Na":
                    na = Na(Content)
                    rules.Na(key, na)
                elif Type == "Sa":
                    sa = Sa(Content)
                    rules.Sa(key, sa)
                elif Type == "Ia":
                    ia = Ia(Content)
                    rules.Ia(ia)
                elif Type in ("Sg", "Sr", "Se", "Dl", "Cl", "Ff", "If", "Pz"):
                    raise Exception("Type %s not supported." % Type)
        finally:
            if fd:
                fd.close()
return rules
def validateParameters(self):
outputFile = None
rulesFile = None
if self.commandline().hasOption("rules"):
rulesFile = self.commandline().optionString("rules")
if self.commandline().hasOption("output"):
outputFile = self.commandline().optionString("output")
if self.commandline().hasOption("relaxed"):
self.relaxed = True
if self.commandline().hasOption("inventory-db") and outputFile is None:
print("Cannot send notifiers when loading inventory from file.", file=sys.stderr)
return False
if self.commandline().unrecognizedOptions():
print("Invalid options: ", end=' ', file=sys.stderr)
for i in self.commandline().unrecognizedOptions():
print(i, end=' ', file=sys.stderr)
print("", file=sys.stderr)
return False
if not rulesFile:
print("No rule file was supplied for processing", file=sys.stderr)
return False
if not os.path.isfile(rulesFile):
argv0 = os.path.basename(self.arguments()[0])
print("%s: %s: No such file or directory" % (argv0, rulesFile), file=sys.stderr)
return False
if self.commandline().hasOption("inventory-db"):
self.setDatabaseEnabled(False, False)
self.setMessagingEnabled(False)
self.rules = self._digest(rulesFile, self.rules)
self.outputFile = outputFile
return True
def createCommandLineDescription(self):
seiscomp.client.Application.createCommandLineDescription(self)
self.commandline().addGroup("Rules")
self.commandline().addStringOption("Rules", "rules,r", "Input XML filename")
self.commandline().addOption("Rules", "relaxed,e", "Relax rules for matching NSLC items")
self.commandline().addGroup("Dump")
self.commandline().addStringOption("Dump", "output,o", "Output XML filename")
def initConfiguration(self):
value = seiscomp.client.Application.initConfiguration(self)
self.setLoggingToStdErr(True)
self.setDatabaseEnabled(True, True)
self.setMessagingEnabled(True)
self.setLoadInventoryEnabled(True)
return value
def send(self, *args):
while not self.connection().send(*args):
seiscomp.logging.warning("send failed, retrying")
time.sleep(1)
def send_notifiers(self, group):
Nsize = seiscomp.datamodel.Notifier.Size()
if Nsize > 0:
seiscomp.logging.info("trying to apply %d change%s" % (Nsize,"s" if Nsize != 1 else "" ))
else:
seiscomp.logging.info("no changes to apply")
return 0
Nmsg = seiscomp.datamodel.Notifier.GetMessage(True)
it = Nmsg.iter()
msg = seiscomp.datamodel.NotifierMessage()
maxmsg = 100
sent = 0
mcount = 0
try:
try:
while it.get():
msg.attach(seiscomp.datamodel.Notifier_Cast(it.get()))
mcount += 1
if msg and mcount == maxmsg:
sent += mcount
seiscomp.logging.debug("sending message (%5.1f %%)" % (sent / float(Nsize) * 100.0))
self.send(group, msg)
msg.clear()
mcount = 0
next(it)
except:
pass
finally:
if msg.size():
seiscomp.logging.debug("sending message (%5.1f %%)" % 100.0)
self.send(group, msg)
msg.clear()
seiscomp.logging.info("done")
return mcount
@staticmethod
def _loop(obj, count):
return [ obj(i) for i in range(count) ]
@staticmethod
def _collect(obj):
code = obj.code()
start = datetime.datetime.strptime(obj.start().toString("%Y %m %d %H %M %S"), "%Y %m %d %H %M %S")
        try:
            end = obj.end()
            end = datetime.datetime.strptime(end.toString("%Y %m %d %H %M %S"), "%Y %m %d %H %M %S")
        except Exception:
            end = None
return (code, start, end)
@staticmethod
def _modifyInventory(mode, obj, att):
valid = sc3._findValidOnes(mode)
if not att:
return
# Why repeat the code in basesc3.py (sc3::_fillSc3())?
# What about if there are existing comments/pids - won't
# this code get the count wrong?? *FIXME*
commentNum = 0
for (k,p) in att.items():
try:
if k == 'Comment':
# print('DEBUG: Adding comment', p)
if p.startswith('Grant'):
# 2020: These belong in DOI metadata, not here.
continue
c = seiscomp.datamodel.Comment()
c.setText(p)
c.setId(str(commentNum))
commentNum += 1
obj.add(c)
continue
if k == 'Pid':
print('DEBUG: Adding Pid as comment', p)
c = seiscomp.datamodel.Comment()
(typ, val) = p.split(':', 1)
s = '{"type":"%s", "value":"%s"}' % (typ.upper(), val)
c.setText(s)
c.setId('FDSNXML:Identifier/' + str(commentNum))
commentNum += 1
obj.add(c)
continue
p = valid['attributes'][k]['validator'](p)
getattr(obj, 'set'+k)(p)
            except KeyError:
                import string
                hint = ''
                if k[0] in string.ascii_lowercase:
                    hint = " (try '%s' instead)" % (k[0].upper() + k[1:])
                print("Modifying %s: '%s' is not a valid key%s" % (mode, k, hint), file=sys.stderr)
obj.update()
return
def run(self):
rules = self.rules
iv = seiscomp.client.Inventory.Instance().inventory()
if not rules:
return False
if not iv:
return False
seiscomp.logging.debug("Loaded %d networks" % iv.networkCount())
if self.outputFile is None:
seiscomp.datamodel.Notifier.Enable()
self.setInterpretNotifierEnabled(True)
for net in self._loop(iv.network, iv.networkCount()):
(ncode, nstart, nend) = self._collect(net)
key = rules.findKey(ncode, nstart, nend)
if not key: continue
att = rules.getNetworkAttributes(key)
self._modifyInventory("network", net, att)
seiscomp.logging.info("%s %s" % (ncode, att))
for sta in self._loop(net.station, net.stationCount()):
(scode, sstart, send) = self._collect(sta)
att = rules.getStationAttributes(key, ncode, scode, None, None, sstart, send)
self._modifyInventory("station", sta, att)
if att: seiscomp.logging.info(" %s %s" % (scode, att))
for loc in self._loop(sta.sensorLocation, sta.sensorLocationCount()):
(lcode, lstart, lend) = self._collect(loc)
att = rules.getStationAttributes(key, ncode, scode, lcode, None, lstart, lend)
self._modifyInventory("location", loc, att)
if att: seiscomp.logging.info(" %s %s" % (lcode, att))
for cha in self._loop(loc.stream, loc.streamCount()):
(ccode, cstart, cend) = self._collect(cha)
att = rules.getStationAttributes(key, ncode, scode, lcode, ccode, cstart, cend)
self._modifyInventory("channel", cha, att)
if att: seiscomp.logging.info(" %s %s" % (ccode, att))
for sensor in self._loop(iv.sensor, iv.sensorCount()):
att = rules.getInstrumentsAttributes(sensor.name(), "Se")
self._modifyInventory("sensor", sensor, att)
for datalogger in self._loop(iv.datalogger, iv.dataloggerCount()):
att = rules.getInstrumentsAttributes(datalogger.name(), "Dl")
self._modifyInventory("datalogger", datalogger, att)
return True
def done(self):
if self.outputFile:
ar = seiscomp.io.XMLArchive()
ar.create(self.outputFile)
ar.setFormattedOutput(True)
ar.writeObject(seiscomp.client.Inventory.Instance().inventory())
ar.close()
else:
self.send_notifiers("INVENTORY")
seiscomp.client.Application.done(self)
if __name__ == "__main__":
app = InventoryModifier(len(sys.argv), sys.argv)
sys.exit(app())

BIN
bin/tau_remodl Executable file

Binary file not shown.

BIN
bin/tau_setbrn Executable file

Binary file not shown.

BIN
bin/timeout Executable file

Binary file not shown.

BIN
bin/trylock Executable file

Binary file not shown.

BIN
bin/waitlock Executable file

Binary file not shown.

17
etc/defaults/fdsnws.cfg Normal file
View File

@ -0,0 +1,17 @@
# Defines a list of modules loaded at startup.
plugins = ${plugins}, fdsnxml
# SeisComP applications access waveform data through the RecordStream
# interface. Please consult the SeisComP documentation for a list of supported
# services and their configuration.
# This parameter configures the RecordStream URL, format:
# [service://]location[#type]. "service" is the name of the recordstream
# implementation. If "service" is not given "file://" is implied.
recordstream = sdsarchive://@ROOTDIR@/var/lib/archive
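# Other RecordStream examples (assumptions; consult the SeisComP
# documentation for the services available in your installation):
#recordstream = slink://localhost:18000
#recordstream = fdsnws://geofon.gfz-potsdam.de:80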
# Set the number of bytes to buffer for each chunk of waveform data served
# to the client. The lower the buffer the higher the overhead of Python Twisted.
# The higher the buffer the higher the memory usage per request. 100kB seems
# to be a good trade-off.
recordBulkSize = 102400

View File

@ -0,0 +1,5 @@
# UDP port for receiving GDRT messages. By default port 9999 will be used.
plugins.gdrt.udpport = 9999
# Location of station list file.
plugins.gdrt.stationsFrom = stations.txt

Some files were not shown because too many files have changed in this diff