[installation] Change to nightly

This commit is contained in:
2025-10-30 12:04:59 +01:00
parent 2ff097f9d1
commit a31bc45cce
1441 changed files with 60368 additions and 56360 deletions

Binary file not shown.

Binary file not shown.

BIN
bin/ew2sc

Binary file not shown.

View File

@ -1,28 +0,0 @@
#!/usr/bin/env seiscomp-python
from __future__ import print_function
import sys
from seiscomp import mseedlite as mseed
open_files = {}
if len(sys.argv) != 2:
print("Usage: extr_file FILE")
sys.exit(1)
for rec in mseed.Input(open(sys.argv[1], "rb")):
oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)
if oname not in open_files:
postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year,
rec.begin_time.timetuple()[7], rec.begin_time.hour,
rec.begin_time.minute)
open_files[oname] = open(oname + postfix, "ab")
ofile = open_files[oname]
ofile.write(rec.header + rec.data)
for oname in open_files:
open_files[oname].close()

View File

@ -40,7 +40,7 @@ import seiscomp.logging
import seiscomp.client
import seiscomp.system
from seiscomp.math import KM_OF_DEGREE
from seiscomp.math import WGS84_KM_OF_DEGREE
from seiscomp.fdsnws.utils import isRestricted, u_str, b_str
from seiscomp.fdsnws.dataselect import (
@ -72,6 +72,14 @@ from seiscomp.fdsnws.http import (
)
from seiscomp.fdsnws.log import Log
try:
from seiscomp.fdsnws.jwt import JWT
_jwtSupported = True
except ImportError:
_jwtSupported = False
def logSC3(entry):
try:
@ -411,6 +419,14 @@ class FDSNWS(seiscomp.client.Application):
self._access = None
self._checker = None
self._jwtEnabled = False
self._jwtIssuers = ["https://geofon.gfz.de/eas2", "https://login.earthscope.org/"]
self._jwtAudience = ["eas", "fdsn"]
self._jwtAlgorithms = ["RS256"]
self._jwtUpdateMin = 300
self._jwtUpdateMax = 86400
self._jwt = None
self._requestLog = None
self.__reloadRequested = False
self.__timeInventoryLoaded = None
@ -745,6 +761,42 @@ class FDSNWS(seiscomp.client.Application):
except Exception:
pass
# enable JWT extension?
try:
self._jwtEnabled = self.configGetBool("jwt.enable")
except Exception:
pass
# JWT issuers
try:
self._jwtIssuers = self.configGetStrings("jwt.issuers")
except Exception:
pass
# JWT audience
try:
self._jwtAudience = self.configGetStrings("jwt.audience")
except Exception:
pass
# JWT algorithms
try:
self._jwtAlgorithms = self.configGetStrings("jwt.algorithms")
except Exception:
pass
# JWT minimum update period
try:
self._jwtUpdateMin = self.configGetStrings("jwt.updateMinSeconds")
except Exception:
pass
# JWT maximum update period
try:
self._jwtUpdateMax = self.configGetStrings("jwt.updateMaxSeconds")
except Exception:
pass
# If the database connection is passed via command line or
# configuration file then messaging is disabled. Messaging is only used
# to get the configured database connection URI.
@ -878,7 +930,7 @@ Execute on command line with debug output
if self._invCoordinatePrecision is not None:
invCoordinatePrecisionStr = (
f"{self._invCoordinatePrecision} decimal places (≅"
f"{int(KM_OF_DEGREE * 1000 / 10**self._invCoordinatePrecision)}m)"
f"{int(WGS84_KM_OF_DEGREE * 1000 / 10**self._invCoordinatePrecision)}m)"
)
else:
invCoordinatePrecisionStr = "unlimited"
@ -926,6 +978,13 @@ configuration read:
auth
enabled : {self._authEnabled}
gnupgHome : {self._authGnupgHome}
JWT
enabled : {self._jwtEnabled}
issuers : {self._jwtIssuers}
audience : {self._jwtAudience}
algorithms : {self._jwtAlgorithms}
updateMinSeconds : {self._jwtUpdateMin}
updateMaxSeconds : {self._jwtUpdateMax}
requestLog : {self._requestLogFile}"""
)
@ -937,6 +996,17 @@ configuration read:
seiscomp.logging.error("all services disabled through configuration")
return None
if self._jwtEnabled:
if not _jwtSupported:
seiscomp.logging.error(
"JWT is not supported due to missing dependencies"
)
return None
self._jwt = JWT(
self._jwtIssuers, self._jwtAudience, self._jwtAlgorithms, self._jwtUpdateMin, self._jwtUpdateMax
)
# access logger if requested
if self._accessLogFile:
self._accessLog = Log(self._accessLogFile)
@ -1019,9 +1089,16 @@ configuration read:
dataselect.putChild(b"1", dataselect1)
# query
dataselect1.putChild(
b"query", FDSNDataSelect(dataSelectInv, self._recordBulkSize)
)
if self._jwtEnabled:
authSession = self._jwt.getAuthSessionWrapper(
FDSNDataSelect, dataSelectInv, self._recordBulkSize, self._access
)
dataselect1.putChild(b"query", authSession)
else:
dataselect1.putChild(
b"query", FDSNDataSelect(dataSelectInv, self._recordBulkSize)
)
# queryauth
if self._authEnabled:
@ -1050,7 +1127,8 @@ configuration read:
dataselect1.putChild(b"builder", fileRes)
if self._authEnabled:
from seiscomp.fdsnws.http import AuthResource
from seiscomp.fdsnws.authresource import AuthResource
dataselect1.putChild(
b"auth",
AuthResource(DataSelectVersion, self._authGnupgHome, self._userdb),
@ -1180,7 +1258,13 @@ configuration read:
availability.putChild(b"1", availability1)
# query
availability1.putChild(b"query", FDSNAvailabilityQuery())
if self._jwtEnabled:
authSession = self._jwt.getAuthSessionWrapper(
FDSNAvailabilityQuery, self._access
)
availability1.putChild(b"query", authSession)
else:
availability1.putChild(b"query", FDSNAvailabilityQuery())
# queryauth
if self._authEnabled:
@ -1192,7 +1276,13 @@ configuration read:
availability1.putChild(b"queryauth", authSession)
# extent
availability1.putChild(b"extent", FDSNAvailabilityExtent())
if self._jwtEnabled:
authSession = self._jwt.getAuthSessionWrapper(
FDSNAvailabilityExtent, self._access
)
availability1.putChild(b"extent", authSession)
else:
availability1.putChild(b"extent", FDSNAvailabilityExtent())
# extentauth
if self._authEnabled:

Binary file not shown.

View File

@ -1,105 +0,0 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import io
from seiscomp.legacy.fseed import *
from seiscomp.legacy.db.seiscomp3 import sc3wrap
from seiscomp.legacy.db.seiscomp3.inventory import Inventory
import seiscomp.datamodel
import seiscomp.io
ORGANIZATION = "EIDA"
def iterinv(obj):
    """Yield every second-level value of a two-level mapping of mappings."""
    for inner in obj.values():
        yield from inner.values()
def main():
    """Convert a SeisComP inventory XML file to a SEED dataless volume.

    Usage: inv2dlsv [in_xml [out_dataless]] — a missing argument or "-"
    means stdin for the input and stdout for the output.

    Returns 0 on success; raises IOError on an unreadable input file and
    TypeError on input that is not a SeisComP inventory document.
    """
    # sys.argv always holds at least the program name, so only the upper
    # bound needs checking (the original "len(sys.argv) < 1" was dead code).
    if len(sys.argv) > 3:
        print("Usage: inv2dlsv [in_xml [out_dataless]]", file=sys.stderr)
        return 1

    inFile = sys.argv[1] if len(sys.argv) > 1 else "-"
    out = sys.argv[2] if len(sys.argv) > 2 else ""

    # Disable database access in the legacy wrappers; everything is read
    # from the XML document.
    sc3wrap.dbQuery = None

    ar = seiscomp.io.XMLArchive()
    if not ar.open(inFile):
        raise IOError(inFile + ": unable to open")

    obj = ar.readObject()
    if obj is None:
        raise TypeError(inFile + ": invalid format")

    sc3inv = seiscomp.datamodel.Inventory.Cast(obj)
    if sc3inv is None:
        raise TypeError(inFile + ": invalid format")

    inv = Inventory(sc3inv)
    inv.load_stations("*", "*", "*", "*")
    inv.load_instruments()

    vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False)

    # Walk the network -> station -> location -> stream hierarchy and add
    # every channel to the SEED volume.
    for net in iterinv(inv.network):
        for sta in iterinv(net.station):
            for loc in iterinv(sta.sensorLocation):
                for strm in iterinv(loc.stream):
                    try:
                        vol.add_chan(
                            net.code,
                            sta.code,
                            loc.code,
                            strm.code,
                            strm.start,
                            strm.end,
                        )
                    except SEEDError as exc:
                        # Report and skip channels the SEED writer rejects.
                        print(
                            f"Error ({net.code},{sta.code},{loc.code},{strm.code}): {str(exc)}",
                            file=sys.stderr,
                        )

    if not out or out == "-":
        # No output file requested: write the binary volume to stdout.
        output = io.BytesIO()
        vol.output(output)
        stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout
        stdout.write(output.getvalue())
        stdout.flush()
        output.close()
    else:
        # Fix: use the already-parsed "out" instead of re-reading
        # sys.argv[2] (kept both code paths consistent).
        with open(out, "wb") as fd:
            vol.output(fd)

    return 0
# Script entry point: exit with main()'s return code; any uncaught
# exception is reported on stderr and mapped to exit status 1.
if __name__ == "__main__":
    try:
        sys.exit(main())
    except Exception as e:
        # Top-level boundary: report the failure instead of a traceback.
        print(f"Error: {str(e)}", file=sys.stderr)
        sys.exit(1)

Binary file not shown.

Binary file not shown.

View File

@ -107,19 +107,19 @@ def rt_simul(f, speed=1.0, jump=0.0, delaydict=None):
# ------------------------------------------------------------------------------
def usage():
print(
"""Usage:
msrtsimul [options] file
f"""Usage:
{os.path.basename(__file__)} [options] file
miniSEED real-time playback and simulation
msrtsimul reads sorted (and possibly multiplexed) miniSEED files and writes
individual records in pseudo-real-time. This is useful e.g. for testing and
{os.path.basename(__file__)} reads sorted (and possibly multiplexed) miniSEED files and
writes individual records in pseudo-real-time. This is useful e.g. for testing and
simulating data acquisition. Output is
$SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used.
Verbosity:
-h, --help Display this help message
-v, --verbose Verbose mode
-h, --help Display this help message.
-v, --verbose Verbose mode.
Playback:
-j, --jump Minutes to skip (float).
@ -131,14 +131,15 @@ Playback:
-m --mode Choose between 'realtime' and 'historic'.
-s, --speed Speed factor (float).
--test Test mode.
-u, --unlimited Allow miniSEED records which are not 512 bytes
-u, --unlimited Allow miniSEED records which are not 512 bytes. By default
seedlink supports 512 bytes only.
Examples:
Play back miniSEED waveforms in real time with verbose output
msrtsimul -v data.mseed
{os.path.basename(__file__)} -v data.mseed
Play back miniSEED waveforms in real time skipping the first 1.5 minutes
msrtsimul -j 1.5 data.mseed
{os.path.basename(__file__)} -j 1.5 data.mseed
"""
)
@ -170,7 +171,7 @@ def main():
"help",
"mode=",
"seedlink=",
"unlimited"
"unlimited",
],
)
except GetoptError:
@ -278,7 +279,7 @@ Check if SeedLink is running and configured for real-time playback.
time_diff = None
print(
f"Starting msrtsimul at {datetime.datetime.utcnow()}",
f"Starting msrtsimul at {datetime.datetime.now(datetime.UTC)}",
file=sys.stderr,
)
for rec in inp:
@ -292,7 +293,7 @@ starting on {str(rec.begin_time)}: length != 512 Bytes.",
if time_diff is None:
ms = 1000000.0 * (rec.nsamp / rec.fsamp)
time_diff = (
datetime.datetime.utcnow()
datetime.datetime.now(datetime.UTC).replace(tzinfo=None)
- rec.begin_time
- datetime.timedelta(microseconds=ms)
)

View File

@ -84,7 +84,7 @@ def main():
resp = seiscomp.datamodel.ResponsePAZ_Create()
resp.setType("A")
resp.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"])
resp.setGain(args.gain / header["dataScale"])
resp.setGainFrequency(0)
resp.setNormalizationFactor(1)
resp.setNormalizationFrequency(0)
@ -93,9 +93,9 @@ def main():
inv.add(resp)
sensor = seiscomp.datamodel.Sensor_Create()
sensor.setName(header["instrument"])
sensor.setDescription(header["instrument"])
sensor.setUnit(header["sensitivities"][0]["unit"])
sensor.setName(header["experiment"])
sensor.setDescription(header["measurement"])
sensor.setUnit(header["unit"])
sensor.setResponse(resp.publicID())
inv.add(sensor)
@ -131,8 +131,8 @@ def main():
cha = seiscomp.datamodel.Stream_Create()
cha.setCode(args.channel)
cha.setStart(net.start())
cha.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"])
cha.setGainUnit(header["sensitivities"][0]["unit"])
cha.setGain(args.gain / header["dataScale"])
cha.setGainUnit(header["unit"])
cha.setGainFrequency(0)
cha.setSensor(sensor.publicID())
cha.setDatalogger(datalogger.publicID())

BIN
bin/ql2sc

Binary file not shown.

Binary file not shown.

View File

@ -13,11 +13,14 @@
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import time
import sys
import os
import time
import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.logging
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.logging
from seiscomp.scbulletin import Bulletin, stationCount
@ -40,8 +43,8 @@ class ProcAlert(seiscomp.client.Application):
self.minPickCount = 25
self.procAlertScript = ""
ep = seiscomp.datamodel.EventParameters()
self.bulletin = None
self.cache = None
def createCommandLineDescription(self):
try:
@ -61,7 +64,8 @@ class ProcAlert(seiscomp.client.Application):
self.commandline().addStringOption(
"Publishing",
"procalert-script",
"Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2",
"Specify the script to publish an event. The ProcAlert file and the "
"event id are passed as parameter $1 and $2",
)
self.commandline().addOption(
"Publishing", "test", "Test mode, no messages are sent"
@ -174,13 +178,14 @@ class ProcAlert(seiscomp.client.Application):
return False
def send_procalert(self, txt, evid):
if self.procAlertScript:
tmp = f"/tmp/yyy{evid.replace('/', '_').replace(':', '-')}"
f = file(tmp, "w")
f.write(f"{txt}")
f.close()
if not self.procAlertScript:
return
os.system(self.procAlertScript + " " + tmp + " " + evid)
file = f"/tmp/yyy{evid.replace('/', '_').replace(':', '-')}"
with open(file, "w", encoding="utf8") as f:
print(txt, file=f)
os.system(self.procAlertScript + " " + file + " " + evid)
def coordinates(self, org):
return org.latitude().value(), org.longitude().value(), org.depth().value()
@ -194,7 +199,7 @@ class ProcAlert(seiscomp.client.Application):
seiscomp.logging.error("suspicious region/depth - ignored")
publish = False
if stationCount(org) < self.minPickCount:
if stationCount(org, 0.5) < self.minPickCount:
seiscomp.logging.error("too few picks - ignored")
publish = False

View File

@ -1 +0,0 @@
scml2inv

View File

@ -39,6 +39,8 @@ class ObjectAlert(seiscomp.client.Application):
self.addMessagingSubscription("EVENT")
self.addMessagingSubscription("LOCATION")
self.addMessagingSubscription("MAGNITUDE")
self.addMessagingSubscription("AMPLITUDE")
self.addMessagingSubscription("PICK")
self.setAutoApplyNotifierEnabled(True)
self.setInterpretNotifierEnabled(True)
@ -76,50 +78,57 @@ class ObjectAlert(seiscomp.client.Application):
self.commandline().addOption(
"Generic",
"first-new",
"calls an event a new event when it is seen the first time",
"Calls an event a new event when it is seen the first time.",
)
self.commandline().addGroup("Alert")
self.commandline().addStringOption(
"Alert", "amp-type", "amplitude type to listen to", self._ampType
"Alert",
"amp-type",
"Amplitude type to listen to.",
self._ampType,
)
self.commandline().addStringOption(
"Alert",
"pick-script",
"script to be called when a pick arrived, network-, station code pick "
"publicID are passed as parameters $1, $2, $3 and $4",
"Script to be called when a pick arrived, network-, station code pick "
"publicID are passed as parameters $1, $2, $3 and $4.",
)
self.commandline().addStringOption(
"Alert",
"amp-script",
"script to be called when a station amplitude arrived, network-, station "
"code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4",
"Script to be called when a station amplitude arrived, network-, station "
"code, amplitude and amplitude publicID are passed as parameters $1, $2, "
"$3 and $4.",
)
self.commandline().addStringOption(
"Alert",
"alert-script",
"script to be called when a preliminary origin arrived, latitude and "
"longitude are passed as parameters $1 and $2",
"Script to be called when a preliminary origin arrived, latitude and "
"longitude are passed as parameters $1 and $2.",
)
self.commandline().addStringOption(
"Alert",
"event-script",
"script to be called when an event has been declared; the message string, a "
"flag (1=new event, 0=update event), the EventID, the arrival count and the "
"magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5",
"Script to be called when an event has been declared; the message string, "
"a flag (1=new event, 0=update event), the EventID, the arrival count and "
"the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 "
"and $5.",
)
self.commandline().addGroup("Cities")
self.commandline().addStringOption(
"Cities",
"max-dist",
"maximum distance for using the distance from a city to the earthquake",
"Maximum distance for using the distance from a city to the earthquake.",
str(self._citiesMaxDist),
)
self.commandline().addStringOption(
"Cities",
"min-population",
"minimum population for a city to become a point of interest",
"Minimum population for a city to become a point of interest.",
str(self._citiesMinPopulation),
)
self.commandline().addGroup("Debug")
self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
self.commandline().addStringOption("Debug", "eventid,E", "Specify event ID.")
return True
def init(self):
@ -174,7 +183,7 @@ class ObjectAlert(seiscomp.client.Application):
phaseStreams = self.configGetStrings("constraints.phaseStreams")
for item in phaseStreams:
rule = item.strip()
# rule is NET.STA.LOC.CHA and the special charactes ? * | ( ) are allowed
# allowed: NET.STA.LOC.CHA and the special characters ? * | ( )
if not re.fullmatch(r"[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+", rule):
seiscomp.logging.error(
f"Wrong stream ID format in `constraints.phaseStreams`: {item}"
@ -559,7 +568,8 @@ class ObjectAlert(seiscomp.client.Application):
break
if not matched:
seiscomp.logging.debug(
f" + stream ID {waveformID} does not match constraints.phaseStreams rules"
f" + stream ID {waveformID} does not match "
"constraints.phaseStreams rules"
)
return
@ -568,7 +578,8 @@ class ObjectAlert(seiscomp.client.Application):
self.notifyPick(obj)
else:
seiscomp.logging.debug(
f" + phase hint {phaseHint} does not match '{self._phaseHints}'"
f" + phase hint {phaseHint} does not match "
f"'{self._phaseHints}'"
)
else:
seiscomp.logging.debug(
@ -739,6 +750,11 @@ class ObjectAlert(seiscomp.client.Application):
seiscomp.logging.debug(f"desc: {dsc}")
try:
evType = seiscomp.datamodel.EEventTypeNames.name(evt.type())
except Exception:
evType = "earthquake"
dep = org.depth().value()
now = seiscomp.core.Time.GMT()
otm = org.time().value()
@ -756,14 +772,10 @@ class ObjectAlert(seiscomp.client.Application):
dt = f"{int(dt)} seconds ago"
if preliminary:
message = f"earthquake, XXL, preliminary, {dt}, {dsc}"
message = f"{evType}, XXL, preliminary, {dt}, {dsc}"
else:
message = "earthquake, %s, %s, %s, depth %d kilometers" % (
dt,
dsc,
mag,
int(dep + 0.5),
)
message = f"{evType}, {dt}, {dsc}, {mag}, depth {int(dep + 0.5)} kilometers"
seiscomp.logging.info(message)
if not self._eventScript:

BIN
bin/scamp

Binary file not shown.

Binary file not shown.

Binary file not shown.

183
bin/scart
View File

@ -300,10 +300,15 @@ class StreamIterator:
self.file = workdir + file
# print "Starting at file %s" % self.file
self.record, self.index = ar.findIndex(begin, end, self.file)
if self.record:
self.current = self.record.startTime()
self.currentEnd = self.record.endTime()
while begin < end:
self.record, self.index = ar.findIndex(begin, end, self.file)
if self.record:
self.current = self.record.startTime()
self.currentEnd = self.record.endTime()
break
begin = self.archive.stepTime(begin)
workdir, file = ar.location(begin, net, sta, loc, cha)
self.file = workdir + file
def __next__(self):
while True:
@ -458,7 +463,8 @@ class RecordRenamer:
def printRules(self):
for r in self.renameRules:
print(
f"Renaming {(r.pattern.pattern if r.pattern is not None else '*.*.*.*')} "
"Renaming "
f"{(r.pattern.pattern if r.pattern is not None else '*.*.*.*')} "
f"to {r.newNet}.{r.newSta}.{r.newLoc}.{r.newCha}",
file=sys.stderr,
)
@ -805,10 +811,9 @@ Usage:
{os.path.basename(__file__)} -d [options] [archive]
{os.path.basename(__file__)} --check [options] [archive]
Import miniSEED waveforms or dump records from an SDS structure, sort them,
modify the time and replay them. Also check files and archives.
For Import and Dump mode the data streams can be selected in three ways
using the combinations of options: -n -c -t or --nslc -t or --list
Import or export miniSEED waveforms into/from an SDS structure. Also check files and
archives. Data streams can be selected in three ways using the combinations of options:
-n -c -t or --nslc -t or --list.
Verbosity:
-h, --help Display this help message.
@ -843,7 +848,7 @@ Processing:
2007-03-28 15:48;2007-03-28 16:18;GE.LAST.*.*
2007-03-28 15:48;2007-03-28 16:18;GE.PMBI..BH?
-m, --modify Dump mode: Modify the record time for real time playback
when dumping.
when dumping. Implicitly sets the speed parameter to 1.
-n arg Import, dump mode: Data stream selection as a comma separated
list "stream1,stream2,streamX" where each stream can be NET or
NET.STA or NET.STA.LOC or NET.STA.LOC.CHA. If CHA is omitted,
@ -858,16 +863,18 @@ Processing:
A rule is "[match-stream:]rename-stream" and match-stream
is optional. match-stream and rename-stream are in the
"NET.STA.LOC.CHA" format. match-stream supports special
charactes "?" "*" "|" "(" ")". rename-stream supports the
characters "?" "*" "|" "(" ")". rename-stream supports the
special character "-" that can be used in place of NET, STA,
LOC, CHA codes with the meaning of not renaming those.
"-" can also be used as the last character in CHA code.
Multiple rules can be provided as a comma separated list
or by providing multiple --rename options.
-s, --sort Dump mode: Sort records.
--speed arg Dump mode: Specify the speed to dump the records. A value
of 0 means no delay. Otherwise speed is a multiplier of
the real time difference between the records.
--speed arg Dump mode: Specify the speed to dump the records as a
multiplier of the real time difference between the records.
A value > 1 will speed up the playback while a value > 0
and < 1 will slow the playback down. This option implies
sorting of the records.
-t, --time-window t1~t2
Import, dump mode: UTC time window filter to be applied to
the data streams. Format: "StartTime~EndTime". Example:
@ -886,7 +893,7 @@ Output:
--print-streams.
--with-filecheck Import mode: Check all accessed files after import. Unsorted
or unreadable files are reported to stderr. Checks are only
complete for files containing exactly one stream. More
complete for files containing exactly one stream. More
complete checks are made with scmssort.
--with-filename Import mode: Print all accessed files to sterr after import.
@ -901,11 +908,16 @@ Import miniSEED data into a SDS archive, check all modified files for errors
{os.path.basename(__file__)} -I file.mseed --with-filecheck $SEISCOMP_ROOT/var/lib/archive
Import miniSEED data from FDSNWS into a SDS archive for specific time range and streams
{os.path.basename(__file__)} -I fdsnws://geofon.gfz-potsdam.de \
{os.path.basename(__file__)} -I fdsnws://geofon.gfz.de \
-t 2022-03-28T15:48~2022-03-28T16:18 --nslc list.file $SEISCOMP_ROOT/var/lib/archive
Check an archive for files with out-of-order records
{os.path.basename(__file__)} --check /archive
Play back miniSEED data from archive at normal speed as in real time and pipe \
them into another application, here scrttv
{os.path.basename(__file__)} -dmv -t 2026-05-01~2026-05-02 /archive | scrttv -I - --offline --no-inventory
"""
@ -964,7 +976,7 @@ def main():
# default = stdin
recordURL = "file://-"
speed = 0
speed = None
stdout = False
outputFile = None
ignoreRecords = False
@ -1038,7 +1050,23 @@ def main():
else:
usage(exitcode=1)
if not dump and not checkSDS and not importMode:
if dump:
if modifyTime and speed is None:
speed = 1
sort = True
elif speed is not None:
if speed <= 0:
print("'--speed' must be greater than 0", file=sys.stderr)
return -1
sort = True
if modifyTime and speed != 1:
print(
"Modify time requested with '--speed' value other than 1. Gaps "
"or overlaps will be created.",
file=sys.stderr,
)
elif not checkSDS and not importMode:
importMode = True
if files:
@ -1116,18 +1144,21 @@ def main():
print(f"Stream file: '{nslcFile}'", file=sys.stderr)
if dump:
if not sort and not modifyTime:
print("Mode: DUMP", file=sys.stderr)
elif sort and not modifyTime:
print("Mode: DUMP & SORT", file=sys.stderr)
elif not sort and modifyTime:
print("Mode: DUMP & MODIFY_TIME", file=sys.stderr)
elif sort and modifyTime:
print("Mode: DUMP & SORT & MODIFY_TIME", file=sys.stderr)
flags = []
if speed:
flags.append(f"speed={speed}")
if sort:
flags.append("sort")
if modifyTime:
flags.append("modify time")
flagStr = ""
if flags:
flagStr = f" ({', '.join(flags)})"
print(f"Mode: DUMP{flagStr}", file=sys.stderr)
print(f"Archive: {archiveDirectory}", file=sys.stderr)
if checkSDS:
print("Mode: Check", file=sys.stderr)
print("Mode: CHECK", file=sys.stderr)
if importMode:
print("Mode: IMPORT", file=sys.stderr)
@ -1157,7 +1188,7 @@ def main():
else:
out = sys.stdout.buffer
# list file witht times takes priority over nslc list
# list file with times takes priority over nslc list
if listFile:
nslcFile = None
@ -1174,7 +1205,8 @@ def main():
for stream in streamFilter:
if stream.tmin >= stream.tmax:
print(
f"Info: ignoring {stream.net}.{stream.sta}.{stream.loc}.{stream.cha} - "
"Info: "
f"ignoring {stream.net}.{stream.sta}.{stream.loc}.{stream.cha} - "
f"start {stream.tmin} after end {stream.tmax}",
file=sys.stderr,
)
@ -1228,8 +1260,9 @@ def main():
f"{stream.cha} {stream.tmin} - {stream.tmax}",
file=sys.stderr,
)
stime = None
realTime = seiscomp.core.Time.GMT()
firstRecordEndTime = None
startTime = seiscomp.core.Time.UTC()
if sort:
records = Sorter(archiveIterator)
@ -1245,36 +1278,34 @@ def main():
if ignoreRecords:
continue
etime = seiscomp.core.Time(rec.endTime())
etime = rec.endTime()
if stime is None:
stime = etime
if not firstRecordEndTime:
firstRecordEndTime = seiscomp.core.Time(etime)
if verbose:
print(f"First record: {stime.iso()}", file=sys.stderr)
print(
f"First record end time: {firstRecordEndTime.iso()}",
file=sys.stderr,
)
dt = etime - stime
if speed:
dt = (etime - firstRecordEndTime).length()
playTime = startTime + seiscomp.core.TimeSpan(dt / speed)
now = seiscomp.core.Time.GMT()
if modifyTime:
recLength = etime - rec.startTime()
rec.setStartTime(seiscomp.core.Time(playTime) - recLength)
if speed > 0:
playTime = (realTime + dt).toDouble() / speed
else:
playTime = now.toDouble()
sleepTime = playTime - now.toDouble()
if sleepTime > 0:
time.sleep(sleepTime)
if modifyTime:
recLength = etime - rec.startTime()
rec.setStartTime(seiscomp.core.Time(playTime) - recLength)
sleepSeconds = (playTime - seiscomp.core.Time.UTC()).length()
if sleepSeconds > 0:
time.sleep(sleepSeconds)
if verbose:
etime = rec.endTime()
print(
f"{rec.streamID()} time current: "
f"{seiscomp.core.Time.LocalTime().iso()} start: "
f"{rec.startTime().iso()} end: {etime.iso()}",
f"{rec.streamID()} "
f"current time: {seiscomp.core.Time.LocalTime().iso()}"
f", rec start: {rec.startTime().iso()}"
f", rec end: {rec.startTime().iso()}",
file=sys.stderr,
)
@ -1529,7 +1560,8 @@ def main():
f = open(archiveDirectory + file, "ab")
except BaseException:
print(
f"File {archiveDirectory + file} could not be opened for writing",
f"File {archiveDirectory + file} could not be opened "
f"for writing",
file=sys.stderr,
)
return -1
@ -1605,8 +1637,8 @@ def main():
print(fileName, file=sys.stderr)
if printStreams and streamDict:
minTime = seiscomp.core.Time.GMT()
maxTime = str2time("1970-01-01 00:00:00")
minTime = None
maxTime = None
totalRecs = 0
totalSamples = 0
totalChans = set()
@ -1624,8 +1656,12 @@ def main():
file=sys.stderr,
)
maxTime = max(maxTime, str2time(end))
minTime = min(minTime, str2time(start))
if minTime:
minTime = min(minTime, str2time(start))
maxTime = max(maxTime, str2time(end))
else:
minTime = str2time(start)
maxTime = str2time(end)
totalChans.add(key)
totalNetworks.add(key.split(".")[0])
@ -1637,28 +1673,17 @@ def main():
"# Summary",
file=sys.stderr,
)
if minTime and maxTime:
print(
f"# time range: {minTime.iso()} - {maxTime.iso()}",
file=sys.stderr,
)
print(
f"# time range: {minTime.iso()} - {maxTime.iso()}",
file=sys.stderr,
)
print(
f"# networks: {len(totalNetworks)}",
file=sys.stderr,
)
print(
f"# stations: {len(totalStations)}",
file=sys.stderr,
)
print(
f"# streams: {len(totalChans)}",
file=sys.stderr,
)
print(
f"# records: {totalRecs}",
file=sys.stderr,
)
print(
f"# samples: {totalSamples}",
f""""# networks: {len(totalNetworks)}
# stations: {len(totalStations)}
# streams: {len(totalChans)}
# records: {totalRecs}
# samples: {totalSamples}""",
file=sys.stderr,
)

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
bin/sccnv

Binary file not shown.

Binary file not shown.

BIN
bin/scdb

Binary file not shown.

View File

@ -81,25 +81,20 @@ class MySQLDB(QueryInterface):
return tmp_tables
def deleteObjectQuery(self, *v):
if v[0]:
q = (
"delete "
+ v[0]
+ " from "
+ ", ".join(v)
+ " where "
+ v[0]
+ "._oid="
+ v[1]
+ "._oid and "
)
else:
q = "delete " + v[1] + " from " + ", ".join(v[1:]) + " where "
q = (
"delete "
+ v[0]
+ " from "
+ ", ".join(v)
+ " where "
+ v[0]
+ "._oid="
+ v[1]
+ "._oid"
)
for i in range(1, len(v) - 1):
if i > 1:
q += " and "
q += v[i] + "._oid=" + v[i + 1] + "._oid"
q += " and " + v[i] + "._oid=" + v[i + 1] + "._oid"
return q
@ -211,25 +206,20 @@ class PostgresDB(QueryInterface):
return tmp_tables
def deleteObjectQuery(self, *v):
if v[0]:
q = (
"delete from "
+ v[0]
+ " using "
+ ", ".join(v[1:])
+ " where "
+ v[0]
+ "._oid="
+ v[1]
+ "._oid and "
)
else:
q = "delete from " + v[1] + " using " + ", ".join(v[2:]) + " where "
q = (
"delete from "
+ v[0]
+ " using "
+ ", ".join(v[1:])
+ " where "
+ v[0]
+ "._oid="
+ v[1]
+ "._oid"
)
for i in range(1, len(v) - 1):
if i > 1:
q += " and "
q += v[i] + "._oid=" + v[i + 1] + "._oid"
q += " and " + v[i] + "._oid=" + v[i + 1] + "._oid"
return q
@ -333,6 +323,8 @@ class DBCleaner(seiscomp.client.Application):
self._invertMode = False
self._stripEP = True
self._stripQC = True
self._keepModes = [] # Array with modes to keep
self._keepStatus = [] # Array with status to keep
self._steps = 0
self._currentStep = 0
@ -368,6 +360,18 @@ class DBCleaner(seiscomp.client.Application):
"Event-IDs to keep in the database. Combining with 'qc-only' "
"is invalid.",
)
self.commandline().addStringOption(
"Objects",
"keep-event-modes",
"Keep all events where is evaluation mode of the preferred origin is "
"one of the given modes."
)
self.commandline().addStringOption(
"Objects",
"keep-event-status",
"Keep all events where is evaluation status of the preferred origin is "
"one of the given status."
)
self.commandline().addOption(
"Objects",
"qc-only,Q",
@ -473,7 +477,7 @@ class DBCleaner(seiscomp.client.Application):
f"""Usage:
{os.path.basename(__file__)} [options]
Remove event and waveform quality parameters from the database in a timespan. Use
Remove event and waveform quality parameters from the database in a timespan. Use
scardac for removing data availability parameters."""
)
@ -543,6 +547,19 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
except RuntimeError:
pass
try:
status = self.commandline().optionString("keep-event-status")
self._keepStatus = [s.strip() for s in status.split(",")]
print(status, self._keepStatus)
except RuntimeError:
pass
try:
modes = self.commandline().optionString("keep-event-modes")
self._keepModes = [m.strip() for m in modes.split(",")]
except RuntimeError:
pass
try:
dateTime = self.commandline().optionString("datetime")
except RuntimeError:
@ -694,6 +711,11 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
self.beginMessage("Search objects")
if not self.runCommand(tmp_object):
return False
tmp_object = "create index idx_oid on tmp_object(_oid)"
if not self.runCommand(tmp_object):
return False
self.endMessage(self.globalCount("tmp_object"))
for table in tables:
@ -783,6 +805,7 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
self.beginMessage("Deleting waveform quality parameters")
if not self.runCommand(
self._query.deleteObjectQuery("Object", "WaveformQuality")
+ " and "
+ timeRangeSelection(f"WaveformQuality.{self.cnvCol('end')}")
):
return False
@ -822,9 +845,28 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
+ " not in ('%s')" % "','".join(self._keepEvents)
)
if len(self._keepModes) > 0:
old_events += (
" and Origin."
+ self.cnvCol("evaluationMode")
+ " not in ('%s')" % "','".join(self._keepModes)
)
if len(self._keepStatus) > 0:
old_events += (
" and Origin."
+ self.cnvCol("evaluationStatus")
+ " not in ('%s')" % "','".join(self._keepStatus)
)
self.beginMessage("Find old events")
if not self.runCommand(old_events):
return False
old_events = "create index idx_oid on old_events(_oid)"
if not self.runCommand(old_events):
return False
self.endMessage(self.globalCount("old_events"))
# Delete OriginReferences of old events
@ -879,6 +921,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
self.beginMessage("Find unassociated focal mechanisms")
if not self.runCommand(tmp_fm):
return False
tmp_fm = "create index idx_oid on tmp_fm(_oid)"
if not self.runCommand(tmp_fm):
return False
@ -990,6 +1036,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
self.beginMessage("Find unassociated origins")
if not self.runCommand(tmp_origin):
return False
tmp_origin = "create index idx_oid on tmp_origin(_oid)"
if not self.runCommand(tmp_origin):
return False
@ -998,7 +1048,7 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
update tmp_origin set used=1 \
where ("
+ self.cnvCol("publicID")
+ " in (select distinct "
+ " in (select "
+ self.cnvCol("originID")
+ " from OriginReference)) \
or ("
@ -1093,6 +1143,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
)
)
if not self.runCommand(tmp_pick):
return False
tmp_pick = "create index idx_oid on tmp_pick(_oid)"
if not self.runCommand(tmp_pick):
return False
@ -1145,6 +1199,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
)
)
if not self.runCommand(tmp_amp):
return False
tmp_amp = "create index idx_oid on tmp_amp(_oid)"
if not self.runCommand(tmp_amp):
return False
@ -1287,27 +1345,27 @@ Remove all waveform QC paramters older than 30 days but do not effect event para
self._query.deleteJournalQuery("PublicObject", *v) + " and used=0"
)
self.runCommand(
self._query.deleteObjectQuery(None, "Object", *v) + " and used=0"
self._query.deleteObjectQuery("Object", *v) + " and used=0"
)
self.runCommand(
self._query.deleteObjectQuery(None, "PublicObject", *v) + " and used=0"
self._query.deleteObjectQuery("PublicObject", *v) + " and used=0"
)
def deleteObjects(self, *v):
self.runCommand(self._query.deleteJournalQuery("PublicObject", *v))
self.runCommand(self._query.deleteObjectQuery("Object", *v))
self.runCommand(self._query.deleteObjectQuery("PublicObject", *v))
self.runCommand(self._query.deleteObjectQuery(None, *v))
self.runCommand(self._query.deleteObjectQuery(*v))
self.runCommand(self._query.deleteObjectQuery("PublicObject", *v[1:]))
self.runCommand(self._query.deleteObjectQuery("Object", *v[1:]))
def deleteUnusedObjects(self, *v):
self.runCommand(
self._query.deleteJournalQuery("PublicObject", *v) + " and used=0"
)
self.runCommand(self._query.deleteObjectQuery("Object", *v) + " and used=0")
self.runCommand(self._query.deleteObjectQuery(*v) + " and used=0")
self.runCommand(
self._query.deleteObjectQuery("PublicObject", *v) + " and used=0"
self._query.deleteObjectQuery("PublicObject", *v[1:]) + " and used=0"
)
self.runCommand(self._query.deleteObjectQuery(None, *v) + " and used=0")
self.runCommand(self._query.deleteObjectQuery("Object", *v[1:]) + " and used=0")
def delete(self, message, func, *v):
self.beginMessage(message)

Binary file not shown.

View File

@ -1,84 +0,0 @@
#!/usr/bin/env seiscomp-python
# -*- coding: utf-8 -*-
############################################################################
# Copyright (C) GFZ Potsdam #
# All rights reserved. #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import sys
import seiscomp.client, seiscomp.datamodel, seiscomp.io
class ObjectDumper(seiscomp.client.Application):
    """SeisComP client that resolves a single publicID and dumps the object as XML.

    The object is searched first among event-parameter types, then among
    inventory types; the first match is written as a formatted XML archive
    to stdout.
    """

    def __init__(self):
        seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv)
        # Messaging is enabled only to obtain the database connection;
        # read-only database access, no config database.
        self.setMessagingEnabled(True)
        self.setDatabaseEnabled(True, False)
        self.setMessagingUsername("")

    def createCommandLineDescription(self):
        """Extend the default command line with the --public-id/-P option."""
        seiscomp.client.Application.createCommandLineDescription(self)
        cmdline = self.commandline()
        cmdline.addGroup("Dump")
        cmdline.addStringOption("Dump", "public-id,P", "publicID")

    def loadEventParametersObject(self, publicID):
        """Return an EventParameters tree holding the object with *publicID*.

        Each supported event-parameter type is tried in turn; returns None
        when no type matches.
        """
        candidates = (
            seiscomp.datamodel.Pick,
            seiscomp.datamodel.Amplitude,
            seiscomp.datamodel.Origin,
            seiscomp.datamodel.Event,
            seiscomp.datamodel.FocalMechanism,
            seiscomp.datamodel.Magnitude,
            seiscomp.datamodel.StationMagnitude,
        )
        for candidate in candidates:
            loaded = candidate.Cast(
                self.query().loadObject(candidate.TypeInfo(), publicID)
            )
            if not loaded:
                continue
            # Wrap the hit in a fresh EventParameters container so it can be
            # serialized as a complete document.
            container = seiscomp.datamodel.EventParameters()
            container.add(loaded)
            return container

    def loadInventoryObject(self, publicID):
        """Return the inventory object with *publicID*, or None if not found."""
        candidates = (
            seiscomp.datamodel.Network,
            seiscomp.datamodel.Station,
            seiscomp.datamodel.Sensor,
            seiscomp.datamodel.SensorLocation,
            seiscomp.datamodel.Stream,
        )
        for candidate in candidates:
            loaded = candidate.Cast(
                self.query().loadObject(candidate.TypeInfo(), publicID)
            )
            if loaded:
                return loaded

    def run(self):
        """Look up the requested publicID and write it as formatted XML to stdout."""
        publicID = self.commandline().optionString("public-id")
        obj = self.loadEventParametersObject(publicID)
        if obj is None:
            obj = self.loadInventoryObject(publicID)
        if obj is None:
            raise ValueError("unknown object '" + publicID + "'")

        # dump formatted XML archive to stdout ("-" selects stdout)
        ar = seiscomp.io.XMLArchive()
        ar.setFormattedOutput(True)
        ar.create("-")
        ar.writeObject(obj)
        ar.close()
        return True
if __name__ == "__main__":
    # Script entry point: Application.__call__ runs the full SeisComP
    # init/run/done cycle of the client application.
    app = ObjectDumper()
    app()

BIN
bin/scesv

Binary file not shown.

Binary file not shown.

View File

@ -53,7 +53,7 @@ def readXML(self):
if self._eventType:
try:
eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type())
eventType = seiscomp.datamodel.EEventTypeNames.name(evt.type())
if eventType != self._eventType:
continue
except ValueError:
@ -108,10 +108,16 @@ class EventList(seiscomp.client.Application):
)
self.commandline().addGroup("Events")
self.commandline().addStringOption(
"Events", "begin", "Specify the lower bound of the time interval."
"Events",
"begin",
"Specify the lower bound of the time interval. Uses 1900-01-01T00:00:00 "
"unless given.",
)
self.commandline().addStringOption(
"Events", "end", "Specify the upper bound of the time interval."
"Events",
"end",
"Specify the upper bound of the time interval Uses 2500-01-01T00:00:00 "
"unless given.",
)
self.commandline().addStringOption(
"Events",
@ -266,7 +272,7 @@ List event IDs available in a given time range and print to stdout."""
f"""Examples:
Print all event IDs from year 2022 and thereafter
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \
--begin "2022-01-01 00:00:00"
--begin 2022-01-01T00:00:00
Print all event IDs with event type 'quarry blast'
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp --event-type 'quarry blast'
@ -303,7 +309,7 @@ Print IDs of all events in XML file
if self._eventType:
try:
eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type())
eventType = seiscomp.datamodel.EEventTypeNames.name(evt.type())
if eventType != self._eventType:
continue
except ValueError:

View File

@ -295,7 +295,7 @@ class EventStreams(client.Application):
"""Usage:
scevtstreams [options]
Extract stream information and time windows from an event"""
Extract stream information and time windows from picks of an event or solitary picks."""
)
client.Application.printUsage(self)
@ -305,8 +305,8 @@ Extract stream information and time windows from an event"""
Get the time windows for an event in the database:
scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp
Create lists compatible with fdsnws:
scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws
Get the time windows for all picks given in an XML file without origins and events:
scevtstreams -i picks.xml -m 120,500
"""
)
@ -314,10 +314,14 @@ Create lists compatible with fdsnws:
resolveWildcards = self.commandline().hasOption("resolve-wildcards")
picks = []
# read picks from input file
if self.inputFile:
picks = self.readXML()
try:
picks = self.readXML()
except IOError as e:
print(f"Error: {e}", file=sys.stderr)
return False
if not picks:
raise ValueError("Could not find picks in input file")
@ -327,6 +331,7 @@ Create lists compatible with fdsnws:
pick = datamodel.Pick.Cast(obj)
if pick is None:
continue
picks.append(pick)
if not picks:
@ -502,11 +507,18 @@ Create lists compatible with fdsnws:
ep = datamodel.EventParameters.Cast(obj)
if ep is None:
raise ValueError("no event parameters found in input file")
# pick may be provided as base object, only one can be read
pick = datamodel.Pick.Cast(obj)
if pick is None:
raise ValueError(
"Neither event parameters nor pick found in input file"
)
else:
return [pick]
# we require at least one origin which references to picks via arrivals
if ep.originCount() == 0:
raise ValueError("no origin found in input file")
if ep.originCount() == 0 and ep.pickCount() == 0:
raise ValueError("No origin found in input file")
originIDs = []
@ -524,7 +536,7 @@ Create lists compatible with fdsnws:
# use first event/origin if no id was specified
else:
# no event, use first available origin
if ep.eventCount() == 0:
if ep.eventCount() == 0 and ep.originCount() > 0:
if ep.originCount() > 1:
print(
"WARNING: Input file contains no event but more than "
@ -534,7 +546,7 @@ Create lists compatible with fdsnws:
originIDs.append(ep.origin(0).publicID())
# use origin references of first available event
else:
elif ep.eventCount() > 0 and ep.originCount() > 0:
if ep.eventCount() > 1:
print(
"WARNING: Input file contains more than 1 event. "
@ -546,10 +558,18 @@ Create lists compatible with fdsnws:
ev.originReference(i).originID()
for i in range(ev.originReferenceCount())
]
else:
print("Found no origins, trying to continue with picks only.")
if originIDs:
print(
f"Considering all arrivals from {len(originIDs)} origin(s).",
file=sys.stderr,
)
# collect pickIDs
pickIDs = set()
for oID in originIDs:
# collect pickIDs from origins
o = datamodel.Origin.Find(oID)
if o is None:
continue
@ -557,6 +577,11 @@ Create lists compatible with fdsnws:
for i in range(o.arrivalCount()):
pickIDs.add(o.arrival(i).pickID())
if len(pickIDs) == 0:
# try reading picks only
for i in range(ep.pickCount()):
pickIDs.add(ep.pick(i).publicID())
# lookup picks
picks = []
for pickID in pickIDs:
@ -564,6 +589,9 @@ Create lists compatible with fdsnws:
if pick:
picks.append(pick)
if len(pickIDs) == 0:
print("Found no picks.", file=sys.stderr)
return picks

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
bin/scinv

Binary file not shown.

BIN
bin/scm

Binary file not shown.

BIN
bin/scmag

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
bin/scmm

Binary file not shown.

144
bin/scmsdemux Executable file
View File

@ -0,0 +1,144 @@
#!/usr/bin/env seiscomp-python
############################################################################
# Copyright (C) gempa GmbH #
# All rights reserved. #
# Contact: gempa GmbH (seiscomp-dev@gempa.de) #
# #
# GNU Affero General Public License Usage #
# This file may be used under the terms of the GNU Affero #
# Public License version 3.0 as published by the Free Software Foundation #
# and appearing in the file LICENSE included in the packaging of this #
# file. Please review the following information to ensure the GNU Affero #
# Public License version 3.0 requirements will be met: #
# https://www.gnu.org/licenses/agpl-3.0.html. #
# #
# Other Usage #
# Alternatively, this file may be used in accordance with the terms and #
# conditions contained in a signed written agreement between you and #
# gempa GmbH. #
############################################################################
import os
import sys
from getopt import gnu_getopt, GetoptError
from seiscomp import mseedlite as mseed
def usage():
    """Print the scmsdemux command line help text to stdout."""
    prog = os.path.basename(__file__)
    print(
        f"""Usage:
  {prog} source

Demultiplex all miniSEED records found in the given source by stream code writing them
into separate new files. The source can be files or stdin. One file per stream is
generated. File names are derived from stream codes and the begin time of the records.

Verbosity:
  -h, --help       Display this help message.
  -v, --verbose    Verbose mode.

Examples:
Demultiplex the miniSEED records contained in data.mseed and additionally print the
names of created files to stderr

  {prog} -v data.mseed

Demultiplex the miniSEED records received from stdin

  scmssort -u -E data.mseed | {prog} -
"""
    )
def main():
    """Demultiplex miniSEED records from a file or stdin into per-stream files.

    Reads records from the single positional source argument ("-" selects
    stdin), opens one output file per stream code (named from the stream code
    plus the begin time of its first record) and appends the raw records.

    Returns:
        True on success or user interruption, False on option/I/O errors.
        Exits with status 1 when the source argument is missing or invalid.
    """
    try:
        opts, args = gnu_getopt(
            sys.argv[1:],
            "hv",
            [
                "help",
                "verbose",
            ],
        )
    except GetoptError:
        print(
            f"{os.path.basename(__file__)}: Unknown option",
            file=sys.stderr,
        )
        usage()
        return False

    verbosity = False
    for flag, _ in opts:
        if flag in ("-h", "--help"):
            usage()
            return True
        if flag in ("-v", "--verbose"):
            verbosity = True

    # Exactly one positional source argument is required.
    # (The original detected a missing argument by indexing args[0] inside a
    # broad try/except, which also left openFiles undefined for an empty
    # string argument — test explicitly instead.)
    if len(args) == 0:
        print(
            f"{os.path.basename(__file__)}: Missing source",
            file=sys.stderr,
        )
        usage()
        sys.exit(1)
    elif len(args) != 1:
        usage()
        sys.exit(1)

    inFile = sys.stdin.buffer
    if args[0] != "-":
        try:
            inFile = open(args[0], "rb")
        except IOError as e:
            print(
                f"Could not open input file '{args[0]}' for reading: {e}",
                file=sys.stderr,
            )
            return False
    else:
        print(
            "Waiting for miniSEED records on stdin. Use Ctrl + C to interrupt.",
            file=sys.stderr,
        )

    openFiles = {}  # stream code -> open output file handle
    try:
        for rec in mseed.Input(inFile):
            oName = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha)

            if oName not in openFiles:
                # File name postfix from the first record's begin time;
                # timetuple()[7] is the day of the year.
                postfix = ".D.%04d.%03d.%02d%02d" % (
                    rec.begin_time.year,
                    rec.begin_time.timetuple()[7],
                    rec.begin_time.hour,
                    rec.begin_time.minute,
                )
                openFiles[oName] = open(oName + postfix, "ab")

            openFiles[oName].write(rec.header + rec.data)
    except KeyboardInterrupt:
        pass
    finally:
        # Always close output files (the original leaked them on
        # KeyboardInterrupt) and the input file when it is not stdin.
        if verbosity:
            print("Generated output files:", file=sys.stderr)
        for oName, oFile in openFiles.items():
            if verbosity:
                print(f"  {oName}", file=sys.stderr)
            oFile.close()
        if inFile is not sys.stdin.buffer:
            inFile.close()

    return True
if __name__ == "__main__":
    # main() returns True on success; map it to conventional exit codes.
    # Passing the bool straight to sys.exit() inverted the status:
    # sys.exit(True) exits with status 1 (failure) and sys.exit(False)
    # with 0 (success).
    sys.exit(0 if main() else 1)

BIN
bin/scmv

Binary file not shown.

BIN
bin/scmvx Executable file

Binary file not shown.

BIN
bin/scolv

Binary file not shown.

Binary file not shown.

View File

@ -89,12 +89,14 @@ class OriginList(seiscomp.client.Application):
self.commandline().addStringOption(
"Origins",
"begin",
"The lower bound of the time interval. Format: '1970-01-01 00:00:00'.",
"The lower bound of the time interval. Uses 1900-01-01T00:00:00 unless "
"given.",
)
self.commandline().addStringOption(
"Origins",
"end",
"The upper bound of the time interval. Format: '1970-01-01 00:00:00'.",
"The upper bound of the time interval. Format: 1970-01-01T00:00:00. Uses "
"2500-01-01T00:00:00 unless given.",
)
self.commandline().addStringOption(
"Origins", "author", "The author of the origins."
@ -179,7 +181,7 @@ List origin IDs available in a given time range and print to stdout."""
f"""Examples:
Print all origin IDs from year 2022 and thereafter
{os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \
--begin "2022-01-01 00:00:00"
--begin 2022-01-01T00:00:00
Print IDs of all events in XML file
{os.path.basename(__file__)} -i origins.xml

Binary file not shown.

View File

@ -13,19 +13,25 @@
# https://www.gnu.org/licenses/agpl-3.0.html. #
############################################################################
import time, sys, os, traceback
import seiscomp.core, seiscomp.client, seiscomp.datamodel
import seiscomp.logging, seiscomp.system
import os
import sys
import traceback
import seiscomp.core
import seiscomp.client
import seiscomp.datamodel
import seiscomp.logging
import seiscomp.system
def createDirectory(dir):
if os.access(dir, os.W_OK):
def createDirectory(directory):
if os.access(directory, os.W_OK):
return True
try:
os.makedirs(dir)
os.makedirs(directory)
return True
except:
except OSError:
return False
@ -46,8 +52,8 @@ def timeSpanToString(ts):
if neg:
return "-%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
else:
return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs)
class ProcLatency(seiscomp.client.Application):
@ -135,8 +141,6 @@ class ProcLatency(seiscomp.client.Application):
def logObject(self, parentID, obj, update):
now = seiscomp.core.Time.GMT()
time = None
pick = seiscomp.datamodel.Pick.Cast(obj)
if pick:
phase = ""
@ -199,7 +203,7 @@ class ProcLatency(seiscomp.client.Application):
pass
try:
status = seiscomp.datamodel.EOriginStatusNames.name(org.status())
status = seiscomp.datamodel.EEvaluationStatusNames.name(org.status())
except:
pass
@ -286,7 +290,7 @@ class ProcLatency(seiscomp.client.Application):
sys.stdout.write(f"{timeToString(received)};{logEntry}\n")
if nowDirectory != self._nowDirectory:
if createDirectory(nowDirectory) == False:
if not createDirectory(nowDirectory):
seiscomp.logging.error(f"Unable to create directory {nowDirectory}")
return False
@ -298,7 +302,7 @@ class ProcLatency(seiscomp.client.Application):
)
if triggeredDirectory != self._triggeredDirectory:
if createDirectory(triggeredDirectory) == False:
if not createDirectory(triggeredDirectory):
seiscomp.logging.error(
f"Unable to create directory {triggeredDirectory}"
)
@ -321,7 +325,7 @@ class ProcLatency(seiscomp.client.Application):
# logEntry = timeToString(received)
logEntry = ""
if not triggered is None:
if triggered is not None:
aTriggered = triggered.get()
triggeredDirectory = (
self._directory + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/"
@ -341,7 +345,7 @@ class ProcLatency(seiscomp.client.Application):
sys.stdout.write(f"{timeToString(received)};{logEntry}\n")
if nowDirectory != self._nowDirectory:
if createDirectory(nowDirectory) == False:
if not createDirectory(nowDirectory):
seiscomp.logging.error(f"Unable to create directory {nowDirectory}")
return False
@ -353,7 +357,7 @@ class ProcLatency(seiscomp.client.Application):
if triggeredDirectory:
if triggeredDirectory != self._triggeredDirectory:
if createDirectory(triggeredDirectory) == False:
if not createDirectory(triggeredDirectory):
seiscomp.logging.error(
f"Unable to create directory {triggeredDirectory}"
)
@ -369,11 +373,8 @@ class ProcLatency(seiscomp.client.Application):
return True
def writeLog(self, file, text):
of = open(file, "a")
if of:
of.write(text)
of.write("\n")
of.close()
with open(file, "a", encoding="utf8") as of:
of.print(text, file=of)
app = ProcLatency(len(sys.argv), sys.argv)

BIN
bin/scqc

Binary file not shown.

BIN
bin/scqcv

Binary file not shown.

Binary file not shown.

View File

@ -105,10 +105,14 @@ class WfqQuery(seiscomp.client.Application):
self.commandline().addGroup("Query")
self.commandline().addStringOption(
"Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'"
"Query",
"begin,b",
"Begin time of query. Uses 1900-01-01T00:00:00 unless given.",
)
self.commandline().addStringOption(
"Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'"
"Query",
"end,e",
"End time of query. Uses current time unless given.",
)
self.commandline().addStringOption(
"Query",
@ -116,7 +120,7 @@ class WfqQuery(seiscomp.client.Application):
"Waveform stream ID to search for QC parameters: net.sta.loc.cha -"
" [networkCode].[stationCode].[sensorLocationCode].[channelCode]. "
"Provide a single ID or a comma-separated list. Overrides "
"--streams-from-inventory",
"--streams-from-inventory.",
)
self.commandline().addStringOption(
"Query",
@ -151,8 +155,8 @@ Query a database for waveform quality control (QC) parameters.""",
print(
f"""Examples:
Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from \
'2021-11-20 00:00:00' until current
{os.path.basename(__file__)} -d localhost -b '2021-11-20 00:00:00' -p rms,delay \
2021-11-20 00:00:00 until current
{os.path.basename(__file__)} -d localhost -b 2021-11-20T00:00:00 -p rms,delay \
-i AU.AS18..SHZ,AU.AS19..SHZ""",
file=sys.stderr,
)

Binary file not shown.

Binary file not shown.

Binary file not shown.

View File

@ -69,8 +69,8 @@ class SendOrigin(seiscomp.client.Application):
"Parameters", "coord", "Latitude,longitude,depth of origin"
)
self.commandline().addStringOption("Parameters", "time", "time of origin")
except:
seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}")
except Exception:
seiscomp.logging.warning(f"Caught unexpected error {sys.exc_info()}")
def printUsage(self):
print(
@ -85,7 +85,7 @@ Create an artificial origin and send to the messaging"""
print(
"""Examples:
Send an artificial origin with hypocenter parameters to the messaging
scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10
scsendorigin --time 2022-05-01T10:00:00 --coord 52,12,10
"""
)

Binary file not shown.

BIN
bin/scsmdump Executable file

Binary file not shown.

View File

@ -359,9 +359,7 @@ Create an output XML file every 60 seconds and execute a custom script to proces
try:
f = open(self._outputFile, "w")
except:
seiscomp.logging.error(
f"Unable to create output file: {self._outputFile}"
)
seiscomp.logging.error(f"Unable to create output file: {self._outputFile}")
return
self.toXML(f)

View File

@ -62,50 +62,52 @@ class VoiceAlert(client.Application):
self.commandline().addOption(
"Generic",
"first-new",
"calls an event a new event when it is " "seen the first time",
"Calls an event a new event when it is seen the first time.",
)
self.commandline().addGroup("Alert")
self.commandline().addStringOption(
"Alert",
"amp-type",
"specify the amplitude type to listen to",
"Specify the amplitude type to listen to.",
self._ampType,
)
self.commandline().addStringOption(
"Alert",
"amp-script",
"specify the script to be called when a "
"Specify the script to be called when a "
"stationamplitude arrived, network-, stationcode and amplitude are "
"passed as parameters $1, $2 and $3",
"passed as parameters $1, $2 and $3.",
)
self.commandline().addStringOption(
"Alert",
"alert-script",
"specify the script to be called when a "
"Specify the script to be called when a "
"preliminary origin arrived, latitude and longitude are passed as "
"parameters $1 and $2",
"parameters $1 and $2.",
)
self.commandline().addStringOption(
"Alert",
"event-script",
"specify the script to be called when an "
"Specify the script to be called when an "
"event has been declared; the message string, a flag (1=new event, "
"0=update event), the EventID, the arrival count and the magnitude "
"(optional when set) are passed as parameter $1, $2, $3, $4 and $5",
"(optional when set) are passed as parameter $1, $2, $3, $4 and $5.",
)
self.commandline().addGroup("Cities")
self.commandline().addStringOption(
"Cities",
"max-dist",
"maximum distance for using the distance " "from a city to the earthquake",
"Maximum distance for using the distance from a city to the earthquake.",
str(self._citiesMaxDist),
)
self.commandline().addStringOption(
"Cities",
"min-population",
"minimum population for a city to " "become a point of interest",
"Minimum population for a city to become a point of interest.",
str(self._citiesMinPopulation),
)
self.commandline().addGroup("Debug")
self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID")
self.commandline().addStringOption("Debug", "eventid,E", "Specify event ID.")
return True
def init(self):

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

BIN
bin/sczip

Binary file not shown.

View File

@ -722,8 +722,8 @@ def on_status(args, _):
if env.isModuleEnabled(mod.name) or isinstance(
mod, seiscomp.kernel.CoreModule
):
mod.status(shouldModuleRun(mod.name))
found += 1
if mod.status(shouldModuleRun(mod.name)) == 0:
found += 1
if not useCSV:
print(f"Summary: {found} modules enabled")
@ -733,8 +733,8 @@ def on_status(args, _):
if len(args) > 0 and args[0] == "started":
for mod in mods:
if shouldModuleRun(mod.name):
mod.status(shouldModuleRun(mod.name))
found += 1
if mod.status(shouldModuleRun(mod.name)) == 0:
found += 1
if not useCSV:
print(f"Summary: {found} modules started")
@ -743,8 +743,8 @@ def on_status(args, _):
for mod in mods:
if mod.name in args or len(args) == 0:
mod.status(shouldModuleRun(mod.name))
found += 1
if mod.status(shouldModuleRun(mod.name)) == 0:
found += 1
if not useCSV:
print(f"Summary: {found} modules reported")

View File

@ -86,7 +86,7 @@ class SH2Proc(seiscomp.client.Application):
"""Usage:
sh2proc [options]
Convert Seismic Handler event data to SeisComP XML format"""
Convert Seismic Handler event data to SeisComP XML format which is sent to stdout."""
)
seiscomp.client.Application.printUsage(self)
@ -95,10 +95,10 @@ Convert Seismic Handler event data to SeisComP XML format"""
"""Examples:
Convert the Seismic Handler file shm.evt to SCML. Receive the database
connection to read inventory and configuration information from messaging
sh2proc shm.evt
sh2proc shm.evt > event.xml
Read Seismic Handler data from stdin. Provide inventory and configuration in XML
cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml
cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml > event.xml
"""
)
@ -489,7 +489,7 @@ Read Seismic Handler data from stdin. Provide inventory and configuration in XML
seiscomp.datamodel.IMPULSIVE,
seiscomp.datamodel.QUESTIONABLE,
]:
if value == seiscomp.datamodel.EPickOnsetNames_name(onset):
if value == seiscomp.datamodel.EPickOnsetNames.name(onset):
pick.setOnset(onset)
found = True
break
@ -524,7 +524,7 @@ Read Seismic Handler data from stdin. Provide inventory and configuration in XML
seiscomp.datamodel.AUTOMATIC,
seiscomp.datamodel.MANUAL,
]:
if value == seiscomp.datamodel.EEvaluationModeNames_name(mode):
if value == seiscomp.datamodel.EEvaluationModeNames.name(mode):
pick.setEvaluationMode(mode)
found = True
break

Binary file not shown.

Binary file not shown.

3673
bin/slmon2 Executable file

File diff suppressed because it is too large Load Diff

Binary file not shown.