diff --git a/bin/Hypo71PC b/bin/Hypo71PC index 597f281..fdc1f72 100755 Binary files a/bin/Hypo71PC and b/bin/Hypo71PC differ diff --git a/bin/dlsv2inv b/bin/dlsv2inv index c87d522..c7bccc3 100755 Binary files a/bin/dlsv2inv and b/bin/dlsv2inv differ diff --git a/bin/ew2sc b/bin/ew2sc index f020fe4..12ae55b 100755 Binary files a/bin/ew2sc and b/bin/ew2sc differ diff --git a/bin/extr_file b/bin/extr_file deleted file mode 100755 index 9cf416f..0000000 --- a/bin/extr_file +++ /dev/null @@ -1,28 +0,0 @@ -#!/usr/bin/env seiscomp-python - -from __future__ import print_function -import sys -from seiscomp import mseedlite as mseed - -open_files = {} - -if len(sys.argv) != 2: - print("Usage: extr_file FILE") - sys.exit(1) - -for rec in mseed.Input(open(sys.argv[1], "rb")): - oname = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha) - - if oname not in open_files: - postfix = ".D.%04d.%03d.%02d%02d" % (rec.begin_time.year, - rec.begin_time.timetuple()[7], rec.begin_time.hour, - rec.begin_time.minute) - - open_files[oname] = open(oname + postfix, "ab") - - ofile = open_files[oname] - ofile.write(rec.header + rec.data) - -for oname in open_files: - open_files[oname].close() - diff --git a/bin/fdsnws b/bin/fdsnws index ba6fe8b..fe98504 100755 --- a/bin/fdsnws +++ b/bin/fdsnws @@ -40,7 +40,7 @@ import seiscomp.logging import seiscomp.client import seiscomp.system -from seiscomp.math import KM_OF_DEGREE +from seiscomp.math import WGS84_KM_OF_DEGREE from seiscomp.fdsnws.utils import isRestricted, u_str, b_str from seiscomp.fdsnws.dataselect import ( @@ -72,6 +72,14 @@ from seiscomp.fdsnws.http import ( ) from seiscomp.fdsnws.log import Log +try: + from seiscomp.fdsnws.jwt import JWT + + _jwtSupported = True + +except ImportError: + _jwtSupported = False + def logSC3(entry): try: @@ -411,6 +419,14 @@ class FDSNWS(seiscomp.client.Application): self._access = None self._checker = None + self._jwtEnabled = False + self._jwtIssuers = ["https://geofon.gfz.de/eas2", "https://login.earthscope.org/"] + self._jwtAudience = ["eas", "fdsn"] + self._jwtAlgorithms = ["RS256"] + self._jwtUpdateMin = 300 + self._jwtUpdateMax = 86400 + self._jwt = None + self._requestLog = None self.__reloadRequested = False self.__timeInventoryLoaded = None @@ -745,6 +761,42 @@ class FDSNWS(seiscomp.client.Application): except Exception: pass + # enable JWT extension? + try: + self._jwtEnabled = self.configGetBool("jwt.enable") + except Exception: + pass + + # JWT issuers + try: + self._jwtIssuers = self.configGetStrings("jwt.issuers") + except Exception: + pass + + # JWT audience + try: + self._jwtAudience = self.configGetStrings("jwt.audience") + except Exception: + pass + + # JWT algorithms + try: + self._jwtAlgorithms = self.configGetStrings("jwt.algorithms") + except Exception: + pass + + # JWT minimum update period + try: + self._jwtUpdateMin = self.configGetStrings("jwt.updateMinSeconds") + except Exception: + pass + + # JWT maximum update period + try: + self._jwtUpdateMax = self.configGetStrings("jwt.updateMaxSeconds") + except Exception: + pass + # If the database connection is passed via command line or # configuration file then messaging is disabled. Messaging is only used # to get the configured database connection URI. 
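
The hunks above guard the new JWT support in bin/fdsnws behind an ImportError check and read each jwt.* option individually, falling back to a built-in default when an option is absent. A minimal, self-contained sketch of that pattern; the dictionary-backed _get() is only a stand-in for the real configGetBool()/configGetStrings() calls, and the issuer URLs are example values taken from the defaults above:

    import sys

    # Optional dependency guarded by ImportError, as in bin/fdsnws above.
    try:
        from seiscomp.fdsnws.jwt import JWT  # only usable when the module exists
        _jwtSupported = True
    except ImportError:
        _jwtSupported = False

    # Stand-in for the application's configuration; the real code queries
    # configGetBool("jwt.enable"), configGetStrings("jwt.issuers"), etc.
    _config = {"jwt.enable": True}

    def _get(key, default):
        # Keep the default whenever the option is not set, mirroring the
        # try/except blocks around every configGet*() call in the patch.
        return _config.get(key, default)

    jwtEnabled = _get("jwt.enable", False)
    jwtIssuers = _get("jwt.issuers", ["https://geofon.gfz.de/eas2", "https://login.earthscope.org/"])
    jwtAudience = _get("jwt.audience", ["eas", "fdsn"])
    jwtAlgorithms = _get("jwt.algorithms", ["RS256"])

    if jwtEnabled and not _jwtSupported:
        print("JWT is not supported due to missing dependencies", file=sys.stderr)
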
@@ -878,7 +930,7 @@ Execute on command line with debug output if self._invCoordinatePrecision is not None: invCoordinatePrecisionStr = ( f"{self._invCoordinatePrecision} decimal places (≅" - f"{int(KM_OF_DEGREE * 1000 / 10**self._invCoordinatePrecision)}m)" + f"{int(WGS84_KM_OF_DEGREE * 1000 / 10**self._invCoordinatePrecision)}m)" ) else: invCoordinatePrecisionStr = "unlimited" @@ -926,6 +978,13 @@ configuration read: auth enabled : {self._authEnabled} gnupgHome : {self._authGnupgHome} + JWT + enabled : {self._jwtEnabled} + issuers : {self._jwtIssuers} + audience : {self._jwtAudience} + algorithms : {self._jwtAlgorithms} + updateMinSeconds : {self._jwtUpdateMin} + updateMaxSeconds : {self._jwtUpdateMax} requestLog : {self._requestLogFile}""" ) @@ -937,6 +996,17 @@ configuration read: seiscomp.logging.error("all services disabled through configuration") return None + if self._jwtEnabled: + if not _jwtSupported: + seiscomp.logging.error( + "JWT is not supported due to missing dependencies" + ) + return None + + self._jwt = JWT( + self._jwtIssuers, self._jwtAudience, self._jwtAlgorithms, self._jwtUpdateMin, self._jwtUpdateMax + ) + # access logger if requested if self._accessLogFile: self._accessLog = Log(self._accessLogFile) @@ -1019,9 +1089,16 @@ configuration read: dataselect.putChild(b"1", dataselect1) # query - dataselect1.putChild( - b"query", FDSNDataSelect(dataSelectInv, self._recordBulkSize) - ) + if self._jwtEnabled: + authSession = self._jwt.getAuthSessionWrapper( + FDSNDataSelect, dataSelectInv, self._recordBulkSize, self._access + ) + dataselect1.putChild(b"query", authSession) + + else: + dataselect1.putChild( + b"query", FDSNDataSelect(dataSelectInv, self._recordBulkSize) + ) # queryauth if self._authEnabled: @@ -1050,7 +1127,8 @@ configuration read: dataselect1.putChild(b"builder", fileRes) if self._authEnabled: - from seiscomp.fdsnws.http import AuthResource + from seiscomp.fdsnws.authresource import AuthResource + dataselect1.putChild( b"auth", AuthResource(DataSelectVersion, self._authGnupgHome, self._userdb), @@ -1180,7 +1258,13 @@ configuration read: availability.putChild(b"1", availability1) # query - availability1.putChild(b"query", FDSNAvailabilityQuery()) + if self._jwtEnabled: + authSession = self._jwt.getAuthSessionWrapper( + FDSNAvailabilityQuery, self._access + ) + availability1.putChild(b"query", authSession) + else: + availability1.putChild(b"query", FDSNAvailabilityQuery()) # queryauth if self._authEnabled: @@ -1192,7 +1276,13 @@ configuration read: availability1.putChild(b"queryauth", authSession) # extent - availability1.putChild(b"extent", FDSNAvailabilityExtent()) + if self._jwtEnabled: + authSession = self._jwt.getAuthSessionWrapper( + FDSNAvailabilityExtent, self._access + ) + availability1.putChild(b"extent", authSession) + else: + availability1.putChild(b"extent", FDSNAvailabilityExtent()) # extentauth if self._authEnabled: diff --git a/bin/fdsnxml2inv b/bin/fdsnxml2inv index fcdcf86..0f41759 100755 Binary files a/bin/fdsnxml2inv and b/bin/fdsnxml2inv differ diff --git a/bin/inv2dlsv b/bin/inv2dlsv deleted file mode 100755 index 358d557..0000000 --- a/bin/inv2dlsv +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env seiscomp-python -# -*- coding: utf-8 -*- -############################################################################ -# Copyright (C) GFZ Potsdam # -# All rights reserved. 
# -# # -# GNU Affero General Public License Usage # -# This file may be used under the terms of the GNU Affero # -# Public License version 3.0 as published by the Free Software Foundation # -# and appearing in the file LICENSE included in the packaging of this # -# file. Please review the following information to ensure the GNU Affero # -# Public License version 3.0 requirements will be met: # -# https://www.gnu.org/licenses/agpl-3.0.html. # -############################################################################ - -import sys -import io -from seiscomp.legacy.fseed import * -from seiscomp.legacy.db.seiscomp3 import sc3wrap -from seiscomp.legacy.db.seiscomp3.inventory import Inventory -import seiscomp.datamodel -import seiscomp.io - -ORGANIZATION = "EIDA" - - -def iterinv(obj): - return (j for i in obj.values() for j in i.values()) - - -def main(): - if len(sys.argv) < 1 or len(sys.argv) > 3: - print("Usage inv2dlsv [in_xml [out_dataless]]", file=sys.stderr) - return 1 - - if len(sys.argv) > 1: - inFile = sys.argv[1] - else: - inFile = "-" - - if len(sys.argv) > 2: - out = sys.argv[2] - else: - out = "" - - sc3wrap.dbQuery = None - - ar = seiscomp.io.XMLArchive() - if not ar.open(inFile): - raise IOError(inFile + ": unable to open") - - obj = ar.readObject() - if obj is None: - raise TypeError(inFile + ": invalid format") - - sc3inv = seiscomp.datamodel.Inventory.Cast(obj) - if sc3inv is None: - raise TypeError(inFile + ": invalid format") - - inv = Inventory(sc3inv) - inv.load_stations("*", "*", "*", "*") - inv.load_instruments() - - vol = SEEDVolume(inv, ORGANIZATION, "", resp_dict=False) - - for net in iterinv(inv.network): - for sta in iterinv(net.station): - for loc in iterinv(sta.sensorLocation): - for strm in iterinv(loc.stream): - try: - vol.add_chan( - net.code, - sta.code, - loc.code, - strm.code, - strm.start, - strm.end, - ) - - except SEEDError as exc: - print( - f"Error ({net.code},{sta.code},{loc.code},{strm.code}): {str(exc)}", - file=sys.stderr, - ) - - if not out or out == "-": - output = io.BytesIO() - vol.output(output) - stdout = sys.stdout.buffer if hasattr(sys.stdout, "buffer") else sys.stdout - stdout.write(output.getvalue()) - stdout.flush() - output.close() - else: - with open(sys.argv[2], "wb") as fd: - vol.output(fd) - - return 0 - - -if __name__ == "__main__": - try: - sys.exit(main()) - except Exception as e: - print(f"Error: {str(e)}", file=sys.stderr) - sys.exit(1) diff --git a/bin/invextr b/bin/invextr index 79143b4..9277e17 100755 Binary files a/bin/invextr and b/bin/invextr differ diff --git a/bin/load_timetable b/bin/load_timetable index 197b09a..c248211 100755 Binary files a/bin/load_timetable and b/bin/load_timetable differ diff --git a/bin/msrtsimul b/bin/msrtsimul index 10f9a87..b489a3e 100755 --- a/bin/msrtsimul +++ b/bin/msrtsimul @@ -107,19 +107,19 @@ def rt_simul(f, speed=1.0, jump=0.0, delaydict=None): # ------------------------------------------------------------------------------ def usage(): print( - """Usage: - msrtsimul [options] file + f"""Usage: + {os.path.basename(__file__)} [options] file miniSEED real-time playback and simulation -msrtsimul reads sorted (and possibly multiplexed) miniSEED files and writes -individual records in pseudo-real-time. This is useful e.g. for testing and +{os.path.basename(__file__)} reads sorted (and possibly multiplexed) miniSEED files and +writes individual records in pseudo-real-time. This is useful e.g. for testing and simulating data acquisition. 
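
Several tools in this patch (msrtsimul above, scart and scmsdemux below) now build their usage text from os.path.basename(__file__) instead of a hard-coded program name. A small sketch of the idea, with a hypothetical install path standing in for __file__:

    import os

    def usage(argv0):
        # argv0 is normally __file__ or sys.argv[0]; using the basename keeps
        # the help text correct even if the script is renamed or symlinked.
        prog = os.path.basename(argv0)
        print(f"Usage:\n  {prog} [options] file")

    usage("/opt/seiscomp/bin/msrtsimul")  # hypothetical path
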
Output is $SEISCOMP_ROOT/var/run/seedlink/mseedfifo unless --seedlink or -c is used. Verbosity: - -h, --help Display this help message - -v, --verbose Verbose mode + -h, --help Display this help message. + -v, --verbose Verbose mode. Playback: -j, --jump Minutes to skip (float). @@ -131,14 +131,15 @@ Playback: -m --mode Choose between 'realtime' and 'historic'. -s, --speed Speed factor (float). --test Test mode. - -u, --unlimited Allow miniSEED records which are not 512 bytes + -u, --unlimited Allow miniSEED records which are not 512 bytes. By default + seedlink supports 512 bytes only. Examples: Play back miniSEED waveforms in real time with verbose output - msrtsimul -v data.mseed + {os.path.basename(__file__)} -v data.mseed Play back miniSEED waveforms in real time skipping the first 1.5 minutes - msrtsimul -j 1.5 data.mseed + {os.path.basename(__file__)} -j 1.5 data.mseed """ ) @@ -170,7 +171,7 @@ def main(): "help", "mode=", "seedlink=", - "unlimited" + "unlimited", ], ) except GetoptError: @@ -278,7 +279,7 @@ Check if SeedLink is running and configured for real-time playback. time_diff = None print( - f"Starting msrtsimul at {datetime.datetime.utcnow()}", + f"Starting msrtsimul at {datetime.datetime.now(datetime.UTC)}", file=sys.stderr, ) for rec in inp: @@ -292,7 +293,7 @@ starting on {str(rec.begin_time)}: length != 512 Bytes.", if time_diff is None: ms = 1000000.0 * (rec.nsamp / rec.fsamp) time_diff = ( - datetime.datetime.utcnow() + datetime.datetime.now(datetime.UTC).replace(tzinfo=None) - rec.begin_time - datetime.timedelta(microseconds=ms) ) diff --git a/bin/optodas_inventory b/bin/optodas_inventory index c4e7d86..eb104d2 100755 --- a/bin/optodas_inventory +++ b/bin/optodas_inventory @@ -84,7 +84,7 @@ def main(): resp = seiscomp.datamodel.ResponsePAZ_Create() resp.setType("A") - resp.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"]) + resp.setGain(args.gain / header["dataScale"]) resp.setGainFrequency(0) resp.setNormalizationFactor(1) resp.setNormalizationFrequency(0) @@ -93,9 +93,9 @@ def main(): inv.add(resp) sensor = seiscomp.datamodel.Sensor_Create() - sensor.setName(header["instrument"]) - sensor.setDescription(header["instrument"]) - sensor.setUnit(header["sensitivities"][0]["unit"]) + sensor.setName(header["experiment"]) + sensor.setDescription(header["measurement"]) + sensor.setUnit(header["unit"]) sensor.setResponse(resp.publicID()) inv.add(sensor) @@ -131,8 +131,8 @@ def main(): cha = seiscomp.datamodel.Stream_Create() cha.setCode(args.channel) cha.setStart(net.start()) - cha.setGain(args.gain * header["sensitivities"][0]["factor"] / header["dataScale"]) - cha.setGainUnit(header["sensitivities"][0]["unit"]) + cha.setGain(args.gain / header["dataScale"]) + cha.setGainUnit(header["unit"]) cha.setGainFrequency(0) cha.setSensor(sensor.publicID()) cha.setDatalogger(datalogger.publicID()) diff --git a/bin/ql2sc b/bin/ql2sc index bca2949..235a1e5 100755 Binary files a/bin/ql2sc and b/bin/ql2sc differ diff --git a/bin/run_with_lock b/bin/run_with_lock index 0b73944..37a4b9a 100755 Binary files a/bin/run_with_lock and b/bin/run_with_lock differ diff --git a/bin/sc2pa b/bin/sc2pa index 26b0e86..9dc4c14 100755 --- a/bin/sc2pa +++ b/bin/sc2pa @@ -13,11 +13,14 @@ # https://www.gnu.org/licenses/agpl-3.0.html. 
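
The msrtsimul change above replaces the deprecated datetime.datetime.utcnow() with datetime.datetime.now(datetime.UTC), which returns a timezone-aware value (Python 3.11+). Because the miniSEED record times it is compared against appear to be naive, the patch strips the tzinfo again before subtracting. A short illustration of why, using a hypothetical record begin time:

    import datetime

    aware = datetime.datetime.now(datetime.UTC)   # timezone-aware "now"
    naive = aware.replace(tzinfo=None)            # same wall-clock time, tz dropped

    # hypothetical naive record begin time, as mseedlite appears to provide
    record_begin = datetime.datetime(2026, 5, 1, 12, 0)

    lag = naive - record_begin                    # fine: naive - naive
    # aware - record_begin would raise TypeError: can't subtract offset-naive
    # and offset-aware datetimes
    print(lag)
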
# ############################################################################ -import time import sys import os -import time -import seiscomp.core, seiscomp.client, seiscomp.datamodel, seiscomp.logging + +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel +import seiscomp.logging + from seiscomp.scbulletin import Bulletin, stationCount @@ -40,8 +43,8 @@ class ProcAlert(seiscomp.client.Application): self.minPickCount = 25 self.procAlertScript = "" - - ep = seiscomp.datamodel.EventParameters() + self.bulletin = None + self.cache = None def createCommandLineDescription(self): try: @@ -61,7 +64,8 @@ class ProcAlert(seiscomp.client.Application): self.commandline().addStringOption( "Publishing", "procalert-script", - "Specify the script to publish an event. The ProcAlert file and the event id are passed as parameter $1 and $2", + "Specify the script to publish an event. The ProcAlert file and the " + "event id are passed as parameter $1 and $2", ) self.commandline().addOption( "Publishing", "test", "Test mode, no messages are sent" @@ -174,13 +178,14 @@ class ProcAlert(seiscomp.client.Application): return False def send_procalert(self, txt, evid): - if self.procAlertScript: - tmp = f"/tmp/yyy{evid.replace('/', '_').replace(':', '-')}" - f = file(tmp, "w") - f.write(f"{txt}") - f.close() + if not self.procAlertScript: + return - os.system(self.procAlertScript + " " + tmp + " " + evid) + file = f"/tmp/yyy{evid.replace('/', '_').replace(':', '-')}" + with open(file, "w", encoding="utf8") as f: + print(txt, file=f) + + os.system(self.procAlertScript + " " + file + " " + evid) def coordinates(self, org): return org.latitude().value(), org.longitude().value(), org.depth().value() @@ -194,7 +199,7 @@ class ProcAlert(seiscomp.client.Application): seiscomp.logging.error("suspicious region/depth - ignored") publish = False - if stationCount(org) < self.minPickCount: + if stationCount(org, 0.5) < self.minPickCount: seiscomp.logging.error("too few picks - ignored") publish = False diff --git a/bin/sc32inv b/bin/sc32inv deleted file mode 120000 index 7d624b2..0000000 --- a/bin/sc32inv +++ /dev/null @@ -1 +0,0 @@ -scml2inv \ No newline at end of file diff --git a/bin/scalert b/bin/scalert index c960e45..a249767 100755 --- a/bin/scalert +++ b/bin/scalert @@ -39,6 +39,8 @@ class ObjectAlert(seiscomp.client.Application): self.addMessagingSubscription("EVENT") self.addMessagingSubscription("LOCATION") self.addMessagingSubscription("MAGNITUDE") + self.addMessagingSubscription("AMPLITUDE") + self.addMessagingSubscription("PICK") self.setAutoApplyNotifierEnabled(True) self.setInterpretNotifierEnabled(True) @@ -76,50 +78,57 @@ class ObjectAlert(seiscomp.client.Application): self.commandline().addOption( "Generic", "first-new", - "calls an event a new event when it is seen the first time", + "Calls an event a new event when it is seen the first time.", ) self.commandline().addGroup("Alert") self.commandline().addStringOption( - "Alert", "amp-type", "amplitude type to listen to", self._ampType + "Alert", + "amp-type", + "Amplitude type to listen to.", + self._ampType, ) self.commandline().addStringOption( "Alert", "pick-script", - "script to be called when a pick arrived, network-, station code pick " - "publicID are passed as parameters $1, $2, $3 and $4", + "Script to be called when a pick arrived, network-, station code pick " + "publicID are passed as parameters $1, $2, $3 and $4.", ) self.commandline().addStringOption( "Alert", "amp-script", - "script to be called when a station amplitude 
arrived, network-, station " - "code, amplitude and amplitude publicID are passed as parameters $1, $2, $3 and $4", + "Script to be called when a station amplitude arrived, network-, station " + "code, amplitude and amplitude publicID are passed as parameters $1, $2, " + "$3 and $4.", ) self.commandline().addStringOption( "Alert", "alert-script", - "script to be called when a preliminary origin arrived, latitude and " - "longitude are passed as parameters $1 and $2", + "Script to be called when a preliminary origin arrived, latitude and " + "longitude are passed as parameters $1 and $2.", ) self.commandline().addStringOption( "Alert", "event-script", - "script to be called when an event has been declared; the message string, a " - "flag (1=new event, 0=update event), the EventID, the arrival count and the " - "magnitude (optional when set) are passed as parameter $1, $2, $3, $4 and $5", + "Script to be called when an event has been declared; the message string, " + "a flag (1=new event, 0=update event), the EventID, the arrival count and " + "the magnitude (optional when set) are passed as parameter $1, $2, $3, $4 " + "and $5.", ) self.commandline().addGroup("Cities") self.commandline().addStringOption( "Cities", "max-dist", - "maximum distance for using the distance from a city to the earthquake", + "Maximum distance for using the distance from a city to the earthquake.", + str(self._citiesMaxDist), ) self.commandline().addStringOption( "Cities", "min-population", - "minimum population for a city to become a point of interest", + "Minimum population for a city to become a point of interest.", + str(self._citiesMinPopulation), ) self.commandline().addGroup("Debug") - self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID") + self.commandline().addStringOption("Debug", "eventid,E", "Specify event ID.") return True def init(self): @@ -174,7 +183,7 @@ class ObjectAlert(seiscomp.client.Application): phaseStreams = self.configGetStrings("constraints.phaseStreams") for item in phaseStreams: rule = item.strip() - # rule is NET.STA.LOC.CHA and the special charactes ? * | ( ) are allowed + # allowned: NET.STA.LOC.CHA and the special charactes ? 
* | ( ) if not re.fullmatch(r"[A-Z|a-z|0-9|\?|\*|\||\(|\)|\.]+", rule): seiscomp.logging.error( f"Wrong stream ID format in `constraints.phaseStreams`: {item}" @@ -559,7 +568,8 @@ class ObjectAlert(seiscomp.client.Application): break if not matched: seiscomp.logging.debug( - f" + stream ID {waveformID} does not match constraints.phaseStreams rules" + f" + stream ID {waveformID} does not match " + "constraints.phaseStreams rules" ) return @@ -568,7 +578,8 @@ class ObjectAlert(seiscomp.client.Application): self.notifyPick(obj) else: seiscomp.logging.debug( - f" + phase hint {phaseHint} does not match '{self._phaseHints}'" + f" + phase hint {phaseHint} does not match " + f"'{self._phaseHints}'" ) else: seiscomp.logging.debug( @@ -739,6 +750,11 @@ class ObjectAlert(seiscomp.client.Application): seiscomp.logging.debug(f"desc: {dsc}") + try: + evType = seiscomp.datamodel.EEventTypeNames.name(evt.type()) + except Exception: + evType = "earthquake" + dep = org.depth().value() now = seiscomp.core.Time.GMT() otm = org.time().value() @@ -756,14 +772,10 @@ class ObjectAlert(seiscomp.client.Application): dt = f"{int(dt)} seconds ago" if preliminary: - message = f"earthquake, XXL, preliminary, {dt}, {dsc}" + message = f"{evType}, XXL, preliminary, {dt}, {dsc}" else: - message = "earthquake, %s, %s, %s, depth %d kilometers" % ( - dt, - dsc, - mag, - int(dep + 0.5), - ) + message = f"{evType}, {dt}, {dsc}, {mag}, depth {int(dep + 0.5)} kilometers" + seiscomp.logging.info(message) if not self._eventScript: diff --git a/bin/scamp b/bin/scamp index 32fb4d2..a1bc86c 100755 Binary files a/bin/scamp and b/bin/scamp differ diff --git a/bin/scanloc b/bin/scanloc index 4fbdeca..7ab4278 100755 Binary files a/bin/scanloc and b/bin/scanloc differ diff --git a/bin/scardac b/bin/scardac index 3423f36..6b9a7a9 100755 Binary files a/bin/scardac and b/bin/scardac differ diff --git a/bin/scart b/bin/scart index cfacf82..2e06eba 100755 --- a/bin/scart +++ b/bin/scart @@ -300,10 +300,15 @@ class StreamIterator: self.file = workdir + file # print "Starting at file %s" % self.file - self.record, self.index = ar.findIndex(begin, end, self.file) - if self.record: - self.current = self.record.startTime() - self.currentEnd = self.record.endTime() + while begin < end: + self.record, self.index = ar.findIndex(begin, end, self.file) + if self.record: + self.current = self.record.startTime() + self.currentEnd = self.record.endTime() + break + begin = self.archive.stepTime(begin) + workdir, file = ar.location(begin, net, sta, loc, cha) + self.file = workdir + file def __next__(self): while True: @@ -458,7 +463,8 @@ class RecordRenamer: def printRules(self): for r in self.renameRules: print( - f"Renaming {(r.pattern.pattern if r.pattern is not None else '*.*.*.*')} " + "Renaming " + f"{(r.pattern.pattern if r.pattern is not None else '*.*.*.*')} " f"to {r.newNet}.{r.newSta}.{r.newLoc}.{r.newCha}", file=sys.stderr, ) @@ -805,10 +811,9 @@ Usage: {os.path.basename(__file__)} -d [options] [archive] {os.path.basename(__file__)} --check [options] [archive] -Import miniSEED waveforms or dump records from an SDS structure, sort them, -modify the time and replay them. Also check files and archives. -For Import and Dump mode the data streams can be selected in three ways -using the combinations of options: -n -c -t or --nslc -t or --list +Import or export miniSEED waveforms into/from an SDS structure. Also check files and +archives. Data streams can be selected in three ways using the combinations of options: +-n -c -t or --nslc -t or --list. 
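
The StreamIterator change in scart above no longer gives up when the first candidate archive file yields no index; it steps the start time forward until a record is found or the time window is exhausted. A control-flow sketch with stand-in callables replacing the SDS helpers findIndex(), stepTime() and location():

    def find_first_record(begin, end, find_index, step_time, location):
        # Advance through the archive one time step (e.g. one day file) at a
        # time until a readable record turns up or the window is exhausted.
        current_file = location(begin)
        while begin < end:
            record = find_index(begin, end, current_file)
            if record is not None:
                return record
            begin = step_time(begin)        # skip an empty file/time step
            current_file = location(begin)
        return None

    # Toy usage: "files" are integers and records only exist from t=3 onward.
    rec = find_first_record(
        0, 10,
        find_index=lambda b, e, f: f if f >= 3 else None,
        step_time=lambda t: t + 1,
        location=lambda t: t,
    )
    print(rec)  # -> 3
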
Verbosity: -h, --help Display this help message. @@ -843,7 +848,7 @@ Processing: 2007-03-28 15:48;2007-03-28 16:18;GE.LAST.*.* 2007-03-28 15:48;2007-03-28 16:18;GE.PMBI..BH? -m, --modify Dump mode: Modify the record time for real time playback - when dumping. + when dumping. Implicitly sets the speed parameter to 1. -n arg Import, dump mode: Data stream selection as a comma separated list "stream1,stream2,streamX" where each stream can be NET or NET.STA or NET.STA.LOC or NET.STA.LOC.CHA. If CHA is omitted, @@ -858,16 +863,18 @@ Processing: A rule is "[match-stream:]rename-stream" and match-stream is optional. match-stream and rename-stream are in the "NET.STA.LOC.CHA" format. match-stream supports special - charactes "?" "*" "|" "(" ")". rename-stream supports the + characters "?" "*" "|" "(" ")". rename-stream supports the special character "-" that can be used in place of NET, STA, LOC, CHA codes with the meaning of not renaming those. "-" can also be used as the last character in CHA code. Multiple rules can be provided as a comma separated list or by providing multiple --rename options. -s, --sort Dump mode: Sort records. - --speed arg Dump mode: Specify the speed to dump the records. A value - of 0 means no delay. Otherwise speed is a multiplier of - the real time difference between the records. + --speed arg Dump mode: Specify the speed to dump the records as a + multiplier of the real time difference between the records. + A value > 1 will speed up the playback while a value > 0 + and < 1 will slow the playback down. This option implies + sorting of the records. -t, --time-window t1~t2 Import, dump mode: UTC time window filter to be applied to the data streams. Format: "StartTime~EndTime". Example: @@ -886,7 +893,7 @@ Output: --print-streams. --with-filecheck Import mode: Check all accessed files after import. Unsorted or unreadable files are reported to stderr. Checks are only - complete for files containing exactly one stream. More + complete for files containing exactly one stream. More complete checks are made with scmssort. --with-filename Import mode: Print all accessed files to sterr after import. @@ -901,11 +908,16 @@ Import miniSEED data into a SDS archive, check all modified files for errors {os.path.basename(__file__)} -I file.mseed --with-filecheck $SEISCOMP_ROOT/var/lib/archive Import miniSEED data from FDSNWS into a SDS archive for specific time range and streams - {os.path.basename(__file__)} -I fdsnws://geofon.gfz-potsdam.de \ + {os.path.basename(__file__)} -I fdsnws://geofon.gfz.de \ -t 2022-03-28T15:48~2022-03-28T16:18 --nslc list.file $SEISCOMP_ROOT/var/lib/archive Check an archive for files with out-of-order records {os.path.basename(__file__)} --check /archive + +Play back miniSEED data from archive at normal speed as in real time and pipe \ +them into another application, here scrttv + + {os.path.basename(__file__)} -dmv -t 2026-05-01~2026-05-02 /archive | scrttv -I - --offline --no-inventory """ @@ -964,7 +976,7 @@ def main(): # default = stdin recordURL = "file://-" - speed = 0 + speed = None stdout = False outputFile = None ignoreRecords = False @@ -1038,7 +1050,23 @@ def main(): else: usage(exitcode=1) - if not dump and not checkSDS and not importMode: + if dump: + if modifyTime and speed is None: + speed = 1 + sort = True + elif speed is not None: + if speed <= 0: + print("'--speed' must be greater than 0", file=sys.stderr) + return -1 + + sort = True + if modifyTime and speed != 1: + print( + "Modify time requested with '--speed' value other than 1. 
Gaps " + "or overlaps will be created.", + file=sys.stderr, + ) + elif not checkSDS and not importMode: importMode = True if files: @@ -1116,18 +1144,21 @@ def main(): print(f"Stream file: '{nslcFile}'", file=sys.stderr) if dump: - if not sort and not modifyTime: - print("Mode: DUMP", file=sys.stderr) - elif sort and not modifyTime: - print("Mode: DUMP & SORT", file=sys.stderr) - elif not sort and modifyTime: - print("Mode: DUMP & MODIFY_TIME", file=sys.stderr) - elif sort and modifyTime: - print("Mode: DUMP & SORT & MODIFY_TIME", file=sys.stderr) + flags = [] + if speed: + flags.append(f"speed={speed}") + if sort: + flags.append("sort") + if modifyTime: + flags.append("modify time") + flagStr = "" + if flags: + flagStr = f" ({', '.join(flags)})" + print(f"Mode: DUMP{flagStr}", file=sys.stderr) print(f"Archive: {archiveDirectory}", file=sys.stderr) if checkSDS: - print("Mode: Check", file=sys.stderr) + print("Mode: CHECK", file=sys.stderr) if importMode: print("Mode: IMPORT", file=sys.stderr) @@ -1157,7 +1188,7 @@ def main(): else: out = sys.stdout.buffer - # list file witht times takes priority over nslc list + # list file with times takes priority over nslc list if listFile: nslcFile = None @@ -1174,7 +1205,8 @@ def main(): for stream in streamFilter: if stream.tmin >= stream.tmax: print( - f"Info: ignoring {stream.net}.{stream.sta}.{stream.loc}.{stream.cha} - " + "Info: " + f"ignoring {stream.net}.{stream.sta}.{stream.loc}.{stream.cha} - " f"start {stream.tmin} after end {stream.tmax}", file=sys.stderr, ) @@ -1228,8 +1260,9 @@ def main(): f"{stream.cha} {stream.tmin} - {stream.tmax}", file=sys.stderr, ) - stime = None - realTime = seiscomp.core.Time.GMT() + + firstRecordEndTime = None + startTime = seiscomp.core.Time.UTC() if sort: records = Sorter(archiveIterator) @@ -1245,36 +1278,34 @@ def main(): if ignoreRecords: continue - etime = seiscomp.core.Time(rec.endTime()) + etime = rec.endTime() - if stime is None: - stime = etime + if not firstRecordEndTime: + firstRecordEndTime = seiscomp.core.Time(etime) if verbose: - print(f"First record: {stime.iso()}", file=sys.stderr) + print( + f"First record end time: {firstRecordEndTime.iso()}", + file=sys.stderr, + ) - dt = etime - stime + if speed: + dt = (etime - firstRecordEndTime).length() + playTime = startTime + seiscomp.core.TimeSpan(dt / speed) - now = seiscomp.core.Time.GMT() + if modifyTime: + recLength = etime - rec.startTime() + rec.setStartTime(seiscomp.core.Time(playTime) - recLength) - if speed > 0: - playTime = (realTime + dt).toDouble() / speed - else: - playTime = now.toDouble() - - sleepTime = playTime - now.toDouble() - if sleepTime > 0: - time.sleep(sleepTime) - - if modifyTime: - recLength = etime - rec.startTime() - rec.setStartTime(seiscomp.core.Time(playTime) - recLength) + sleepSeconds = (playTime - seiscomp.core.Time.UTC()).length() + if sleepSeconds > 0: + time.sleep(sleepSeconds) if verbose: - etime = rec.endTime() print( - f"{rec.streamID()} time current: " - f"{seiscomp.core.Time.LocalTime().iso()} start: " - f"{rec.startTime().iso()} end: {etime.iso()}", + f"{rec.streamID()} " + f"current time: {seiscomp.core.Time.LocalTime().iso()}" + f", rec start: {rec.startTime().iso()}" + f", rec end: {rec.startTime().iso()}", file=sys.stderr, ) @@ -1529,7 +1560,8 @@ def main(): f = open(archiveDirectory + file, "ab") except BaseException: print( - f"File {archiveDirectory + file} could not be opened for writing", + f"File {archiveDirectory + file} could not be opened " + f"for writing", file=sys.stderr, ) return -1 @@ 
-1605,8 +1637,8 @@ def main(): print(fileName, file=sys.stderr) if printStreams and streamDict: - minTime = seiscomp.core.Time.GMT() - maxTime = str2time("1970-01-01 00:00:00") + minTime = None + maxTime = None totalRecs = 0 totalSamples = 0 totalChans = set() @@ -1624,8 +1656,12 @@ def main(): file=sys.stderr, ) - maxTime = max(maxTime, str2time(end)) - minTime = min(minTime, str2time(start)) + if minTime: + minTime = min(minTime, str2time(start)) + maxTime = max(maxTime, str2time(end)) + else: + minTime = str2time(start) + maxTime = str2time(end) totalChans.add(key) totalNetworks.add(key.split(".")[0]) @@ -1637,28 +1673,17 @@ def main(): "# Summary", file=sys.stderr, ) + if minTime and maxTime: + print( + f"# time range: {minTime.iso()} - {maxTime.iso()}", + file=sys.stderr, + ) print( - f"# time range: {minTime.iso()} - {maxTime.iso()}", - file=sys.stderr, - ) - print( - f"# networks: {len(totalNetworks)}", - file=sys.stderr, - ) - print( - f"# stations: {len(totalStations)}", - file=sys.stderr, - ) - print( - f"# streams: {len(totalChans)}", - file=sys.stderr, - ) - print( - f"# records: {totalRecs}", - file=sys.stderr, - ) - print( - f"# samples: {totalSamples}", + f""""# networks: {len(totalNetworks)} +# stations: {len(totalStations)} +# streams: {len(totalChans)} +# records: {totalRecs} +# samples: {totalSamples}""", file=sys.stderr, ) diff --git a/bin/scautoloc b/bin/scautoloc index 319096c..4ac2336 100755 Binary files a/bin/scautoloc and b/bin/scautoloc differ diff --git a/bin/scautopick b/bin/scautopick index 76ac994..cccb014 100755 Binary files a/bin/scautopick and b/bin/scautopick differ diff --git a/bin/scchkcfg b/bin/scchkcfg index e39053e..2683dc9 100755 Binary files a/bin/scchkcfg and b/bin/scchkcfg differ diff --git a/bin/sccnv b/bin/sccnv index 296eec9..396e79a 100755 Binary files a/bin/sccnv and b/bin/sccnv differ diff --git a/bin/scconfig b/bin/scconfig index 01ccb76..5cd59bf 100755 Binary files a/bin/scconfig and b/bin/scconfig differ diff --git a/bin/scdb b/bin/scdb index c5c95d3..7e3c2de 100755 Binary files a/bin/scdb and b/bin/scdb differ diff --git a/bin/scdbstrip b/bin/scdbstrip index c6add30..cf2beb8 100755 --- a/bin/scdbstrip +++ b/bin/scdbstrip @@ -81,25 +81,20 @@ class MySQLDB(QueryInterface): return tmp_tables def deleteObjectQuery(self, *v): - if v[0]: - q = ( - "delete " - + v[0] - + " from " - + ", ".join(v) - + " where " - + v[0] - + "._oid=" - + v[1] - + "._oid and " - ) - else: - q = "delete " + v[1] + " from " + ", ".join(v[1:]) + " where " + q = ( + "delete " + + v[0] + + " from " + + ", ".join(v) + + " where " + + v[0] + + "._oid=" + + v[1] + + "._oid" + ) for i in range(1, len(v) - 1): - if i > 1: - q += " and " - q += v[i] + "._oid=" + v[i + 1] + "._oid" + q += " and " + v[i] + "._oid=" + v[i + 1] + "._oid" return q @@ -211,25 +206,20 @@ class PostgresDB(QueryInterface): return tmp_tables def deleteObjectQuery(self, *v): - if v[0]: - q = ( - "delete from " - + v[0] - + " using " - + ", ".join(v[1:]) - + " where " - + v[0] - + "._oid=" - + v[1] - + "._oid and " - ) - else: - q = "delete from " + v[1] + " using " + ", ".join(v[2:]) + " where " + q = ( + "delete from " + + v[0] + + " using " + + ", ".join(v[1:]) + + " where " + + v[0] + + "._oid=" + + v[1] + + "._oid" + ) for i in range(1, len(v) - 1): - if i > 1: - q += " and " - q += v[i] + "._oid=" + v[i + 1] + "._oid" + q += " and " + v[i] + "._oid=" + v[i + 1] + "._oid" return q @@ -333,6 +323,8 @@ class DBCleaner(seiscomp.client.Application): self._invertMode = False self._stripEP = True 
self._stripQC = True + self._keepModes = [] # Array with modes to keep + self._keepStatus = [] # Array with status to keep self._steps = 0 self._currentStep = 0 @@ -368,6 +360,18 @@ class DBCleaner(seiscomp.client.Application): "Event-IDs to keep in the database. Combining with 'qc-only' " "is invalid.", ) + self.commandline().addStringOption( + "Objects", + "keep-event-modes", + "Keep all events where is evaluation mode of the preferred origin is " + "one of the given modes." + ) + self.commandline().addStringOption( + "Objects", + "keep-event-status", + "Keep all events where is evaluation status of the preferred origin is " + "one of the given status." + ) self.commandline().addOption( "Objects", "qc-only,Q", @@ -473,7 +477,7 @@ class DBCleaner(seiscomp.client.Application): f"""Usage: {os.path.basename(__file__)} [options] -Remove event and waveform quality parameters from the database in a timespan. Use +Remove event and waveform quality parameters from the database in a timespan. Use scardac for removing data availability parameters.""" ) @@ -543,6 +547,19 @@ Remove all waveform QC paramters older than 30 days but do not effect event para except RuntimeError: pass + try: + status = self.commandline().optionString("keep-event-status") + self._keepStatus = [s.strip() for s in status.split(",")] + print(status, self._keepStatus) + except RuntimeError: + pass + + try: + modes = self.commandline().optionString("keep-event-modes") + self._keepModes = [m.strip() for m in modes.split(",")] + except RuntimeError: + pass + try: dateTime = self.commandline().optionString("datetime") except RuntimeError: @@ -694,6 +711,11 @@ Remove all waveform QC paramters older than 30 days but do not effect event para self.beginMessage("Search objects") if not self.runCommand(tmp_object): return False + + tmp_object = "create index idx_oid on tmp_object(_oid)" + if not self.runCommand(tmp_object): + return False + self.endMessage(self.globalCount("tmp_object")) for table in tables: @@ -783,6 +805,7 @@ Remove all waveform QC paramters older than 30 days but do not effect event para self.beginMessage("Deleting waveform quality parameters") if not self.runCommand( self._query.deleteObjectQuery("Object", "WaveformQuality") + + " and " + timeRangeSelection(f"WaveformQuality.{self.cnvCol('end')}") ): return False @@ -822,9 +845,28 @@ Remove all waveform QC paramters older than 30 days but do not effect event para + " not in ('%s')" % "','".join(self._keepEvents) ) + if len(self._keepModes) > 0: + old_events += ( + " and Origin." + + self.cnvCol("evaluationMode") + + " not in ('%s')" % "','".join(self._keepModes) + ) + + if len(self._keepStatus) > 0: + old_events += ( + " and Origin." 
+ + self.cnvCol("evaluationStatus") + + " not in ('%s')" % "','".join(self._keepStatus) + ) + self.beginMessage("Find old events") if not self.runCommand(old_events): return False + + old_events = "create index idx_oid on old_events(_oid)" + if not self.runCommand(old_events): + return False + self.endMessage(self.globalCount("old_events")) # Delete OriginReferences of old events @@ -879,6 +921,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para self.beginMessage("Find unassociated focal mechanisms") + if not self.runCommand(tmp_fm): + return False + + tmp_fm = "create index idx_oid on tmp_fm(_oid)" if not self.runCommand(tmp_fm): return False @@ -990,6 +1036,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para self.beginMessage("Find unassociated origins") + if not self.runCommand(tmp_origin): + return False + + tmp_origin = "create index idx_oid on tmp_origin(_oid)" if not self.runCommand(tmp_origin): return False @@ -998,7 +1048,7 @@ Remove all waveform QC paramters older than 30 days but do not effect event para update tmp_origin set used=1 \ where (" + self.cnvCol("publicID") - + " in (select distinct " + + " in (select " + self.cnvCol("originID") + " from OriginReference)) \ or (" @@ -1093,6 +1143,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para ) ) + if not self.runCommand(tmp_pick): + return False + + tmp_pick = "create index idx_oid on tmp_pick(_oid)" if not self.runCommand(tmp_pick): return False @@ -1145,6 +1199,10 @@ Remove all waveform QC paramters older than 30 days but do not effect event para ) ) + if not self.runCommand(tmp_amp): + return False + + tmp_amp = "create index idx_oid on tmp_amp(_oid)" if not self.runCommand(tmp_amp): return False @@ -1287,27 +1345,27 @@ Remove all waveform QC paramters older than 30 days but do not effect event para self._query.deleteJournalQuery("PublicObject", *v) + " and used=0" ) self.runCommand( - self._query.deleteObjectQuery(None, "Object", *v) + " and used=0" + self._query.deleteObjectQuery("Object", *v) + " and used=0" ) self.runCommand( - self._query.deleteObjectQuery(None, "PublicObject", *v) + " and used=0" + self._query.deleteObjectQuery("PublicObject", *v) + " and used=0" ) def deleteObjects(self, *v): self.runCommand(self._query.deleteJournalQuery("PublicObject", *v)) - self.runCommand(self._query.deleteObjectQuery("Object", *v)) - self.runCommand(self._query.deleteObjectQuery("PublicObject", *v)) - self.runCommand(self._query.deleteObjectQuery(None, *v)) + self.runCommand(self._query.deleteObjectQuery(*v)) + self.runCommand(self._query.deleteObjectQuery("PublicObject", *v[1:])) + self.runCommand(self._query.deleteObjectQuery("Object", *v[1:])) def deleteUnusedObjects(self, *v): self.runCommand( self._query.deleteJournalQuery("PublicObject", *v) + " and used=0" ) - self.runCommand(self._query.deleteObjectQuery("Object", *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery(*v) + " and used=0") self.runCommand( - self._query.deleteObjectQuery("PublicObject", *v) + " and used=0" + self._query.deleteObjectQuery("PublicObject", *v[1:]) + " and used=0" ) - self.runCommand(self._query.deleteObjectQuery(None, *v) + " and used=0") + self.runCommand(self._query.deleteObjectQuery("Object", *v[1:]) + " and used=0") def delete(self, message, func, *v): self.beginMessage(message) diff --git a/bin/scdispatch b/bin/scdispatch index d96da46..77b1347 100755 Binary files a/bin/scdispatch and b/bin/scdispatch differ diff --git 
a/bin/scdumpobject b/bin/scdumpobject deleted file mode 100755 index 22c3797..0000000 --- a/bin/scdumpobject +++ /dev/null @@ -1,84 +0,0 @@ -#!/usr/bin/env seiscomp-python -# -*- coding: utf-8 -*- -############################################################################ -# Copyright (C) GFZ Potsdam # -# All rights reserved. # -# # -# GNU Affero General Public License Usage # -# This file may be used under the terms of the GNU Affero # -# Public License version 3.0 as published by the Free Software Foundation # -# and appearing in the file LICENSE included in the packaging of this # -# file. Please review the following information to ensure the GNU Affero # -# Public License version 3.0 requirements will be met: # -# https://www.gnu.org/licenses/agpl-3.0.html. # -############################################################################ - -import sys -import seiscomp.client, seiscomp.datamodel, seiscomp.io - - -class ObjectDumper(seiscomp.client.Application): - - def __init__(self): - seiscomp.client.Application.__init__(self, len(sys.argv), sys.argv) - self.setMessagingEnabled(True) - self.setDatabaseEnabled(True, False) - self.setMessagingUsername("") - - def createCommandLineDescription(self): - seiscomp.client.Application.createCommandLineDescription(self) - self.commandline().addGroup("Dump") - self.commandline().addStringOption("Dump", "public-id,P", "publicID") - - def loadEventParametersObject(self, publicID): - for tp in ( - seiscomp.datamodel.Pick, - seiscomp.datamodel.Amplitude, - seiscomp.datamodel.Origin, - seiscomp.datamodel.Event, - seiscomp.datamodel.FocalMechanism, - seiscomp.datamodel.Magnitude, - seiscomp.datamodel.StationMagnitude, - ): - - obj = self.query().loadObject(tp.TypeInfo(), publicID) - obj = tp.Cast(obj) - if obj: - ep = seiscomp.datamodel.EventParameters() - ep.add(obj) - return ep - - def loadInventoryObject(self, publicID): - for tp in ( - seiscomp.datamodel.Network, - seiscomp.datamodel.Station, - seiscomp.datamodel.Sensor, - seiscomp.datamodel.SensorLocation, - seiscomp.datamodel.Stream, - ): - - obj = self.query().loadObject(tp.TypeInfo(), publicID) - obj = tp.Cast(obj) - if obj: - return obj - - def run(self): - publicID = self.commandline().optionString("public-id") - obj = self.loadEventParametersObject(publicID) - if obj is None: - obj = self.loadInventoryObject(publicID) - if obj is None: - raise ValueError("unknown object '" + publicID + "'") - - # dump formatted XML archive to stdout - ar = seiscomp.io.XMLArchive() - ar.setFormattedOutput(True) - ar.create("-") - ar.writeObject(obj) - ar.close() - return True - - -if __name__ == "__main__": - app = ObjectDumper() - app() diff --git a/bin/scesv b/bin/scesv index c312469..ed02909 100755 Binary files a/bin/scesv and b/bin/scesv differ diff --git a/bin/scevent b/bin/scevent index f27703a..ce7b7ab 100755 Binary files a/bin/scevent and b/bin/scevent differ diff --git a/bin/scevtls b/bin/scevtls index f605f12..b9c0d5c 100755 --- a/bin/scevtls +++ b/bin/scevtls @@ -53,7 +53,7 @@ def readXML(self): if self._eventType: try: - eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type()) + eventType = seiscomp.datamodel.EEventTypeNames.name(evt.type()) if eventType != self._eventType: continue except ValueError: @@ -108,10 +108,16 @@ class EventList(seiscomp.client.Application): ) self.commandline().addGroup("Events") self.commandline().addStringOption( - "Events", "begin", "Specify the lower bound of the time interval." + "Events", + "begin", + "Specify the lower bound of the time interval. 
Uses 1900-01-01T00:00:00 " + "unless given.", ) self.commandline().addStringOption( - "Events", "end", "Specify the upper bound of the time interval." + "Events", + "end", + "Specify the upper bound of the time interval Uses 2500-01-01T00:00:00 " + "unless given.", ) self.commandline().addStringOption( "Events", @@ -266,7 +272,7 @@ List event IDs available in a given time range and print to stdout.""" f"""Examples: Print all event IDs from year 2022 and thereafter {os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \ ---begin "2022-01-01 00:00:00" +--begin 2022-01-01T00:00:00 Print all event IDs with event type 'quarry blast' {os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp --event-type 'quarry blast' @@ -303,7 +309,7 @@ Print IDs of all events in XML file if self._eventType: try: - eventType = seiscomp.datamodel.EEventTypeNames_name(evt.type()) + eventType = seiscomp.datamodel.EEventTypeNames.name(evt.type()) if eventType != self._eventType: continue except ValueError: diff --git a/bin/scevtstreams b/bin/scevtstreams index 479b3bc..e307e10 100755 --- a/bin/scevtstreams +++ b/bin/scevtstreams @@ -295,7 +295,7 @@ class EventStreams(client.Application): """Usage: scevtstreams [options] -Extract stream information and time windows from an event""" +Extract stream information and time windows from picks of an event or solitary picks.""" ) client.Application.printUsage(self) @@ -305,8 +305,8 @@ Extract stream information and time windows from an event""" Get the time windows for an event in the database: scevtstreams -E gfz2012abcd -d mysql://sysop:sysop@localhost/seiscomp -Create lists compatible with fdsnws: - scevtstreams -E gfz2012abcd -i event.xml -m 120,500 --fdsnws +Get the time windows for all picks given in an XML file without origins and events: + scevtstreams -i picks.xml -m 120,500 """ ) @@ -314,10 +314,14 @@ Create lists compatible with fdsnws: resolveWildcards = self.commandline().hasOption("resolve-wildcards") picks = [] - # read picks from input file if self.inputFile: - picks = self.readXML() + try: + picks = self.readXML() + except IOError as e: + print(f"Error: {e}", file=sys.stderr) + return False + if not picks: raise ValueError("Could not find picks in input file") @@ -327,6 +331,7 @@ Create lists compatible with fdsnws: pick = datamodel.Pick.Cast(obj) if pick is None: continue + picks.append(pick) if not picks: @@ -502,11 +507,18 @@ Create lists compatible with fdsnws: ep = datamodel.EventParameters.Cast(obj) if ep is None: - raise ValueError("no event parameters found in input file") + # pick may be provided as base object, only one can be read + pick = datamodel.Pick.Cast(obj) + if pick is None: + raise ValueError( + "Neither event parameters nor pick found in input file" + ) + else: + return [pick] # we require at least one origin which references to picks via arrivals - if ep.originCount() == 0: - raise ValueError("no origin found in input file") + if ep.originCount() == 0 and ep.pickCount() == 0: + raise ValueError("No origin found in input file") originIDs = [] @@ -524,7 +536,7 @@ Create lists compatible with fdsnws: # use first event/origin if no id was specified else: # no event, use first available origin - if ep.eventCount() == 0: + if ep.eventCount() == 0 and ep.originCount() > 0: if ep.originCount() > 1: print( "WARNING: Input file contains no event but more than " @@ -534,7 +546,7 @@ Create lists compatible with fdsnws: originIDs.append(ep.origin(0).publicID()) # use origin references of first available event - else: + 
elif ep.eventCount() > 0 and ep.originCount() > 0: if ep.eventCount() > 1: print( "WARNING: Input file contains more than 1 event. " @@ -546,10 +558,18 @@ Create lists compatible with fdsnws: ev.originReference(i).originID() for i in range(ev.originReferenceCount()) ] + else: + print("Found no origins, trying to continue with picks only.") + + if originIDs: + print( + f"Considering all arrivals from {len(originIDs)} origin(s).", + file=sys.stderr, + ) - # collect pickIDs pickIDs = set() for oID in originIDs: + # collect pickIDs from origins o = datamodel.Origin.Find(oID) if o is None: continue @@ -557,6 +577,11 @@ Create lists compatible with fdsnws: for i in range(o.arrivalCount()): pickIDs.add(o.arrival(i).pickID()) + if len(pickIDs) == 0: + # try reading picks only + for i in range(ep.pickCount()): + pickIDs.add(ep.pick(i).publicID()) + # lookup picks picks = [] for pickID in pickIDs: @@ -564,6 +589,9 @@ Create lists compatible with fdsnws: if pick: picks.append(pick) + if len(pickIDs) == 0: + print("Found no picks.", file=sys.stderr) + return picks diff --git a/bin/scheli b/bin/scheli index 3c243cb..5cc20c4 100755 Binary files a/bin/scheli and b/bin/scheli differ diff --git a/bin/scimex b/bin/scimex index daafe4e..e003c0c 100755 Binary files a/bin/scimex and b/bin/scimex differ diff --git a/bin/scimport b/bin/scimport index 0cc809b..292a954 100755 Binary files a/bin/scimport and b/bin/scimport differ diff --git a/bin/scinv b/bin/scinv index 42d8cca..883fb1b 100755 Binary files a/bin/scinv and b/bin/scinv differ diff --git a/bin/scm b/bin/scm index 920c6dd..8f01b28 100755 Binary files a/bin/scm and b/bin/scm differ diff --git a/bin/scmag b/bin/scmag index 687eea5..d12b914 100755 Binary files a/bin/scmag and b/bin/scmag differ diff --git a/bin/scmapcut b/bin/scmapcut index 6c35282..ef45fbd 100755 Binary files a/bin/scmapcut and b/bin/scmapcut differ diff --git a/bin/scmaster b/bin/scmaster index a5ea71a..f2b416f 100755 Binary files a/bin/scmaster and b/bin/scmaster differ diff --git a/bin/scmm b/bin/scmm index d7b74eb..71d5861 100755 Binary files a/bin/scmm and b/bin/scmm differ diff --git a/bin/scmsdemux b/bin/scmsdemux new file mode 100755 index 0000000..eae5815 --- /dev/null +++ b/bin/scmsdemux @@ -0,0 +1,144 @@ +#!/usr/bin/env seiscomp-python + +############################################################################ +# Copyright (C) gempa GmbH # +# All rights reserved. # +# Contact: gempa GmbH (seiscomp-dev@gempa.de) # +# # +# GNU Affero General Public License Usage # +# This file may be used under the terms of the GNU Affero # +# Public License version 3.0 as published by the Free Software Foundation # +# and appearing in the file LICENSE included in the packaging of this # +# file. Please review the following information to ensure the GNU Affero # +# Public License version 3.0 requirements will be met: # +# https://www.gnu.org/licenses/agpl-3.0.html. # +# # +# Other Usage # +# Alternatively, this file may be used in accordance with the terms and # +# conditions contained in a signed written agreement between you and # +# gempa GmbH. # +############################################################################ + +import os +import sys + +from getopt import gnu_getopt, GetoptError +from seiscomp import mseedlite as mseed + + +def usage(): + print( + f"""Usage: + {os.path.basename(__file__)} source + +Demultiplex all miniSEED records found in the given source by stream code writing them +into separate new files. The source can be files or stdin. 
One file per stream is +generated. File names are derived from stream codes and the begin time of the records. + +Verbosity: + -h, --help Display this help message. + -v, --verbose Verbose mode. + +Examples: +Demultiplex the miniSEED records contained in data.mseed and additionally print the +names of created files to stderr + {os.path.basename(__file__)} -v data.mseed + +Demultiplex the miniSEED records received from stdin + scmssort -u -E data.mseed | {os.path.basename(__file__)} - +""" + ) + + +def main(): + try: + opts, args = gnu_getopt( + sys.argv[1:], + "hv", + [ + "help", + "verbose", + ], + ) + except GetoptError: + print( + f"{os.path.basename(__file__)}: Unknown option", + file=sys.stderr, + ) + usage() + return False + + verbosity = False + for flag, arg in opts: + if flag in ("-h", "--help"): + usage() + return True + + if flag in ("-v", "--verbose"): + verbosity = True + + inFile = sys.stdin.buffer + try: + if len(args[0]) > 0: + openFiles = {} + except Exception: + print( + f"{os.path.basename(__file__)}: Missing source", + file=sys.stderr, + ) + usage() + sys.exit(1) + + if len(args) == 1: + if args[0] != "-": + try: + inFile = open(args[0], "rb") + except IOError as e: + print( + f"Could not open input file '{args[0]}' for reading: {e}", + file=sys.stderr, + ) + return False + else: + print( + "Waiting for miniSEED records on stdin. Use Ctrl + C to interrupt.", + file=sys.stderr, + ) + elif len(args) != 0: + usage() + sys.exit(1) + + try: + for rec in mseed.Input(inFile): + oName = "%s.%s.%s.%s" % (rec.sta, rec.net, rec.loc, rec.cha) + + if oName not in openFiles: + postfix = ".D.%04d.%03d.%02d%02d" % ( + rec.begin_time.year, + rec.begin_time.timetuple()[7], + rec.begin_time.hour, + rec.begin_time.minute, + ) + + openFiles[oName] = open(oName + postfix, "ab") + + oFile = openFiles[oName] + oFile.write(rec.header + rec.data) + + if verbosity: + print("Generated output files:", file=sys.stderr) + + for oName in openFiles: + if verbosity: + print(f" {oName}", file=sys.stderr) + + openFiles[oName].close() + + except KeyboardInterrupt: + return True + + return True + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/bin/scmv b/bin/scmv index bb909ec..12ad32b 100755 Binary files a/bin/scmv and b/bin/scmv differ diff --git a/bin/scmvx b/bin/scmvx new file mode 100755 index 0000000..3f4d539 Binary files /dev/null and b/bin/scmvx differ diff --git a/bin/scolv b/bin/scolv index c337dd9..a0b5a60 100755 Binary files a/bin/scolv and b/bin/scolv differ diff --git a/bin/scorg2nll b/bin/scorg2nll index a8508b9..cd59c22 100755 Binary files a/bin/scorg2nll and b/bin/scorg2nll differ diff --git a/bin/scorgls b/bin/scorgls index c3936cb..ffa59d9 100755 --- a/bin/scorgls +++ b/bin/scorgls @@ -89,12 +89,14 @@ class OriginList(seiscomp.client.Application): self.commandline().addStringOption( "Origins", "begin", - "The lower bound of the time interval. Format: '1970-01-01 00:00:00'.", + "The lower bound of the time interval. Uses 1900-01-01T00:00:00 unless " + "given.", ) self.commandline().addStringOption( "Origins", "end", - "The upper bound of the time interval. Format: '1970-01-01 00:00:00'.", + "The upper bound of the time interval. Format: 1970-01-01T00:00:00. Uses " + "2500-01-01T00:00:00 unless given.", ) self.commandline().addStringOption( "Origins", "author", "The author of the origins." 
@@ -179,7 +181,7 @@ List origin IDs available in a given time range and print to stdout.""" f"""Examples: Print all origin IDs from year 2022 and thereafter {os.path.basename(__file__)} -d mysql://sysop:sysop@localhost/seiscomp \ ---begin "2022-01-01 00:00:00" +--begin 2022-01-01T00:00:00 Print IDs of all events in XML file {os.path.basename(__file__)} -i origins.xml diff --git a/bin/scplot b/bin/scplot index 4b3b506..ca8e25e 100755 Binary files a/bin/scplot and b/bin/scplot differ diff --git a/bin/scproclat b/bin/scproclat index 345066d..88558be 100755 --- a/bin/scproclat +++ b/bin/scproclat @@ -13,19 +13,25 @@ # https://www.gnu.org/licenses/agpl-3.0.html. # ############################################################################ -import time, sys, os, traceback -import seiscomp.core, seiscomp.client, seiscomp.datamodel -import seiscomp.logging, seiscomp.system +import os +import sys +import traceback + +import seiscomp.core +import seiscomp.client +import seiscomp.datamodel +import seiscomp.logging +import seiscomp.system -def createDirectory(dir): - if os.access(dir, os.W_OK): +def createDirectory(directory): + if os.access(directory, os.W_OK): return True try: - os.makedirs(dir) + os.makedirs(directory) return True - except: + except OSError: return False @@ -46,8 +52,8 @@ def timeSpanToString(ts): if neg: return "-%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs) - else: - return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs) + + return "%.2d:%.2d:%.2d:%.2d.%06d" % (days, hours, mins, secs, usecs) class ProcLatency(seiscomp.client.Application): @@ -135,8 +141,6 @@ class ProcLatency(seiscomp.client.Application): def logObject(self, parentID, obj, update): now = seiscomp.core.Time.GMT() - time = None - pick = seiscomp.datamodel.Pick.Cast(obj) if pick: phase = "" @@ -199,7 +203,7 @@ class ProcLatency(seiscomp.client.Application): pass try: - status = seiscomp.datamodel.EOriginStatusNames.name(org.status()) + status = seiscomp.datamodel.EEvaluationStatusNames.name(org.status()) except: pass @@ -286,7 +290,7 @@ class ProcLatency(seiscomp.client.Application): sys.stdout.write(f"{timeToString(received)};{logEntry}\n") if nowDirectory != self._nowDirectory: - if createDirectory(nowDirectory) == False: + if not createDirectory(nowDirectory): seiscomp.logging.error(f"Unable to create directory {nowDirectory}") return False @@ -298,7 +302,7 @@ class ProcLatency(seiscomp.client.Application): ) if triggeredDirectory != self._triggeredDirectory: - if createDirectory(triggeredDirectory) == False: + if not createDirectory(triggeredDirectory): seiscomp.logging.error( f"Unable to create directory {triggeredDirectory}" ) @@ -321,7 +325,7 @@ class ProcLatency(seiscomp.client.Application): # logEntry = timeToString(received) logEntry = "" - if not triggered is None: + if triggered is not None: aTriggered = triggered.get() triggeredDirectory = ( self._directory + "/".join(["%.2d" % i for i in aTriggered[1:4]]) + "/" @@ -341,7 +345,7 @@ class ProcLatency(seiscomp.client.Application): sys.stdout.write(f"{timeToString(received)};{logEntry}\n") if nowDirectory != self._nowDirectory: - if createDirectory(nowDirectory) == False: + if not createDirectory(nowDirectory): seiscomp.logging.error(f"Unable to create directory {nowDirectory}") return False @@ -353,7 +357,7 @@ class ProcLatency(seiscomp.client.Application): if triggeredDirectory: if triggeredDirectory != self._triggeredDirectory: - if createDirectory(triggeredDirectory) == False: + if not createDirectory(triggeredDirectory): 
seiscomp.logging.error( f"Unable to create directory {triggeredDirectory}" ) @@ -369,11 +373,8 @@ class ProcLatency(seiscomp.client.Application): return True def writeLog(self, file, text): - of = open(file, "a") - if of: - of.write(text) - of.write("\n") - of.close() + with open(file, "a", encoding="utf8") as of: + print(text, file=of) app = ProcLatency(len(sys.argv), sys.argv) diff --git a/bin/scqc b/bin/scqc index 33cb21d..fb5a6b3 100755 Binary files a/bin/scqc and b/bin/scqc differ diff --git a/bin/scqcv b/bin/scqcv index 7538fb3..9fe2cdb 100755 Binary files a/bin/scqcv and b/bin/scqcv differ diff --git a/bin/scquery b/bin/scquery index 4896c13..cf04115 100755 Binary files a/bin/scquery and b/bin/scquery differ diff --git a/bin/scqueryqc b/bin/scqueryqc index a0f908f..7b49976 100755 --- a/bin/scqueryqc +++ b/bin/scqueryqc @@ -105,10 +105,14 @@ class WfqQuery(seiscomp.client.Application): self.commandline().addGroup("Query") self.commandline().addStringOption( - "Query", "begin,b", "Begin time of query: 'YYYY-MM-DD hh:mm:ss'" + "Query", + "begin,b", + "Begin time of query. Uses 1900-01-01T00:00:00 unless given.", ) self.commandline().addStringOption( - "Query", "end,e", "End time of query: 'YYYY-MM-DD hh:mm:ss'" + "Query", + "end,e", + "End time of query. Uses current time unless given.", ) self.commandline().addStringOption( "Query", @@ -116,7 +120,7 @@ class WfqQuery(seiscomp.client.Application): "Waveform stream ID to search for QC parameters: net.sta.loc.cha -" " [networkCode].[stationCode].[sensorLocationCode].[channelCode]. " "Provide a single ID or a comma-separated list. Overrides " - "--streams-from-inventory", + "--streams-from-inventory.", ) self.commandline().addStringOption( "Query", @@ -151,8 +155,8 @@ Query a database for waveform quality control (QC) parameters.""", print( f"""Examples: Query rms and delay values for streams 'AU.AS18..SHZ' and 'AU.AS19..SHZ' from \ -'2021-11-20 00:00:00' until current - {os.path.basename(__file__)} -d localhost -b '2021-11-20 00:00:00' -p rms,delay \ +2021-11-20 00:00:00 until current + {os.path.basename(__file__)} -d localhost -b 2021-11-20T00:00:00 -p rms,delay \ -i AU.AS18..SHZ,AU.AS19..SHZ""", file=sys.stderr, ) diff --git a/bin/screloc b/bin/screloc index 1aecd09..78daf55 100755 Binary files a/bin/screloc and b/bin/screloc differ diff --git a/bin/screpick b/bin/screpick index 1b77019..1e698f4 100755 Binary files a/bin/screpick and b/bin/screpick differ diff --git a/bin/scrttv b/bin/scrttv index 2ebc813..31b0ca6 100755 Binary files a/bin/scrttv and b/bin/scrttv differ diff --git a/bin/scsendorigin b/bin/scsendorigin index 3c4d724..07771c8 100755 --- a/bin/scsendorigin +++ b/bin/scsendorigin @@ -69,8 +69,8 @@ class SendOrigin(seiscomp.client.Application): "Parameters", "coord", "Latitude,longitude,depth of origin" ) self.commandline().addStringOption("Parameters", "time", "time of origin") - except: - seiscomp.logging.warning(f"caught unexpected error {sys.exc_info()}") + except Exception: + seiscomp.logging.warning(f"Caught unexpected error {sys.exc_info()}") def printUsage(self): print( @@ -85,7 +85,7 @@ Create an artificial origin and send to the messaging""" print( """Examples: Send an artificial origin with hypocenter parameters to the messaging - scsendorigin --time "2022-05-01 10:00:00" --coord 52,12,10 + scsendorigin --time 2022-05-01T10:00:00 --coord 52,12,10 """ ) diff --git a/bin/scshowevent b/bin/scshowevent index 859e53c..494511f 100755 Binary files a/bin/scshowevent and b/bin/scshowevent differ diff --git 
a/bin/scsmdump b/bin/scsmdump new file mode 100755 index 0000000..73e3a50 Binary files /dev/null and b/bin/scsmdump differ diff --git a/bin/scsohlog b/bin/scsohlog index 4c8732e..c5967b4 100755 --- a/bin/scsohlog +++ b/bin/scsohlog @@ -359,9 +359,7 @@ Create an output XML file every 60 seconds and execute a custom script to proces try: f = open(self._outputFile, "w") except: - seiscomp.logging.error( - f"Unable to create output file: {self._outputFile}" - ) + seiscomp.logging.error(f"Unable to create output file: {self._outputFile}") return self.toXML(f) diff --git a/bin/scvoice b/bin/scvoice index 05d4797..0d70670 100755 --- a/bin/scvoice +++ b/bin/scvoice @@ -62,50 +62,52 @@ class VoiceAlert(client.Application): self.commandline().addOption( "Generic", "first-new", - "calls an event a new event when it is " "seen the first time", + "Calls an event a new event when it is seen the first time.", ) self.commandline().addGroup("Alert") self.commandline().addStringOption( "Alert", "amp-type", - "specify the amplitude type to listen to", + "Specify the amplitude type to listen to.", self._ampType, ) self.commandline().addStringOption( "Alert", "amp-script", - "specify the script to be called when a " + "Specify the script to be called when a " "stationamplitude arrived, network-, stationcode and amplitude are " - "passed as parameters $1, $2 and $3", + "passed as parameters $1, $2 and $3.", ) self.commandline().addStringOption( "Alert", "alert-script", - "specify the script to be called when a " + "Specify the script to be called when a " "preliminary origin arrived, latitude and longitude are passed as " - "parameters $1 and $2", + "parameters $1 and $2.", ) self.commandline().addStringOption( "Alert", "event-script", - "specify the script to be called when an " + "Specify the script to be called when an " "event has been declared; the message string, a flag (1=new event, " "0=update event), the EventID, the arrival count and the magnitude " - "(optional when set) are passed as parameter $1, $2, $3, $4 and $5", + "(optional when set) are passed as parameter $1, $2, $3, $4 and $5.", ) self.commandline().addGroup("Cities") self.commandline().addStringOption( "Cities", "max-dist", - "maximum distance for using the distance " "from a city to the earthquake", + "Maximum distance for using the distance from a city to the earthquake.", + str(self._citiesMaxDist), ) self.commandline().addStringOption( "Cities", "min-population", - "minimum population for a city to " "become a point of interest", + "Minimum population for a city to become a point of interest.", + str(self._citiesMinPopulation), ) self.commandline().addGroup("Debug") - self.commandline().addStringOption("Debug", "eventid,E", "specify Event ID") + self.commandline().addStringOption("Debug", "eventid,E", "Specify event ID.") return True def init(self): diff --git a/bin/scwfas b/bin/scwfas index acda6e9..b37a09a 100755 Binary files a/bin/scwfas and b/bin/scwfas differ diff --git a/bin/scwfparam b/bin/scwfparam index a8131a2..35e58df 100755 Binary files a/bin/scwfparam and b/bin/scwfparam differ diff --git a/bin/scxmldump b/bin/scxmldump index b70c200..67f0c56 100755 Binary files a/bin/scxmldump and b/bin/scxmldump differ diff --git a/bin/scxmlmerge b/bin/scxmlmerge index 115972b..ad52a0e 100755 Binary files a/bin/scxmlmerge and b/bin/scxmlmerge differ diff --git a/bin/sczip b/bin/sczip index faf199b..57eb570 100755 Binary files a/bin/sczip and b/bin/sczip differ diff --git a/bin/seiscomp-control.py b/bin/seiscomp-control.py index 
1b30acc..586a4f5 100755 --- a/bin/seiscomp-control.py +++ b/bin/seiscomp-control.py @@ -722,8 +722,8 @@ def on_status(args, _): if env.isModuleEnabled(mod.name) or isinstance( mod, seiscomp.kernel.CoreModule ): - mod.status(shouldModuleRun(mod.name)) - found += 1 + if mod.status(shouldModuleRun(mod.name)) == 0: + found += 1 if not useCSV: print(f"Summary: {found} modules enabled") @@ -733,8 +733,8 @@ def on_status(args, _): if len(args) > 0 and args[0] == "started": for mod in mods: if shouldModuleRun(mod.name): - mod.status(shouldModuleRun(mod.name)) - found += 1 + if mod.status(shouldModuleRun(mod.name)) == 0: + found += 1 if not useCSV: print(f"Summary: {found} modules started") @@ -743,8 +743,8 @@ def on_status(args, _): for mod in mods: if mod.name in args or len(args) == 0: - mod.status(shouldModuleRun(mod.name)) - found += 1 + if mod.status(shouldModuleRun(mod.name)) == 0: + found += 1 if not useCSV: print(f"Summary: {found} modules reported") diff --git a/bin/sh2proc b/bin/sh2proc index 8d1e43e..ec97108 100755 --- a/bin/sh2proc +++ b/bin/sh2proc @@ -86,7 +86,7 @@ class SH2Proc(seiscomp.client.Application): """Usage: sh2proc [options] -Convert Seismic Handler event data to SeisComP XML format""" +Convert Seismic Handler event data to SeisComP XML format which is sent to stdout.""" ) seiscomp.client.Application.printUsage(self) @@ -95,10 +95,10 @@ Convert Seismic Handler event data to SeisComP XML format""" """Examples: Convert the Seismic Handler file shm.evt to SCML. Receive the database connection to read inventory and configuration information from messaging - sh2proc shm.evt + sh2proc shm.evt > event.xml Read Seismic Handler data from stdin. Provide inventory and configuration in XML - cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml + cat shm.evt | sh2proc --inventory-db=inventory.xml --config-db=config.xml > event.xml """ ) @@ -489,7 +489,7 @@ Read Seismic Handler data from stdin. Provide inventory and configuration in XML seiscomp.datamodel.IMPULSIVE, seiscomp.datamodel.QUESTIONABLE, ]: - if value == seiscomp.datamodel.EPickOnsetNames_name(onset): + if value == seiscomp.datamodel.EPickOnsetNames.name(onset): pick.setOnset(onset) found = True break @@ -524,7 +524,7 @@ Read Seismic Handler data from stdin. 
Provide inventory and configuration in XML seiscomp.datamodel.AUTOMATIC, seiscomp.datamodel.MANUAL, ]: - if value == seiscomp.datamodel.EEvaluationModeNames_name(mode): + if value == seiscomp.datamodel.EEvaluationModeNames.name(mode): pick.setEvaluationMode(mode) found = True break diff --git a/bin/slarchive b/bin/slarchive index ef27982..de026dc 100755 Binary files a/bin/slarchive and b/bin/slarchive differ diff --git a/bin/slinktool b/bin/slinktool index 331fa18..63b95dc 100755 Binary files a/bin/slinktool and b/bin/slinktool differ diff --git a/bin/slmon2 b/bin/slmon2 new file mode 100755 index 0000000..3951879 --- /dev/null +++ b/bin/slmon2 @@ -0,0 +1,3673 @@ +#!/usr/bin/env seiscomp-python + +from getopt import getopt, GetoptError +from time import time, gmtime +from datetime import datetime +import os +import sys +import signal +import glob +import re +import json + +from seiscomp.myconfig import MyConfig +import seiscomp.slclient +import seiscomp.kernel, seiscomp.config +from urllib.request import urlopen + +# A dictionary to store station coordinates +station_coordinates = {} + +def load_station_coordinates(config): + """Load station coordinates from FDSN web service""" + global station_coordinates + + # Get base URL from config or use default + base_url = config['setup'].get('fdsnws_url', 'http://localhost:8080/fdsnws/') + + # Create a dictionary in the format needed by data_fetcher + stations_config = {} + for key in config.station: + network = config.station[key]['net'] + station = config.station[key]['sta'] + station_id = f"{network}.{station}" + stations_config[station_id] = { + 'network': network, + 'station': station, + 'location': '', # Default location + 'stream': 'HHZ' # Default stream + } + + # Fetch coordinates for each station + for station_id, station_info in stations_config.items(): + network = station_info['network'] + station = station_info['station'] + + try: + with urlopen(base_url + f"station/1/query?net={network}&sta={station}&format=text") as fp: + fp.readline() + location_info = dict(zip(('lat', 'lon', 'elevation'), map(float, fp.readline().split(b'|')[2:5]))) + + if location_info: + station_coordinates[f"{network}_{station}"] = location_info + print(f"Loaded coordinates for {network}_{station}: {location_info}") + else: + print(f"Could not fetch coordinates for {network}_{station}") + except Exception as e: + print(f"Error fetching coordinates for {network}_{station}: {str(e)}") + + # Print summary + print(f"Loaded coordinates for {len(station_coordinates)} stations") + + + +usage_info = """ +Usage: + slmon [options] + +Enhanced SeedLink monitor creating modern, interactive web dashboards + +Options: + -h, --help display this help message + -c ini_setup = arg + -s ini_stations = arg + -t refresh = float(arg) # XXX not yet used + -v verbose = 1 + -g, --generate generate only template files and exit + +Examples: +Start slmon from the command line + slmon -c $SEISCOMP_ROOT/var/lib/slmon/config.ini + +Restart slmon in order to update the web pages. 
Use crontab entries for +automatic restart, e.g.: + */3 * * * * /home/sysop/seiscomp/bin/seiscomp check slmon >/dev/null 2>&1 +""" + +def usage(exitcode=0): + sys.stderr.write(usage_info) + exit(exitcode) + +try: + seiscompRoot = os.environ["SEISCOMP_ROOT"] +except: + print("\nSEISCOMP_ROOT must be defined - EXIT\n", file=sys.stderr) + usage(exitcode=2) + +ini_stations = os.path.join(seiscompRoot, 'var/lib/slmon2/stations.ini') +ini_setup = os.path.join(seiscompRoot, 'var/lib/slmon2/config.ini') + +regexStreams = re.compile("[SLBVEH][HNLGD][ZNE123ADHF]") +verbose = 0 +generate_only = False + + +class Module(seiscomp.kernel.Module): + def __init__(self, env): + seiscomp.kernel.Module.__init__(self, env, env.moduleName(__file__)) + + def printCrontab(self): + print("3 * * * * %s/bin/seiscomp check slmon >/dev/null 2>&1" % (self.env.SEISCOMP_ROOT)) + + +class Status: + def __repr__(self): + return "%2s %-5s %2s %3s %1s %s %s" % \ + (self.net, self.sta, self.loc, self.cha, self.typ, + str(self.last_data), str(self.last_feed)) + + +class StatusDict(dict): + def __init__(self, source=None): + if source: + self.read(source) + + def fromSlinkTool(self, server="", stations=["AU_ARMA", "AU_BLDU", "AU_YAPP"]): + # later this shall use XML + cmd = "slinktool -nd 10 -nt 10 -Q %s" % server + print(cmd) + f = os.popen(cmd) + # regex = re.compile("[SLBVEH][HNLG][ZNE123]") + regex = regexStreams + for line in f: + net_sta = line[:2].strip() + "_" + line[3:8].strip() + if not net_sta in stations: + continue + typ = line[16] + if typ != "D": + continue + cha = line[12:15].strip() + if not regex.match(cha): + continue + + d = Status() + d.net = line[0: 2].strip() + d.sta = line[3: 8].strip() + d.loc = line[9:11].strip() + d.cha = line[12:15] + d.typ = line[16] + d.last_data = seiscomp.slclient.timeparse(line[47:70]) + d.last_feed = d.last_data + sec = "%s_%s" % (d.net, d.sta) + sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ) + self[sec] = d + + def read(self, source): + """ + Read status data from various source types (file path, file object, or list of lines) + Python 3 compatible version + """ + lines = [] + + # Handle different source types + if isinstance(source, str): + # String - treat as file path + with open(source, 'r', encoding='utf-8') as f: + lines = f.readlines() + elif hasattr(source, 'readlines'): + # File-like object + lines = source.readlines() + elif isinstance(source, list): + # Already a list of lines + lines = source + else: + raise TypeError(f'Cannot read from {type(source).__name__}') + + # Process each line + for line in lines: + line = str(line).rstrip('\n\r') + + # Skip lines that are too short + if len(line) < 65: + continue + + # Create status object and parse fields + d = Status() + d.net = line[0:2].strip() + d.sta = line[3:8].strip() + d.loc = line[9:11].strip() + d.cha = line[12:15].strip() + d.typ = line[16] + + # Parse timestamps with error handling + try: + d.last_data = seiscomp.slclient.timeparse(line[18:41]) + except: + d.last_data = None + + try: + d.last_feed = seiscomp.slclient.timeparse(line[42:65]) + except: + d.last_feed = None + + # Ensure last_feed is not earlier than last_data + if d.last_feed and d.last_data and d.last_feed < d.last_data: + d.last_feed = d.last_data + + # Create dictionary key and store + sec = f"{d.net}_{d.sta}:{d.loc}.{d.cha}.{d.typ}" + self[sec] = d + + def write(self, f): + """ + Write status data to file or file-like object + Python 3 compatible version + """ + should_close = False + + if isinstance(f, str): + # String - treat 
as file path + f = open(f, "w", encoding='utf-8') + should_close = True + + try: + # Prepare and write sorted lines + lines = [str(self[key]) for key in sorted(self.keys())] + f.write('\n'.join(lines) + '\n') + finally: + if should_close: + f.close() + + def to_json(self): + """Convert status dictionary to JSON for JavaScript use""" + global station_coordinates + stations_data = {} + + # Group by network and station + for key, value in self.items(): + net_sta = f"{value.net}_{value.sta}" + if net_sta not in stations_data: + stations_data[net_sta] = { + "network": value.net, + "station": value.sta, + "channels": [], + "channelGroups": { + "HH": [], # High-frequency, High-gain channels + "BH": [], # Broadband, High-gain channels + "LH": [], # Long-period, High-gain channels + "SH": [], # Short-period, High-gain channels + "EH": [], # Extremely Short-period, High-gain channels + "other": [] # All other channel types + } + } + + # Add coordinates if available + if net_sta in station_coordinates: + stations_data[net_sta]["coordinates"] = station_coordinates[net_sta] + + # Get latency information + now = datetime.utcnow() + latency_data = now - value.last_data + latency_seconds = total_seconds(latency_data) + + # Extract channel type (first two characters, e.g., 'LH', 'BH', 'HH', 'EH') + channel_type = value.cha[:2] if len(value.cha) >= 2 else "other" + + # Get status with channel-aware thresholds + status = get_status_from_seconds(latency_seconds, channel_type) + + # Create channel data + channel_data = { + "location": value.loc, + "channel": value.cha, + "type": value.typ, + "channelType": channel_type, + "last_data": value.last_data.isoformat() if value.last_data else None, + "last_feed": value.last_feed.isoformat() if value.last_feed else None, + "latency": latency_seconds, + "status": status + } + + # Add to main channels list + stations_data[net_sta]["channels"].append(channel_data) + + # Add to channel group for separated status calculation + if channel_type in stations_data[net_sta]["channelGroups"]: + stations_data[net_sta]["channelGroups"][channel_type].append(channel_data) + else: + stations_data[net_sta]["channelGroups"]["other"].append(channel_data) + + # Convert to list for easier JavaScript processing + stations_list = [] + for net_sta, data in stations_data.items(): + # Calculate overall station status based on priority channels (non-LH channels) + # First try HH channels + if data["channelGroups"]["HH"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["HH"]]) + data["status"] = get_status_from_seconds(worst_latency) + data["primaryChannelType"] = "HH" + # Then try BH channels + elif data["channelGroups"]["BH"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["BH"]]) + data["status"] = get_status_from_seconds(worst_latency) + data["primaryChannelType"] = "BH" + # Then try SH channels + elif data["channelGroups"]["SH"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["SH"]]) + data["status"] = get_status_from_seconds(worst_latency) + data["primaryChannelType"] = "SH" + # Then try EH channels + elif data["channelGroups"]["EH"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["EH"]]) + data["status"] = get_status_from_seconds(worst_latency) + data["primaryChannelType"] = "EH" + # Only use LH if nothing else is available + elif data["channelGroups"]["LH"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["LH"]]) + data["status"] = get_status_from_seconds(worst_latency, "LH") + 
data["primaryChannelType"] = "LH" + # Fall back to other channels + elif data["channelGroups"]["other"]: + worst_latency = max([ch["latency"] for ch in data["channelGroups"]["other"]]) + data["status"] = get_status_from_seconds(worst_latency) + data["primaryChannelType"] = "other" + else: + # Failsafe if no channels + data["status"] = "unavailable" + data["primaryChannelType"] = "none" + worst_latency = 0 + + data["latency"] = worst_latency + data["id"] = net_sta + stations_list.append(data) + + return json.dumps(stations_list) + +def get_map_settings(config): + """Extract map settings from config for JavaScript use""" + map_settings = { + 'center': { + 'lat': -25.6, # Default latitude + 'lon': 134.3, # Default longitude + 'zoom': 6 # Default zoom + }, + 'defaultLayer': 'street', + 'enableClustering': True, + 'showFullscreenControl': True, + 'showLayerControl': True, + 'showLocateControl': True, + 'darkModeLayer': 'dark', + 'lightModeLayer': 'street' + } + + # Extract center coordinates from config + if 'center_map' in config['setup']: + if 'lat' in config['setup']['center_map']: + map_settings['center']['lat'] = float(config['setup']['center_map']['lat']) + if 'lon' in config['setup']['center_map']: + map_settings['center']['lon'] = float(config['setup']['center_map']['lon']) + if 'zoom' in config['setup']['center_map']: + map_settings['center']['zoom'] = int(config['setup']['center_map']['zoom']) + + # Extract other map settings + if 'map_settings' in config['setup']: + map_config = config['setup']['map_settings'] + + if 'default_layer' in map_config: + map_settings['defaultLayer'] = map_config['default_layer'] + + if 'enable_clustering' in map_config: + map_settings['enableClustering'] = map_config['enable_clustering'] == 'true' or map_config['enable_clustering'] is True + + if 'show_fullscreen_control' in map_config: + map_settings['showFullscreenControl'] = map_config['show_fullscreen_control'] == 'true' or map_config['show_fullscreen_control'] is True + + if 'show_layer_control' in map_config: + map_settings['showLayerControl'] = map_config['show_layer_control'] == 'true' or map_config['show_layer_control'] is True + + if 'show_locate_control' in map_config: + map_settings['showLocateControl'] = map_config['show_locate_control'] == 'true' or map_config['show_locate_control'] is True + + if 'dark_mode_layer' in map_config: + map_settings['darkModeLayer'] = map_config['dark_mode_layer'] + + if 'light_mode_layer' in map_config: + map_settings['lightModeLayer'] = map_config['light_mode_layer'] + + return map_settings + +def get_status_from_seconds(seconds, channel_type=None): + """ + Get status code based on latency in seconds with channel-specific thresholds + + Args: + seconds (float): Latency in seconds + channel_type (str): Channel type (e.g., 'LH', 'BH', 'HH', 'EH') + + Returns: + str: Status code (good, delayed, etc.) 
+ """ + # Special handling for LH channels - they're naturally delayed + if channel_type == 'LH': + # More lenient thresholds for LH channels + if seconds > 604800: # > 7 days + return "unavailable" + elif seconds > 518400: # > 6 days + return "four-day" + elif seconds > 432000: # > 5 days + return "three-day" + elif seconds > 345600: # > 4 days + return "multi-day" + elif seconds > 259200: # > 3 days + return "day-delayed" + elif seconds > 86400: # > 1 day + return "critical" + elif seconds > 43200: # > 12 hours + return "warning" + elif seconds > 21600: # > 6 hours + return "hour-delayed" + elif seconds > 10800: # > 3 hours + return "very-delayed" + elif seconds > 3600: # > 1 hour + return "long-delayed" + elif seconds > 1800: # > 30 minutes + return "delayed" + else: # <= 30 minutes (LH channels are considered good even with moderate delay) + return "good" + + # Standard thresholds for other channels + if seconds > 432000: # > 5 days + return "unavailable" + elif seconds > 345600: # > 4 days + return "four-day" + elif seconds > 259200: # > 3 days + return "three-day" + elif seconds > 172800: # > 2 days + return "multi-day" + elif seconds > 86400: # > 1 day + return "day-delayed" + elif seconds > 21600: # > 6 hours + return "critical" + elif seconds > 7200: # > 2 hours + return "warning" + elif seconds > 3600: # > 1 hour + return "hour-delayed" + elif seconds > 1800: # > 30 minutes + return "very-delayed" + elif seconds > 600: # > 10 minutes + return "long-delayed" + elif seconds > 60: # > 1 minute + return "delayed" + else: # <= 1 minute + return "good" + + +def getColor(delta): + delay = total_seconds(delta) + if delay > 432000: return '#666666' # > 5 days + elif delay > 345600: return '#999999' # > 4 days + elif delay > 259200: return '#CCCCCC' # > 3 days + elif delay > 172800: return '#FFB3B3' # > 2 days + elif delay > 86400: return '#FF3333' # > 1 day + elif delay > 21600: return '#FF9966' # > 6 hours + elif delay > 7200: return '#FFFF00' # > 2 hours + elif delay > 3600: return '#00FF00' # > 1 hour + elif delay > 1800: return '#3399FF' # > 30 minutes + elif delay > 600: return '#9470BB' # > 10 minutes + elif delay > 60: return '#EBD6FF' # > 1 minute + else: return '#FFFFFF' # <= 1 minute + + +def total_seconds(td): + return td.seconds + (td.days*86400) + + +def myrename(name1, name2): + # fault-tolerant rename that doesn't cause an exception if it fails, which + # may happen e.g. 
if the target is on a non-reachable NFS directory + try: + os.rename(name1, name2) + except OSError: + print("failed to rename(%s,%s)" % (name1, name2), file=sys.stderr) + + +def formatLatency(delta): + """Format latency for display""" + if delta is None: return 'n/a' + + t = total_seconds(delta) + + if t > 86400: return f"{t/86400:.1f} d" + elif t > 7200: return f"{t/3600:.1f} h" + elif t > 120: return f"{t/60:.1f} m" + else: return f"{t:.1f} s" + + +def generate_css_file(config): + """Generate the CSS file with theme support""" + css_content = """ +:root { + /* Light theme variables */ + --primary-color: #4f46e5; + --primary-hover: #4338ca; + --text-primary: #1f2937; + --text-secondary: #6b7280; + --bg-primary: #ffffff; + --bg-secondary: #f9fafb; + --bg-tertiary: #f3f4f6; + --border-color: #e5e7eb; + --border-radius: 8px; + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.05); + --shadow-md: 0 4px 6px rgba(0, 0, 0, 0.1); + --shadow-lg: 0 10px 15px rgba(0, 0, 0, 0.1); + + /* Status colors */ + --status-good: #ffffff; + --status-delayed: #c084fc; + --status-long-delayed: #8b5cf6; + --status-very-delayed: #3b82f6; + --status-hour-delayed: #10b981; + --status-warning: #fbbf24; + --status-critical: #f97316; + --status-day-delayed: #ef4444; + --status-multi-day: #f87171; + --status-three-day: #d1d5db; + --status-four-day: #9ca3af; + --status-unavailable: #6b7280; +} + +.dark-mode { + /* Dark theme variables */ + --primary-color: #818cf8; + --primary-hover: #a5b4fc; + --text-primary: #f9fafb; + --text-secondary: #9ca3af; + --bg-primary: #1f2937; + --bg-secondary: #111827; + --bg-tertiary: #374151; + --border-color: #374151; + --shadow-sm: 0 1px 2px rgba(0, 0, 0, 0.2); + --shadow-md: 0 4px 6px rgba(0, 0, 0, 0.3); + --shadow-lg: 0 10px 15px rgba(0, 0, 0, 0.3); + + /* Dark theme status colors - background stays dark */ + --status-good: #1f2937; +} + +/* General Styles */ +* { + box-sizing: border-box; + margin: 0; + padding: 0; +} + +body { + font-family: 'Inter', -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; + line-height: 1.6; + color: var(--text-primary); + background-color: var(--bg-secondary); + padding: 0; + margin: 0; +} + +.container { + max-width: 1400px; + margin: 20px auto; + padding: 30px; + background-color: var(--bg-primary); + border-radius: var(--border-radius); + box-shadow: var(--shadow-md); +} + +.header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 20px; + border-bottom: 1px solid var(--border-color); + padding-bottom: 15px; +} + +h1 { + font-size: 28px; + font-weight: 600; + color: var(--text-primary); + letter-spacing: -0.5px; +} + +.subtitle { + color: var(--text-secondary); + font-size: 16px; + margin-bottom: 20px; +} + +/* Navigation Tabs */ +.view-toggle { + display: flex; + gap: 10px; +} + +.view-toggle a { + padding: 8px 15px; + border-radius: 6px; + color: var(--text-secondary); + text-decoration: none; + transition: all 0.2s ease; + font-weight: 500; + font-size: 14px; +} + +.view-toggle a:hover { + background-color: var(--bg-tertiary); + color: var(--primary-color); +} + +.view-toggle a.active { + background-color: var(--primary-color); + color: white; +} + +/* Controls */ +.controls { + display: flex; + justify-content: space-between; + align-items: center; + margin: 20px 0; + flex-wrap: wrap; + gap: 15px; +} + +.actions { + display: flex; + gap: 10px; +} + +.action-button { + padding: 8px 15px; + display: flex; + align-items: center; + gap: 6px; + background-color: var(--bg-tertiary); + border: 1px solid 
var(--border-color); + border-radius: 6px; + color: var(--text-secondary); + cursor: pointer; + font-size: 14px; + font-weight: 500; + transition: all 0.2s ease; +} + +.action-button:hover { + background-color: var(--bg-primary); + color: var(--primary-color); +} + +.action-button svg { + width: 16px; + height: 16px; +} + +.refresh-control { + display: flex; + align-items: center; + gap: 12px; + padding: 10px 15px; + background-color: var(--bg-tertiary); + border-radius: 6px; +} + +.input-group { + display: flex; + align-items: center; + gap: 8px; +} + +.refresh-control input { + width: 60px; + padding: 6px 10px; + border: 1px solid var(--border-color); + border-radius: 4px; + font-size: 14px; + background-color: var(--bg-primary); + color: var(--text-primary); + text-align: center; +} + +.refresh-control button { + padding: 6px 12px; + background-color: var(--primary-color); + color: white; + border: none; + border-radius: 4px; + cursor: pointer; + font-weight: 500; + transition: background-color 0.2s ease; +} + +.refresh-control button:hover { + background-color: var(--primary-hover); +} + +.status-counter { + display: flex; + flex-direction: column; + align-items: flex-end; + gap: 2px; +} + +#refresh-status { + font-size: 13px; + color: var(--text-secondary); +} + +.countdown { + font-size: 13px; + color: var(--text-secondary); +} + +#next-refresh { + color: var(--primary-color); + font-weight: 500; +} + +/* Filter and Search */ +.filters { + display: flex; + flex-wrap: wrap; + gap: 10px; + margin-bottom: 20px; + padding: 15px; + background-color: var(--bg-tertiary); + border-radius: var(--border-radius); +} + +.filter-group { + display: flex; + align-items: center; + gap: 8px; +} + +.filter-group label { + font-size: 14px; + color: var(--text-secondary); + font-weight: 500; +} + +.filter-group select { + padding: 6px 10px; + border: 1px solid var(--border-color); + border-radius: 4px; + background-color: var(--bg-primary); + color: var(--text-primary); + font-size: 14px; +} + +.search-box { + padding: 6px 12px; + border: 1px solid var(--border-color); + border-radius: 4px; + background-color: var(--bg-primary); + color: var(--text-primary); + font-size: 14px; + min-width: 200px; +} + +/* Table View */ +.table-container { + overflow-x: auto; + margin-bottom: 20px; +} + +table { + width: 100%; + border-collapse: collapse; +} + +table th { + padding: 12px 15px; + background-color: var(--bg-tertiary); + color: var(--text-secondary); + font-weight: 600; + text-align: left; + border-bottom: 1px solid var(--border-color); + position: sticky; + top: 0; + z-index: 10; +} + +table td { + padding: 10px 15px; + border-bottom: 1px solid var(--border-color); +} + +table tr:hover { + background-color: var(--bg-tertiary); +} + +/* Grid View */ +.grid-container { + display: table; + width: 100%; + border-collapse: collapse; + margin-top: 20px; +} + +.grid-row { + display: table-row; +} + +.network-label { + display: table-cell; + vertical-align: middle; + text-align: center; + font-weight: 600; + width: 60px; + min-width: 60px; + height: 34px; + background-color: var(--bg-tertiary); + border-radius: 6px; + color: var(--text-secondary); + box-shadow: var(--shadow-sm); + padding: 4px; + margin: 2px; + border: 1px solid var(--border-color); +} + +.stations-container { + display: table-cell; + padding-left: 6px; +} + +.stations-row { + display: flex; + flex-wrap: wrap; + gap: 4px; + margin: 2px 0; +} + +.grid-cell { + width: 60px; + height: 34px; + display: flex; + align-items: center; + justify-content: 
center; + border-radius: 6px; + font-size: 13px; + font-weight: 500; + box-shadow: var(--shadow-sm); + text-decoration: none; + color: var(--text-primary); + transition: all 0.15s ease; + position: relative; + border: 1px solid var(--border-color); + background-color: var(--status-good); +} + +.grid-cell:hover { + transform: translateY(-2px); + box-shadow: var(--shadow-md); + z-index: 10; +} + +/* Map View */ +.map-container { + width: 100%; + height: 600px; + background-color: var(--bg-tertiary); + border-radius: var(--border-radius); + margin-bottom: 20px; + position: relative; +} + +/* Status Colors */ +.station-unavailable { + background-color: var(--status-unavailable); + color: white; + border-color: var(--status-unavailable); +} + +.station-warning { + background-color: var(--status-warning); + color: #7c2d12; + border-color: var(--status-warning); +} + +.station-critical { + background-color: var(--status-critical); + color: white; + border-color: var(--status-critical); +} + +.station-delayed { + background-color: var(--status-delayed); + color: #4a044e; + border-color: var(--status-delayed); +} + +.station-long-delayed { + background-color: var(--status-long-delayed); + color: white; + border-color: var(--status-long-delayed); +} + +.station-very-delayed { + background-color: var(--status-very-delayed); + color: white; + border-color: var(--status-very-delayed); +} + +.station-hour-delayed { + background-color: var(--status-hour-delayed); + color: white; + border-color: var(--status-hour-delayed); +} + +.station-day-delayed { + background-color: var(--status-day-delayed); + color: white; + border-color: var(--status-day-delayed); +} + +.station-multi-day { + background-color: var(--status-multi-day); + color: #7f1d1d; + border-color: var(--status-multi-day); +} + +.station-three-day { + background-color: var(--status-three-day); + color: #1f2937; + border-color: var(--status-three-day); +} + +.station-four-day { + background-color: var(--status-four-day); + color: white; + border-color: var(--status-four-day); +} + +.station-good { + background-color: var(--status-good); + color: var(--text-primary); + border-color: var(--border-color); +} + +/* Tooltip */ +.grid-cell::after { + content: attr(data-tooltip); + position: absolute; + bottom: 120%; + left: 50%; + transform: translateX(-50%); + background-color: #1f2937; + color: white; + text-align: center; + padding: 8px 12px; + border-radius: 6px; + font-size: 12px; + white-space: nowrap; + z-index: 20; + opacity: 0; + visibility: hidden; + transition: all 0.2s ease; + pointer-events: none; + box-shadow: var(--shadow-md); +} + +.grid-cell::before { + content: ''; + position: absolute; + top: -6px; + left: 50%; + transform: translateX(-50%); + border-width: 6px 6px 0; + border-style: solid; + border-color: #1f2937 transparent transparent; + z-index: 20; + opacity: 0; + visibility: hidden; + transition: all 0.2s ease; + pointer-events: none; +} + +.grid-cell:hover::after, +.grid-cell:hover::before { + opacity: 1; + visibility: visible; +} + +/* Stats */ +.stats-container { + margin: 20px 0; + padding: 20px; + background-color: var(--bg-tertiary); + border-radius: var(--border-radius); + display: none; +} + +.stats-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 20px; +} + +.stats-title { + font-weight: 600; + font-size: 16px; + color: var(--text-primary); +} + +.network-stats { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(240px, 1fr)); + gap: 15px; +} + 
+.network-stat { + display: flex; + flex-direction: column; + gap: 8px; +} + +.network-name { + font-weight: 600; + font-size: 14px; + color: var(--text-primary); + display: flex; + justify-content: space-between; +} + +.progress-bar { + height: 8px; + background-color: var(--border-color); + border-radius: 4px; + overflow: hidden; +} + +.progress { + height: 100%; + background-color: var(--primary-color); + border-radius: 4px; +} + +/* Legend */ +.legend { + display: flex; + flex-wrap: wrap; + gap: 10px; + margin: 25px 0; + padding: 15px; + background-color: var(--bg-tertiary); + border-radius: var(--border-radius); + justify-content: center; +} + +.legend-item { + display: flex; + align-items: center; + gap: 5px; + font-size: 13px; + color: var(--text-secondary); +} + +.legend-color { + width: 16px; + height: 16px; + border-radius: 4px; + border: 1px solid rgba(0, 0, 0, 0.1); +} + +/* Footer */ +.footer { + margin-top: 30px; + padding-top: 15px; + border-top: 1px solid var(--border-color); + display: flex; + justify-content: space-between; + color: var(--text-secondary); + font-size: 14px; +} + +.footer a { + color: var(--primary-color); + text-decoration: none; +} + +.footer a:hover { + text-decoration: underline; +} + +/* Loading */ +#loading { + display: flex; + align-items: center; + justify-content: center; + margin: 30px 0; + color: var(--text-secondary); +} + +.loading-spinner { + width: 24px; + height: 24px; + border: 3px solid var(--bg-tertiary); + border-top: 3px solid var(--primary-color); + border-radius: 50%; + margin-right: 12px; + animation: spin 1s linear infinite; +} + +@keyframes spin { + 0% { transform: rotate(0deg); } + 100% { transform: rotate(360deg); } +} + +/* Error Message */ +#error-message { + padding: 15px; + margin: 20px 0; + border-radius: var(--border-radius); + background-color: #fee2e2; + color: #b91c1c; + border-left: 4px solid #ef4444; + display: none; +} + +/* Responsive Design */ +@media (max-width: 768px) { + .container { + margin: 10px; + padding: 15px; + border-radius: 6px; + } + + .header { + flex-direction: column; + align-items: flex-start; + gap: 10px; + } + + .view-toggle { + align-self: flex-end; + } + + .controls { + flex-direction: column; + align-items: stretch; + } + + .actions { + justify-content: space-between; + } + + .refresh-control { + flex-direction: column; + align-items: flex-start; + gap: 10px; + } + + .filters { + flex-direction: column; + gap: 10px; + } + + .filter-group { + width: 100%; + } + + .search-box { + width: 100%; + } + + .network-stats { + grid-template-columns: 1fr; + } + + .map-container { + height: 400px; + } + } + /* Marker Cluster Styles */ + .marker-cluster { + background-clip: padding-box; + border-radius: 20px; + } + + .marker-cluster div { + width: 36px; + height: 36px; + margin-left: 2px; + margin-top: 2px; + text-align: center; + border-radius: 18px; + font-size: 12px; + display: flex; + align-items: center; + justify-content: center; + } + + /* Map Controls */ + .leaflet-control-locate { + border: 2px solid rgba(0,0,0,0.2); + background-clip: padding-box; + } + + .leaflet-control-locate a { + background-color: var(--bg-primary); + background-position: 50% 50%; + background-repeat: no-repeat; + display: block; + width: 30px; + height: 30px; + line-height: 30px; + color: var(--text-primary); + text-align: center; + } + + .leaflet-control-locate a:hover { + background-color: var(--bg-tertiary); + color: var(--primary-color); + } + + .leaflet-control-locate.active a { + color: var(--primary-color); + } + + 
.leaflet-control-fullscreen { + border: 2px solid rgba(0,0,0,0.2); + background-clip: padding-box; + } + + .leaflet-control-fullscreen a { + background-color: var(--bg-primary); + background-position: 50% 50%; + background-repeat: no-repeat; + display: block; + width: 30px; + height: 30px; + line-height: 30px; + color: var(--text-primary); + text-align: center; + } + + .leaflet-control-fullscreen a:hover { + background-color: var(--bg-tertiary); + color: var(--primary-color); + } + + /* Map layers control */ + .leaflet-control-layers { + border-radius: var(--border-radius); + background-color: var(--bg-primary); + color: var(--text-primary); + border: 1px solid var(--border-color); + box-shadow: var(--shadow-sm); + } + + .dark-mode .leaflet-control-layers { + background-color: var(--bg-tertiary); + } + + .leaflet-control-layers-toggle { + width: 36px; + height: 36px; + background-size: 20px 20px; + } + + .leaflet-control-layers-expanded { + padding: 10px; + background-color: var(--bg-primary); + color: var(--text-primary); + border-radius: var(--border-radius); + } + + .dark-mode .leaflet-control-layers-expanded { + background-color: var(--bg-tertiary); + } + + .leaflet-control-layers-list { + margin-top: 8px; + } + + .leaflet-control-layers label { + margin-bottom: 5px; + display: block; + } + + /* Map layer selection buttons */ + .map-layers-control { + position: absolute; + top: 10px; + right: 10px; + z-index: 1000; + background: white; + padding: 5px; + border-radius: 4px; + box-shadow: 0 1px 5px rgba(0,0,0,0.65); + } + + .map-layers-control button { + display: block; + margin: 5px 0; + padding: 5px; + width: 100%; + border: none; + background: #f8f8f8; + cursor: pointer; + } + + .map-layers-control button:hover { + background: #f0f0f0; + } + + .map-layers-control button.active { + background: #ddd; + font-weight: bold; + } + + /* Map tools control */ + .map-tools-control { + position: absolute; + bottom: 30px; + right: 10px; + z-index: 1000; + display: flex; + flex-direction: column; + gap: 5px; + } + + .map-tools-control button { + width: 34px; + height: 34px; + background: white; + border: 2px solid rgba(0,0,0,0.2); + border-radius: 4px; + display: flex; + align-items: center; + justify-content: center; + cursor: pointer; + color: #333; + } + + .map-tools-control button:hover { + background: #f4f4f4; + } + + .dark-mode .map-tools-control button { + background: #333; + color: #fff; + border-color: rgba(255,255,255,0.2); + } + + .dark-mode .map-tools-control button:hover { + background: #444; + } + + /* Map measurement widget */ + .leaflet-measure-path-measurement { + position: absolute; + font-size: 12px; + color: black; + text-shadow: -1px 0 white, 0 1px white, 1px 0 white, 0 -1px white; + white-space: nowrap; + transform-origin: 0; + pointer-events: none; + } + + .dark-mode .leaflet-measure-path-measurement { + color: white; + text-shadow: -1px 0 black, 0 1px black, 1px 0 black, 0 -1px black; + } + + /* Popup styling */ + .leaflet-popup-content-wrapper { + border-radius: var(--border-radius); + background-color: var(--bg-primary); + color: var(--text-primary); + box-shadow: var(--shadow-md); + } + + .dark-mode .leaflet-popup-content-wrapper { + background-color: var(--bg-tertiary); + } + + .leaflet-popup-content { + margin: 12px; + line-height: 1.5; + } + + .leaflet-popup-tip { + background-color: var(--bg-primary); + } + + .dark-mode .leaflet-popup-tip { + background-color: var(--bg-tertiary); + } + + .leaflet-popup-content a { + color: var(--primary-color); + text-decoration: 
none; + } + + .leaflet-popup-content a:hover { + text-decoration: underline; + } + + /* Make the map more responsive on mobile */ + @media (max-width: 768px) { + .map-container { + height: 450px; + } + + .leaflet-control-layers, + .leaflet-control-zoom, + .leaflet-control-fullscreen, + .leaflet-control-locate { + margin-right: 10px !important; + } + + .leaflet-control-scale { + margin-bottom: 40px !important; + } + } + """ + + try: + css_path = os.path.join(config['setup']['wwwdir'], 'styles.css') + with open(css_path, 'w') as f: + f.write(css_content) + print(f"CSS file generated at {css_path}") + return css_path + except Exception as e: + print(f"Error generating CSS file: {str(e)}") + return None + + +def generate_js_file(config): + """Generate the JavaScript file with interactive features""" + js_content = """ +// Global variables +let refreshTimer = null; +let currentRefreshInterval = 60; +let lastRefreshTime = 0; +let isRefreshing = false; +let stationsData = []; +let viewMode = 'table'; // 'table', 'grid', or 'map' +let mapInitialized = false; +let map = null; +let markers = []; + +// Function to initialize the application +document.addEventListener('DOMContentLoaded', function() { + // Load saved preferences + loadPreferences(); + + // Set up event listeners + setupEventListeners(); + + //Add channel type filter + setupFilters(); + + // Set active view based on URL or default + setActiveView(); + + // Initial data load + fetchData(); +}); + +// Function to load user preferences from localStorage +function loadPreferences() { + // Load refresh interval + const savedInterval = parseInt(localStorage.getItem('seedlinkRefreshInterval')); + if (savedInterval && savedInterval >= 10) { + document.getElementById('refresh-interval').value = savedInterval; + currentRefreshInterval = savedInterval; + } + + // Load dark mode preference + const darkModeEnabled = localStorage.getItem('seedlink-dark-mode') === 'true'; + if (darkModeEnabled) { + document.body.classList.add('dark-mode'); + updateThemeToggleButton(true); + } + + // Load view mode + const savedViewMode = localStorage.getItem('seedlink-view-mode'); + if (savedViewMode) { + viewMode = savedViewMode; + } +} + +// Function to set up all event listeners +function setupEventListeners() { + // View toggle buttons + document.querySelectorAll('.view-toggle a').forEach(link => { + link.addEventListener('click', function(e) { + e.preventDefault(); + const view = this.getAttribute('data-view'); + switchView(view); + }); + }); + + // Refresh controls + document.getElementById('apply-refresh').addEventListener('click', function() { + const interval = parseInt(document.getElementById('refresh-interval').value); + if (interval && interval >= 10) { + updateRefreshInterval(interval); + } + }); + + document.getElementById('refresh-now').addEventListener('click', function() { + if (refreshTimer) { + clearTimeout(refreshTimer); + } + fetchData(); + }); + + // Theme toggle + document.getElementById('theme-toggle').addEventListener('click', toggleDarkMode); + + // Export CSV + document.getElementById('export-csv').addEventListener('click', exportToCsv); + + // Stats toggle + document.getElementById('stats-toggle').addEventListener('click', toggleStats); + document.getElementById('close-stats').addEventListener('click', function() { + document.getElementById('stats-container').style.display = 'none'; + }); + + // Filter inputs + document.getElementById('network-filter').addEventListener('change', applyFilters); + 
document.getElementById('status-filter').addEventListener('change', applyFilters); + document.getElementById('search-input').addEventListener('input', debounce(applyFilters, 300)); + + // Sort headers in table view + document.querySelectorAll('th[data-sort]').forEach(header => { + header.addEventListener('click', function() { + sortTable(this.getAttribute('data-sort')); + }); + }); + + // Handle visibility changes (tab switching) + document.addEventListener('visibilitychange', function() { + if (document.visibilityState === 'visible') { + // If data is stale (not refreshed in over half the interval) + const timeSinceLastRefresh = Date.now() - lastRefreshTime; + if (timeSinceLastRefresh > (currentRefreshInterval * 500)) { + if (refreshTimer) { + clearTimeout(refreshTimer); + } + fetchData(); + } + } + }); +} + +// Function to set active view based on URL or saved preference +function setActiveView() { + // Extract view from URL if present + const urlParams = new URLSearchParams(window.location.search); + const urlView = urlParams.get('view'); + + if (urlView && ['table', 'grid', 'map'].includes(urlView)) { + viewMode = urlView; + } + + // Set active class on the appropriate link + document.querySelectorAll('.view-toggle a').forEach(link => { + if (link.getAttribute('data-view') === viewMode) { + link.classList.add('active'); + } else { + link.classList.remove('active'); + } + }); + + // Show the appropriate view container + document.querySelectorAll('.view-container').forEach(container => { + if (container.id === `${viewMode}-view`) { + container.style.display = 'block'; + } else { + container.style.display = 'none'; + } + }); + + // Initialize map if needed + if (viewMode === 'map' && !mapInitialized && typeof L !== 'undefined') { + initializeMap(); + } + + // Save preference + localStorage.setItem('seedlink-view-mode', viewMode); +} + +// Function to switch between views +function switchView(view) { + viewMode = view; + + // Update URL without reloading the page + const url = new URL(window.location); + url.searchParams.set('view', view); + window.history.pushState({}, '', url); + + setActiveView(); + + // Refresh data display for the new view + renderData(); +} + +// Function to toggle dark mode +function toggleDarkMode() { + document.body.classList.toggle('dark-mode'); + const isDarkMode = document.body.classList.contains('dark-mode'); + localStorage.setItem('seedlink-dark-mode', isDarkMode ? 'true' : 'false'); + + updateThemeToggleButton(isDarkMode); + + // Update map tiles if map is initialized + if (mapInitialized && map) { + updateMapTiles(isDarkMode); + } +} + +// Function to update theme toggle button appearance +function updateThemeToggleButton(isDarkMode) { + const themeToggle = document.getElementById('theme-toggle'); + if (isDarkMode) { + themeToggle.innerHTML = ` + + + + + + + + + + + + Light Mode + `; + } else { + themeToggle.innerHTML = ` + + + + Dark Mode + `; + } +} + +// Function to initialize the map view +// 1. Enhanced map initialization function +// Updated initializeMap function with markerCluster safety checks +function initializeMap() { + // Check if Leaflet is available + if (typeof L === 'undefined') { + console.error('Leaflet library not loaded'); + document.getElementById('map-container').innerHTML = '
Map library not available. Please check your internet connection.
'; + return; + } + + // Initialize markerCluster as null so it's defined even if the plugin isn't available + markerCluster = null; + + // Read map settings from the page data if available + const mapSettings = window.mapSettings || { + center: { lat: 20, lon: 0, zoom: 2 }, + defaultLayer: 'street', + enableClustering: true, + showFullscreenControl: true, + showLayerControl: true, + showLocateControl: true + }; + + // Create map instance + map = L.map('map-container', { + center: [mapSettings.center.lat, mapSettings.center.lon], + zoom: mapSettings.center.zoom, + zoomControl: false // We'll add this separately for better positioning + }); + + // Define available base layers + const baseLayers = { + 'Street': L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { + attribution: '© OpenStreetMap contributors', + maxZoom: 19 + }), + 'Satellite': L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', { + attribution: 'Imagery © Esri © ArcGIS', + maxZoom: 19 + }), + 'Terrain': L.tileLayer('https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png', { + attribution: 'Map data: © OpenStreetMap contributors, SRTM | Map style: © OpenTopoMap', + maxZoom: 17 + }), + 'Dark': L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + attribution: '© OpenStreetMap contributors © CARTO', + subdomains: 'abcd', + maxZoom: 19 + }) + }; + + // Add appropriate layer based on settings or dark mode + const isDarkMode = document.body.classList.contains('dark-mode'); + let defaultLayer = isDarkMode ? 'Dark' : (mapSettings.defaultLayer || 'Street'); + defaultLayer = defaultLayer.charAt(0).toUpperCase() + defaultLayer.slice(1); // Capitalize + + // Add the default layer to the map + if (baseLayers[defaultLayer]) { + baseLayers[defaultLayer].addTo(map); + } else { + // Fallback to the first available layer + baseLayers[Object.keys(baseLayers)[0]].addTo(map); + } + + // Add layer control if enabled + if (mapSettings.showLayerControl !== false) { + L.control.layers(baseLayers, {}, { + position: 'topright', + collapsed: true + }).addTo(map); + } + + // Add zoom control in a better position + L.control.zoom({ + position: 'bottomright' + }).addTo(map); + + // Add scale control + L.control.scale().addTo(map); + + // Add fullscreen control if enabled and the plugin is available + if (mapSettings.showFullscreenControl !== false && typeof L.Control.Fullscreen !== 'undefined') { + L.control.fullscreen({ + position: 'topright', + title: { + 'false': 'View Fullscreen', + 'true': 'Exit Fullscreen' + } + }).addTo(map); + } + + // Add locate control if enabled and the plugin is available + if (mapSettings.showLocateControl !== false && typeof L.Control.Locate !== 'undefined') { + L.control.locate({ + position: 'bottomright', + icon: 'fa fa-location-arrow', + strings: { + title: 'Show my location' + }, + locateOptions: { + enableHighAccuracy: true, + maxZoom: 10 + } + }).addTo(map); + } + + // Initialize marker cluster group if enabled and the plugin is available + if (mapSettings.enableClustering !== false && typeof L.MarkerClusterGroup !== 'undefined') { + try { + markerCluster = L.markerClusterGroup({ + disableClusteringAtZoom: 10, + spiderfyOnMaxZoom: true, + showCoverageOnHover: false, + iconCreateFunction: function(cluster) { + const count = cluster.getChildCount(); + + // Determine color based on worst status in the cluster + let worstStatus = 'good'; + const markers = cluster.getAllChildMarkers(); + + for (const marker of markers) { + const 
status = marker.options.status || 'good'; + + // Simple ordering of statuses from least to most severe + const statusOrder = { + 'good': 0, + 'delayed': 1, + 'long-delayed': 2, + 'very-delayed': 3, + 'hour-delayed': 4, + 'warning': 5, + 'critical': 6, + 'day-delayed': 7, + 'multi-day': 8, + 'three-day': 9, + 'four-day': 10, + 'unavailable': 11 + }; + + if ((statusOrder[status] || 0) > (statusOrder[worstStatus] || 0)) { + worstStatus = status; + } + } + + // Get color for worst status + const color = getStatusColor(worstStatus); + + const textColor = getBestTextColor(color); + + return L.divIcon({ + html: `
${count}
`, + className: 'marker-cluster', + iconSize: new L.Point(40, 40) + }); + } + }); + + map.addLayer(markerCluster); + console.log("Marker clustering initialized successfully"); + } catch (e) { + console.error("Error initializing marker clustering:", e); + markerCluster = null; // Reset to null if initialization failed + } + } else { + console.log("Marker clustering is disabled or not available"); + } + + // Mark as initialized + mapInitialized = true; + + // Update markers if we already have data + if (stationsData.length > 0) { + updateMapMarkers(); + } +} + +// Helper function to determine best text color (black or white) based on background color +function getBestTextColor(bgColor) { + // Handle named colors + if (bgColor.toLowerCase() === '#ffffff') return '#000000'; + if (bgColor.toLowerCase() === '#000000') return '#ffffff'; + + // Convert hex to rgb + let hex = bgColor.replace('#', ''); + let r, g, b; + + if (hex.length === 3) { + r = parseInt(hex.charAt(0) + hex.charAt(0), 16); + g = parseInt(hex.charAt(1) + hex.charAt(1), 16); + b = parseInt(hex.charAt(2) + hex.charAt(2), 16); + } else { + r = parseInt(hex.substring(0, 2), 16); + g = parseInt(hex.substring(2, 4), 16); + b = parseInt(hex.substring(4, 6), 16); + } + + // Calculate luminance + const luminance = (0.299 * r + 0.587 * g + 0.114 * b) / 255; + + // Return white for dark backgrounds, black for light backgrounds + return luminance > 0.5 ? '#000000' : '#ffffff'; +} + +function updateMapTiles(isDarkMode) { + if (!map) return; + + // Get available layers from map's layer control + const baseLayers = {}; + map.eachLayer(layer => { + if (layer instanceof L.TileLayer) { + map.removeLayer(layer); + } + }); + + // Add the default layer based on theme + if (isDarkMode) { + if (window.mapSettings && window.mapSettings.darkModeLayer) { + // Use configured dark mode layer + const darkLayer = window.mapSettings.darkModeLayer.toLowerCase(); + if (darkLayer === 'satellite') { + L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', { + attribution: 'Imagery © Esri © ArcGIS', + maxZoom: 19 + }).addTo(map); + } else { + L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + attribution: '© OpenStreetMap contributors © CARTO', + subdomains: 'abcd', + maxZoom: 19 + }).addTo(map); + } + } else { + // Default dark theme + L.tileLayer('https://{s}.basemaps.cartocdn.com/dark_all/{z}/{x}/{y}{r}.png', { + attribution: '© OpenStreetMap contributors © CARTO', + subdomains: 'abcd', + maxZoom: 19 + }).addTo(map); + } + } else { + if (window.mapSettings && window.mapSettings.lightModeLayer) { + // Use configured light mode layer + const lightLayer = window.mapSettings.lightModeLayer.toLowerCase(); + if (lightLayer === 'satellite') { + L.tileLayer('https://server.arcgisonline.com/ArcGIS/rest/services/World_Imagery/MapServer/tile/{z}/{y}/{x}', { + attribution: 'Imagery © Esri © ArcGIS', + maxZoom: 19 + }).addTo(map); + } else if (lightLayer === 'terrain') { + L.tileLayer('https://{s}.tile.opentopomap.org/{z}/{x}/{y}.png', { + attribution: 'Map data: © OpenStreetMap contributors, SRTM | Map style: © OpenTopoMap', + maxZoom: 17 + }).addTo(map); + } else { + L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { + attribution: '© OpenStreetMap contributors', + maxZoom: 19 + }).addTo(map); + } + } else { + // Default light theme + L.tileLayer('https://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png', { + attribution: '© OpenStreetMap contributors', + maxZoom: 19 + 
}).addTo(map); + } + } +} + +function updateMapMarkers() { + if (!mapInitialized || !map) return; + + // Clear existing markers + if (markerCluster) { + try { + markerCluster.clearLayers(); + } catch (e) { + console.error("Error clearing marker cluster:", e); + // Fall back to standard markers if cluster fails + markerCluster = null; + markers.forEach(marker => { + try { map.removeLayer(marker); } catch(e) {} + }); + } + } else { + markers.forEach(marker => { + try { map.removeLayer(marker); } catch(e) {} + }); + } + markers = []; + + // Variables to track bounds for auto-zooming + let validCoordinates = false; + const bounds = L.latLngBounds(); + + // Add markers for each station + stationsData.forEach(station => { + // Skip stations without coordinates + if (!station.coordinates || !station.coordinates.lat || !station.coordinates.lon) { + console.log(`Station ${station.network}_${station.station} has no coordinates`); + return; + } + + validCoordinates = true; + const lat = station.coordinates.lat; + const lon = station.coordinates.lon; + + // Add to bounds for auto-zooming + bounds.extend([lat, lon]); + + // Create marker with appropriate color based on status + const markerColor = getStatusColor(station.status); + + // Create marker with a badge if it's using LH channels + const isLH = station.primaryChannelType === 'LH'; + const markerIcon = L.divIcon({ + html: isLH + ? `
+ L +
` + : `
`, + className: 'station-marker', + iconSize: [18, 18], + iconAnchor: [9, 9] + }); + + const marker = L.marker([lat, lon], { + icon: markerIcon, + title: `${station.network}_${station.station}`, + status: station.status // Store status for cluster coloring + }); + + // Create channel group summary for popup + const channelGroupsText = station.channels.reduce((groups, channel) => { + const type = channel.channelType || 'other'; + if (!groups[type]) groups[type] = 0; + groups[type]++; + return groups; + }, {}); + + const channelGroupsHTML = Object.entries(channelGroupsText) + .map(([type, count]) => `${type}: ${count}`) + .join(', '); + + // Add popup with station info + marker.bindPopup(` + ${station.network}_${station.station}
+ Primary channel type: ${station.primaryChannelType || 'N/A'}
+ Status: ${formatStatus(station.status, station.primaryChannelType)}
+ Latency: ${formatLatency(station.latency)}
+ Channels: ${channelGroupsHTML}
+ Coordinates: ${lat.toFixed(4)}, ${lon.toFixed(4)} + ${station.coordinates.elevation ? '
Elevation: ' + station.coordinates.elevation.toFixed(1) + ' m' : ''} +
View Details + `); + + // Add to the cluster group or directly to the map + try { + if (markerCluster) { + markerCluster.addLayer(marker); + } else { + marker.addTo(map); + } + markers.push(marker); + } catch (e) { + console.error("Error adding marker:", e); + // If cluster fails, try adding directly to map + try { + marker.addTo(map); + markers.push(marker); + } catch (e2) { + console.error("Also failed to add directly to map:", e2); + } + } + }); + + // Auto-zoom to fit all markers if we have valid coordinates + if (validCoordinates && markers.length > 0) { + // Don't zoom too close if there's only one station + if (markers.length === 1) { + map.setView(bounds.getCenter(), 8); + } else { + try { + map.fitBounds(bounds, { + padding: [30, 30], + maxZoom: 12 + }); + } catch (e) { + console.error("Error fitting bounds:", e); + // Fallback to a default view + map.setView([20, 0], 2); + } + } + } else if (!validCoordinates && markers.length === 0) { + // Show message if no stations have coordinates + const noCoordinatesMsg = document.createElement('div'); + noCoordinatesMsg.className = 'error-message'; + noCoordinatesMsg.style.position = 'absolute'; + noCoordinatesMsg.style.top = '50%'; + noCoordinatesMsg.style.left = '50%'; + noCoordinatesMsg.style.transform = 'translate(-50%, -50%)'; + noCoordinatesMsg.style.background = 'rgba(255, 255, 255, 0.9)'; + noCoordinatesMsg.style.padding = '15px'; + noCoordinatesMsg.style.borderRadius = '8px'; + noCoordinatesMsg.style.zIndex = 1000; + noCoordinatesMsg.innerHTML = ` +

No station coordinates available

+

Make sure your FDSNWS service is properly configured and accessible.

+ `; + document.getElementById('map-container').appendChild(noCoordinatesMsg); + } +} + +// Custom legend for the map +function addMapLegend() { + if (!mapInitialized || !map) return; + + // Remove existing legend if any + const existingLegend = document.querySelector('.map-legend'); + if (existingLegend) { + existingLegend.remove(); + } + + // Create a custom legend + const legend = L.control({position: 'bottomright'}); + + legend.onAdd = function() { + const div = L.DomUtil.create('div', 'map-legend'); + div.innerHTML = ` +

Station Status

+
Good (≤ 1 min)
+
> 1 min
+
> 10 min
+
> 30 min
+
> 1 hour
+
> 2 hours
+
> 6 hours
+
> 1 day
+ `; + + // Add custom styles to the legend + const style = document.createElement('style'); + style.textContent = ` + .map-legend { + padding: 10px; + background: white; + background: rgba(255, 255, 255, 0.9); + border-radius: 5px; + line-height: 1.8; + color: #333; + box-shadow: 0 0 15px rgba(0, 0, 0, 0.2); + } + .dark-mode .map-legend { + background: rgba(31, 41, 55, 0.9); + color: #f9fafb; + } + .map-legend h4 { + margin: 0 0 5px; + font-size: 14px; + font-weight: 600; + } + .map-legend div { + display: flex; + align-items: center; + font-size: 12px; + margin-bottom: 3px; + } + .map-legend span { + display: inline-block; + width: 16px; + height: 16px; + margin-right: 8px; + border-radius: 50%; + border: 1px solid rgba(0, 0, 0, 0.2); + } + .dark-mode .map-legend span { + border-color: rgba(255, 255, 255, 0.2); + } + `; + + div.appendChild(style); + return div; + }; + + legend.addTo(map); +} +function setupFilters() { + // Add a channel type filter to the filters area + const filtersArea = document.querySelector('.filters'); + + if (!filtersArea) return; + + // Check if the filter already exists + if (!document.getElementById('channel-filter')) { + const channelFilterGroup = document.createElement('div'); + channelFilterGroup.className = 'filter-group'; + channelFilterGroup.innerHTML = ` + + + `; + + filtersArea.appendChild(channelFilterGroup); + + // Add event listener + document.getElementById('channel-filter').addEventListener('change', applyFilters); + } +} + +// Enhanced station filters for the map +function setupMapFilters() { + if (!mapInitialized || !map) return; + + const mapFilters = L.control({position: 'topleft'}); + + mapFilters.onAdd = function() { + const div = L.DomUtil.create('div', 'map-filters'); + div.innerHTML = ` +
+ + +
+
+ + +
+ `; + + // Add styles + const style = document.createElement('style'); + style.textContent = ` + .map-filters { + padding: 10px; + background: rgba(255, 255, 255, 0.9); + border-radius: 5px; + box-shadow: 0 0 15px rgba(0, 0, 0, 0.2); + width: 200px; + } + .dark-mode .map-filters { + background: rgba(31, 41, 55, 0.9); + color: #f9fafb; + } + .map-filters .filter-select { + margin-bottom: 8px; + } + .map-filters label { + display: block; + margin-bottom: 3px; + font-weight: 500; + font-size: 12px; + } + .map-filters select { + width: 100%; + padding: 4px 8px; + border-radius: 4px; + border: 1px solid #ddd; + font-size: 12px; + } + .dark-mode .map-filters select { + background: #374151; + color: #f9fafb; + border-color: #4b5563; + } + `; + + div.appendChild(style); + + // Prevent map interactions when using the filters + L.DomEvent.disableClickPropagation(div); + L.DomEvent.disableScrollPropagation(div); + + // Setup network filter options + const networkFilter = div.querySelector('#map-network-filter'); + const networks = [...new Set(stationsData.map(station => station.network))].sort(); + + networks.forEach(network => { + const option = document.createElement('option'); + option.value = network; + option.textContent = network; + networkFilter.appendChild(option); + }); + + // Add event listeners + networkFilter.addEventListener('change', function() { + const selectedNetwork = this.value; + updateMapMarkersFilter(selectedNetwork, div.querySelector('#map-status-filter').value); + }); + + div.querySelector('#map-status-filter').addEventListener('change', function() { + const selectedStatus = this.value; + updateMapMarkersFilter(networkFilter.value, selectedStatus); + }); + + return div; + }; + + mapFilters.addTo(map); +} + +// Filter map markers based on selected criteria +function updateMapMarkersFilter(network, status) { + if (!mapInitialized || !map) return; + + // Clear existing markers + markers.forEach(marker => map.removeLayer(marker)); + markers = []; + + // Apply filters to data + let filteredData = stationsData; + + if (network) { + filteredData = filteredData.filter(station => station.network === network); + } + + if (status) { + filteredData = filteredData.filter(station => { + if (status === 'good') { + return station.status === 'good'; + } else if (status === 'warning') { + return ['delayed', 'long-delayed', 'very-delayed', 'hour-delayed', 'warning'].includes(station.status); + } else if (status === 'critical') { + return ['critical', 'day-delayed', 'multi-day', 'three-day', 'four-day'].includes(station.status); + } else if (status === 'unavailable') { + return station.status === 'unavailable'; + } + return true; + }); + } + + // Add filtered markers + const bounds = L.latLngBounds(); + let validCoordinates = false; + + filteredData.forEach(station => { + // Skip stations without coordinates + if (!station.coordinates || !station.coordinates.lat || !station.coordinates.lon) return; + + validCoordinates = true; + const lat = station.coordinates.lat; + const lon = station.coordinates.lon; + + // Add to bounds for auto-zooming + bounds.extend([lat, lon]); + + // Create marker with appropriate color + const markerColor = getStatusColor(station.status); + const markerIcon = L.divIcon({ + html: `
`, + className: 'station-marker', + iconSize: [18, 18], + iconAnchor: [9, 9] + }); + + const marker = L.marker([lat, lon], { + icon: markerIcon, + title: `${station.network}_${station.station}` + }); + + // Add popup with station info + marker.bindPopup(` + ${station.network}_${station.station}
+ Status: ${formatStatus(station.status)}
+ Latency: ${formatLatency(station.latency)}
+ Coordinates: ${lat.toFixed(4)}, ${lon.toFixed(4)} + ${station.coordinates.elevation ? '
Elevation: ' + station.coordinates.elevation.toFixed(1) + ' m' : ''} +
View Details + `); + + marker.addTo(map); + markers.push(marker); + }); + + // Auto-zoom to fit all markers if we have valid coordinates + if (validCoordinates && markers.length > 0) { + // Don't zoom too close if there's only one station + if (markers.length === 1) { + map.setView(bounds.getCenter(), 8); + } else { + map.fitBounds(bounds, { + padding: [30, 30], + maxZoom: 12 + }); + } + } +} + +// Enhanced version of the setActiveView function to handle map initialization +function setActiveView() { + // Extract view from URL if present + const urlParams = new URLSearchParams(window.location.search); + const urlView = urlParams.get('view'); + + if (urlView && ['table', 'grid', 'map'].includes(urlView)) { + viewMode = urlView; + } + + // Set active class on the appropriate link + document.querySelectorAll('.view-toggle a').forEach(link => { + if (link.getAttribute('data-view') === viewMode) { + link.classList.add('active'); + } else { + link.classList.remove('active'); + } + }); + + // Show the appropriate view container + document.querySelectorAll('.view-container').forEach(container => { + if (container.id === `${viewMode}-view`) { + container.style.display = 'block'; + } else { + container.style.display = 'none'; + } + }); + + // Initialize map if needed + if (viewMode === 'map') { + if (!mapInitialized && typeof L !== 'undefined') { + initializeMap(); + // Add map-specific UI elements after initialization + setTimeout(() => { + addMapLegend(); + setupMapFilters(); + }, 100); + } else if (mapInitialized) { + // If map is already initialized, ensure it's up to date + map.invalidateSize(); + updateMapMarkers(); + } + } + + // Save preference + localStorage.setItem('seedlink-view-mode', viewMode); +} + +// Function to fetch data from the server +function fetchData() { + if (isRefreshing) return; + + isRefreshing = true; + lastRefreshTime = Date.now(); + + // Show loading state + //document.getElementById('loading').style.display = 'flex'; + document.getElementById('error-message').style.display = 'none'; + document.getElementById('refresh-status').textContent = 'Refreshing...'; + + // Use cache-busting to prevent stale data + const timestamp = Date.now(); + + // Fetch the JSON data + fetch(`stations_data.json?_=${timestamp}`, { + cache: 'no-cache', + headers: { + 'Cache-Control': 'no-cache, no-store, must-revalidate', + 'Pragma': 'no-cache', + 'Expires': '0' + } + }) + .then(response => { + if (!response.ok) { + throw new Error(`Server returned ${response.status}: ${response.statusText}`); + } + return response.json(); + }) + .then(data => { + stationsData = data; + + // Update the filter dropdowns + updateFilters(); + + // Render the data based on current view + renderData(); + + // Update timestamp + const updateTime = new Date().toUTCString(); + document.getElementById('update-time').textContent = updateTime; + document.getElementById('refresh-status').textContent = 'Last refresh: ' + new Date().toLocaleTimeString(); + + // Hide loading state + document.getElementById('loading').style.display = 'none'; + document.getElementById('error-message').style.display = 'none'; + + // Setup next refresh + setupNextRefresh(); + }) + .catch(error => { + console.error('Error fetching data:', error); + document.getElementById('error-message').textContent = `Error loading data: ${error.message}`; + document.getElementById('error-message').style.display = 'block'; + document.getElementById('loading').style.display = 'none'; + + // Still setup next refresh to try again + setupNextRefresh(); + }) + 
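+    // Editor's note: the .finally() handler below is the only place where the
+    // isRefreshing guard (set to true at the top of fetchData) is cleared, so a
+    // failed or slow request can never permanently block later scheduled refreshes.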
.finally(() => { + isRefreshing = false; + }); +} + +// Function to update the filter dropdowns based on available data +function updateFilters() { + // Get unique networks + const networks = [...new Set(stationsData.map(station => station.network))].sort(); + + // Update network filter + const networkFilter = document.getElementById('network-filter'); + const selectedNetwork = networkFilter.value; + + // Clear existing options except the first one + while (networkFilter.options.length > 1) { + networkFilter.remove(1); + } + + // Add new options + networks.forEach(network => { + const option = document.createElement('option'); + option.value = network; + option.textContent = network; + networkFilter.appendChild(option); + }); + + // Restore selection if possible + if (selectedNetwork && networks.includes(selectedNetwork)) { + networkFilter.value = selectedNetwork; + } +} + +// Function to apply filters to the data +function applyFilters() { + const networkFilter = document.getElementById('network-filter').value; + const statusFilter = document.getElementById('status-filter').value; + const channelFilter = document.getElementById('channel-filter')?.value || ''; + const searchText = document.getElementById('search-input').value.toLowerCase(); + + // Apply filters to data + let filteredData = stationsData; + + if (networkFilter) { + filteredData = filteredData.filter(station => station.network === networkFilter); + } + + if (statusFilter) { + filteredData = filteredData.filter(station => { + if (statusFilter === 'good') { + return station.status === 'good'; + } else if (statusFilter === 'warning') { + return ['delayed', 'long-delayed', 'very-delayed', 'hour-delayed', 'warning'].includes(station.status); + } else if (statusFilter === 'critical') { + return ['critical', 'day-delayed', 'multi-day', 'three-day', 'four-day'].includes(station.status); + } else if (statusFilter === 'unavailable') { + return station.status === 'unavailable'; + } + return true; + }); + } + + if (channelFilter) { + filteredData = filteredData.filter(station => + station.primaryChannelType === channelFilter || + (channelFilter === 'other' && !['HH', 'BH', 'LH', 'SH', 'EH'].includes(station.primaryChannelType)) + ); + } + + if (searchText) { + filteredData = filteredData.filter(station => + `${station.network}_${station.station}`.toLowerCase().includes(searchText) + ); + } + + // Render filtered data + renderData(filteredData); +} + +// Function to render the data in the current view +function renderData(data = stationsData) { + // Default to all data if not specified + const displayData = data || stationsData; + + // Render based on current view mode + if (viewMode === 'table') { + renderTableView(displayData); + } else if (viewMode === 'grid') { + renderGridView(displayData); + } else if (viewMode === 'map') { + // Update map markers if map is initialized + if (mapInitialized) { + updateMapMarkers(); + } + } +} + +// Function to render table view +function renderTableView(data) { + const tableBody = document.getElementById('table-body'); + tableBody.innerHTML = ''; + + data.forEach(station => { + const row = document.createElement('tr'); + + // Network-Station cell + const nameCell = document.createElement('td'); + nameCell.innerHTML = `${station.network} ${station.station}`; + // Add a badge for primary channel type + if (station.primaryChannelType) { + nameCell.innerHTML += ` ${station.primaryChannelType}`; + } + row.appendChild(nameCell); + + // Status cell + const statusCell = document.createElement('td'); + 
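+        // Editor's note: the "station-<status>" class added below mirrors the status
+        // keys handled by formatStatus() and getStatusColor() later in this file
+        // (e.g. "good", "warning", "critical", "unavailable"); the matching CSS rules
+        // are assumed to come from the stylesheet written by generate_css_file().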
statusCell.classList.add(`station-${station.status}`); + statusCell.textContent = formatStatus(station.status, station.primaryChannelType); + row.appendChild(statusCell); + + // Latency cell + const latencyCell = document.createElement('td'); + latencyCell.textContent = formatLatency(station.latency); + latencyCell.style.backgroundColor = getStatusColor(station.status); + row.appendChild(latencyCell); + + // Channels cell + const channelsCell = document.createElement('td'); + + // Create channel type summary + const channelGroups = {}; + station.channels.forEach(channel => { + const type = channel.channelType || 'other'; + if (!channelGroups[type]) { + channelGroups[type] = 0; + } + channelGroups[type]++; + }); + + // Format channel groups + const groupsHTML = Object.keys(channelGroups).map(type => + `${type}: ${channelGroups[type]}` + ).join(' '); + + channelsCell.innerHTML = `
${station.channels.length} total
${groupsHTML}
`; + row.appendChild(channelsCell); + + // Last updated cell + const lastDataCell = document.createElement('td'); + if (station.channels.length > 0 && station.channels[0].last_data) { + const lastDataTime = new Date(station.channels[0].last_data); + lastDataCell.textContent = lastDataTime.toLocaleString(); + } else { + lastDataCell.textContent = 'Unknown'; + } + row.appendChild(lastDataCell); + + tableBody.appendChild(row); + }); + + // Add CSS for channel badges if not already added + if (!document.getElementById('channel-badges-css')) { + const style = document.createElement('style'); + style.id = 'channel-badges-css'; + style.textContent = ` + .channel-badge { + background-color: var(--bg-tertiary); + color: var(--text-secondary); + font-size: 10px; + padding: 2px 4px; + border-radius: 4px; + margin-left: 4px; + font-weight: 500; + vertical-align: middle; + } + + .channel-groups { + display: flex; + flex-wrap: wrap; + gap: 4px; + margin-top: 2px; + font-size: 11px; + } + + .channel-group { + background-color: var(--bg-tertiary); + border-radius: 3px; + padding: 1px 4px; + color: var(--text-secondary); + } + `; + document.head.appendChild(style); + } +} + +// Function to render grid view +function renderGridView(data) { + const gridContainer = document.getElementById('grid-container'); + gridContainer.innerHTML = ''; + + // Group stations by network + const networks = {}; + data.forEach(station => { + if (!networks[station.network]) { + networks[station.network] = []; + } + networks[station.network].push(station); + }); + + // Sort networks by name + const sortedNetworks = Object.keys(networks).sort(); + + for (const network of sortedNetworks) { + const stations = networks[network]; + + // Create a row for the network + const networkRow = document.createElement('div'); + networkRow.className = 'grid-row'; + + // Add network label as a separate cell + const networkLabel = document.createElement('div'); + networkLabel.className = 'network-label'; + networkLabel.textContent = network; + networkRow.appendChild(networkLabel); + + // Create a container for the stations + const stationsContainer = document.createElement('div'); + stationsContainer.className = 'stations-container'; + + // Create a row for the stations + const stationsRow = document.createElement('div'); + stationsRow.className = 'stations-row'; + + // Sort stations by name + stations.sort((a, b) => a.station.localeCompare(b.station)); + + // Add stations + for (const station of stations) { + const stationCell = document.createElement('a'); + stationCell.className = `grid-cell station-${station.status}`; + stationCell.href = `${station.network}_${station.station}.html`; + stationCell.textContent = station.station; + stationCell.setAttribute('data-tooltip', `${station.network}_${station.station}: ${formatLatency(station.latency)}`); + stationCell.setAttribute('data-network', station.network); + stationCell.setAttribute('data-station', station.station); + stationCell.setAttribute('data-status', station.status); + stationCell.setAttribute('data-latency', formatLatency(station.latency)); + stationsRow.appendChild(stationCell); + } + + stationsContainer.appendChild(stationsRow); + networkRow.appendChild(stationsContainer); + gridContainer.appendChild(networkRow); + } +} + +// Function to format latency for display +function formatLatency(seconds) { + if (seconds === null || seconds === undefined) return 'n/a'; + + if (seconds > 86400) return `${(seconds/86400).toFixed(1)} d`; + if (seconds > 3600) return `${(seconds/3600).toFixed(1)} 
h`; + if (seconds > 60) return `${(seconds/60).toFixed(1)} m`; + return `${seconds.toFixed(1)} s`; +} + +// Function to format status for display +function formatStatus(status, channelType) { + const isLH = channelType === 'LH'; + + // Add (LH) tag to status labels for LH channels + const lhSuffix = isLH ? ' (LH)' : ''; + + if (status === 'good') return 'Good' + lhSuffix; + if (status === 'delayed') return 'Delayed (>1m)' + lhSuffix; + if (status === 'long-delayed') return 'Delayed (>10m)' + lhSuffix; + if (status === 'very-delayed') return 'Delayed (>30m)' + lhSuffix; + if (status === 'hour-delayed') return 'Delayed (>1h)' + lhSuffix; + if (status === 'warning') return 'Warning (>2h)' + lhSuffix; + if (status === 'critical') return 'Critical (>6h)' + lhSuffix; + if (status === 'day-delayed') return 'Delayed (>1d)' + lhSuffix; + if (status === 'multi-day') return 'Delayed (>2d)' + lhSuffix; + if (status === 'three-day') return 'Delayed (>3d)' + lhSuffix; + if (status === 'four-day') return 'Delayed (>4d)' + lhSuffix; + if (status === 'unavailable') return 'Unavailable (>5d)' + lhSuffix; + return status.charAt(0).toUpperCase() + status.slice(1) + lhSuffix; +} + +// Function to get color for a status +function getStatusColor(status) { + const colors = { + 'good': '#FFFFFF', + 'delayed': '#EBD6FF', + 'long-delayed': '#9470BB', + 'very-delayed': '#3399FF', + 'hour-delayed': '#00FF00', + 'warning': '#FFFF00', + 'critical': '#FF9966', + 'day-delayed': '#FF3333', + 'multi-day': '#FFB3B3', + 'three-day': '#CCCCCC', + 'four-day': '#999999', + 'unavailable': '#666666' + }; + + return colors[status] || '#FFFFFF'; +} + +// Function to update the refresh interval +function updateRefreshInterval(seconds) { + // Update current refresh interval + currentRefreshInterval = seconds; + + // Clear existing timer + if (refreshTimer) { + clearTimeout(refreshTimer); + } + + // Store the preference in localStorage + localStorage.setItem('seedlinkRefreshInterval', seconds); + + // Set up next refresh + setupNextRefresh(); + + return seconds; +} + +// Function to set up the next refresh +function setupNextRefresh() { + // Calculate time until next refresh + const timeUntilNextRefresh = Math.max(1000, (currentRefreshInterval * 1000)); + + // Clear existing timer + if (refreshTimer) { + clearTimeout(refreshTimer); + } + + // Set new timer + refreshTimer = setTimeout(fetchData, timeUntilNextRefresh); + + // Update the display + document.getElementById('refresh-interval').value = currentRefreshInterval; + document.getElementById('next-refresh').textContent = currentRefreshInterval; + + // Start countdown + startRefreshCountdown(); +} + +// Function to start countdown to next refresh +function startRefreshCountdown() { + const countdownElement = document.getElementById('next-refresh'); + const currentValue = parseInt(countdownElement.textContent); + + // Clear any existing interval + if (window.countdownInterval) { + clearInterval(window.countdownInterval); + } + + // Set new interval + window.countdownInterval = setInterval(() => { + const newValue = parseInt(countdownElement.textContent) - 1; + if (newValue > 0) { + countdownElement.textContent = newValue; + } else { + clearInterval(window.countdownInterval); + } + }, 1000); +} + +// Function to export data to CSV +function exportToCsv() { + // Create CSV content + let csvContent = 'Network,Station,Status,Latency,Channels,Last Updated\\n'; + + stationsData.forEach(station => { + const lastUpdate = station.channels.length > 0 && station.channels[0].last_data + ? 
new Date(station.channels[0].last_data).toISOString() + : 'Unknown'; + + csvContent += `${station.network},${station.station},${station.status},${station.latency},${station.channels.length},${lastUpdate}\\n`; + }); + + // Create download link + const blob = new Blob([csvContent], { type: 'text/csv;charset=utf-8;' }); + const url = URL.createObjectURL(blob); + const link = document.createElement('a'); + link.setAttribute('href', url); + link.setAttribute('download', `seedlink-status-${new Date().toISOString().split('T')[0]}.csv`); + link.style.display = 'none'; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + + // Clean up + setTimeout(() => { + URL.revokeObjectURL(url); + }, 100); +} + +// Function to toggle stats display +function toggleStats() { + const statsContainer = document.getElementById('stats-container'); + + if (statsContainer.style.display === 'block') { + statsContainer.style.display = 'none'; + return; + } + + // Show the stats container + statsContainer.style.display = 'block'; + + // Calculate statistics + calculateStats(); +} + +// Function to calculate and display statistics +function calculateStats() { + const networks = {}; + + let totalStations = stationsData.length; + let goodStations = 0; + let warningStations = 0; + let criticalStations = 0; + let unavailableStations = 0; + + // Group by network and count statuses + stationsData.forEach(station => { + // Create network entry if it doesn't exist + if (!networks[station.network]) { + networks[station.network] = { + total: 0, + good: 0, + warning: 0, + critical: 0, + unavailable: 0 + }; + } + + // Count by network + networks[station.network].total++; + + // Count by status + if (station.status === 'good') { + networks[station.network].good++; + goodStations++; + } else if (['delayed', 'long-delayed', 'very-delayed', 'hour-delayed', 'warning'].includes(station.status)) { + networks[station.network].warning++; + warningStations++; + } else if (['critical', 'day-delayed', 'multi-day', 'three-day', 'four-day'].includes(station.status)) { + networks[station.network].critical++; + criticalStations++; + } else if (station.status === 'unavailable') { + networks[station.network].unavailable++; + unavailableStations++; + } + }); + + // Update status counter + const statusCounter = document.getElementById('status-counter'); + statusCounter.innerHTML = ` +
+ ${totalStations - unavailableStations} active of ${totalStations} total stations +
+
+
+ ${goodStations} good +
+
+ ${warningStations} warning +
+
+ ${criticalStations} critical +
+
+ ${unavailableStations} unavailable +
+
+ `; + + // Update network stats + const networkStats = document.getElementById('network-stats'); + networkStats.innerHTML = ''; + + Object.keys(networks).sort().forEach(network => { + const stats = networks[network]; + const activePercentage = Math.round(((stats.total - stats.unavailable) / stats.total) * 100); + + const networkStat = document.createElement('div'); + networkStat.className = 'network-stat'; + + // Create the name and count display + const nameContainer = document.createElement('div'); + nameContainer.className = 'network-name'; + nameContainer.innerHTML = ` + ${network} + ${stats.total - stats.unavailable}/${stats.total} + `; + + // Create the progress bar + const progressContainer = document.createElement('div'); + progressContainer.className = 'progress-bar'; + + const progressBar = document.createElement('div'); + progressBar.className = 'progress'; + progressBar.style.width = `${activePercentage}%`; + + progressContainer.appendChild(progressBar); + + networkStat.appendChild(nameContainer); + networkStat.appendChild(progressContainer); + + networkStats.appendChild(networkStat); + }); +} + +function sortTable(column) { + // Get current sort direction from the header + const header = document.querySelector(`th[data-sort="${column}"]`); + const currentDirection = header.getAttribute('data-direction') || 'asc'; + const newDirection = currentDirection === 'asc' ? 'desc' : 'asc'; + + // Update all headers to remove sort indicators + document.querySelectorAll('th[data-sort]').forEach(th => { + th.setAttribute('data-direction', ''); + th.classList.remove('sort-asc', 'sort-desc'); + }); + + // Set direction on current header + header.setAttribute('data-direction', newDirection); + header.classList.add(`sort-${newDirection}`); + + // Sort data based on column + stationsData.sort((a, b) => { + let valueA, valueB; + + if (column === 'name') { + valueA = `${a.network}_${a.station}`; + valueB = `${b.network}_${b.station}`; + } else if (column === 'status') { + valueA = a.status; + valueB = b.status; + } else if (column === 'latency') { + valueA = a.latency; + valueB = b.latency; + } else if (column === 'channels') { + valueA = a.channels.length; + valueB = b.channels.length; + } else if (column === 'updated') { + valueA = a.channels.length > 0 ? new Date(a.channels[0].last_data || 0).getTime() : 0; + valueB = b.channels.length > 0 ? 
new Date(b.channels[0].last_data || 0).getTime() : 0; + } + + // Handle string comparison + if (typeof valueA === 'string') { + if (newDirection === 'asc') { + return valueA.localeCompare(valueB); + } else { + return valueB.localeCompare(valueA); + } + } + // Handle number comparison + else { + if (newDirection === 'asc') { + return valueA - valueB; + } else { + return valueB - valueA; + } + } + }); + + // Re-render the table with sorted data + renderTableView(stationsData); +} + +// Utility function for debouncing +function debounce(func, wait) { + let timeout; + return function(...args) { + const context = this; + clearTimeout(timeout); + timeout = setTimeout(() => func.apply(context, args), wait); + }; +} +""" + + try: + js_path = os.path.join(config['setup']['wwwdir'], 'script.js') + with open(js_path, 'w') as f: + f.write(js_content) + print(f"JavaScript file generated at {js_path}") + return js_path + except Exception as e: + print(f"Error generating JavaScript file: {str(e)}") + return None + + +def generate_html_base(config, title, active_view): + """Generate base HTML structure for all pages""" + + # Determine if map plugins should be included + include_map_plugins = 'enable_map' in config['setup'] and config['setup']['enable_map'] + + # Start with the first part using proper variable interpolation + html = f""" + + + + + {config['setup']['title']} - {title} + + + + + + + + + + +""" + + # Include additional map plugins if map is enabled + if include_map_plugins: + html += """ + + + + + + + + + + +""" + + # Continue with the second part, but using f-string instead of .format() + html += f""" + + +
+
+

{config['setup']['title']}

+ +
+ +

Real-time seismic station monitoring dashboard

+ +
+
+ + + +
+ +
+
+ + + seconds +
+ + +
+
Last refresh: -
+
Next in {int(config['setup']['refresh'])} seconds
+
+
+
+""" + + return html + +def generate_main_html(config, status): + """Generate the main index.html with all three views""" + + html = generate_html_base(config, "Dashboard", "table") + + map_settings = get_map_settings(config) + + # Add filters + html += f""" + +
+
+ + +
+
+ + +
+
+ +
+
+ +
+
+
Station Statistics
+ +
+
+
+ +
+
+ +
+
+ Loading station data... +
+ +
+ + +
+
+ + + + + + + + + + + + + +
StationStatusLatencyChannelsLast Updated
+
+
+ + + + + + + """ + + # Add legend + html += """ +
+
+
+ Good (≤ 1 min) +
+
+
+ > 1 min +
+
+
+ > 10 min +
+
+
+ > 30 min +
+
+
+ > 1 hour +
+
+
+ > 2 hours +
+
+
+ > 6 hours +
+
+
+ > 1 day +
+
+
+ > 2 days +
+
+
+ > 3 days +
+
+
+ > 4 days +
+
+
+ > 5 days +
+
+ """ + + # Add footer and close tags + html += f""" + +
+ + + + + + + + +""" + + try: + html_path = os.path.join(config['setup']['wwwdir'], 'index.html') + with open(html_path, 'w') as f: + f.write(html) + print(f"Main HTML file generated at {html_path}") + return html_path + except Exception as e: + print(f"Error generating main HTML file: {str(e)}") + return None + + +def generate_station_html(net_sta, config, status): + """Generate individual station HTML page""" + + try: + network, station = net_sta.split("_") + except: + print(f"Invalid station identifier: {net_sta}") + return None + + html = generate_html_base(config, f"Station {station}", "table") + + # Add station info + html += f""" +
+

{network}_{station}

+ """ + + # Add station information if available + try: + if 'info' in config.station[net_sta]: + html += f'
{config.station[net_sta]["info"]}
' + except: + pass + + html += """ +
+ """ + + # Add custom text if available + try: + if 'text' in config.station[net_sta]: + html += f'

{config.station[net_sta]["text"]}

' + except: + pass + + # Station details table + html += """ +
+ + + + + + + + + + + + + """ + + now = datetime.utcnow() + netsta2 = net_sta.replace("_", ".") + streams = [x for x in list(status.keys()) if x.find(netsta2) == 0] + streams.sort() + + for label in streams: + tim1 = status[label].last_data + tim2 = status[label].last_feed + + lat1, lat2, lat3 = now-tim1, now-tim2, tim2-tim1 + col1, col2, col3 = getColor(lat1), getColor(lat2), getColor(lat3) + + if lat1 == lat2: + lat2 = lat3 = None + + if label[-2] == '.' and label[-1] in "DE": + label = label[:-2] + + n, s, loc, c = label.split(".") + c = ("%s.%s" % (loc, c)).strip(".") + + time1_str = tim1.strftime("%Y/%m/%d %H:%M:%S") if tim1 else "n/a" + time2_str = tim2.strftime("%Y/%m/%d %H:%M:%S") if tim2 else "n/a" + + html += f""" + + + + + + + + + """ + + html += """ + +
ChannelLast SampleData LatencyLast ReceivedFeed LatencyDiff
{s} {c}{time1_str}{formatLatency(lat1)}{time2_str}{formatLatency(lat2)}{formatLatency(lat3)}
+
+ """ + + # Legend + html += """ +
+
+
+ Good (≤ 1 min) +
+
+
+ > 1 min +
+
+
+ > 10 min +
+
+
+ > 30 min +
+
+
+ > 1 hour +
+
+
+ > 2 hours +
+
+ """ + + # Links + html += '\n' + + # Add footer and close tags + html += f""" + + + + + + +""" + + try: + html_path = os.path.join(config['setup']['wwwdir'], f'{net_sta}.html') + with open(html_path, 'w') as f: + f.write(html) + print(f"Station HTML file generated at {html_path}") + return html_path + except Exception as e: + print(f"Error generating station HTML file: {str(e)}") + return None + + +def generate_json_data(status): + """Generate a JSON file with station data for JavaScript use""" + try: + json_data = status.to_json() + json_path = os.path.join(config['setup']['wwwdir'], 'stations_data.json') + with open(json_path, 'w') as f: + f.write(json_data) + print(f"JSON data file generated at {json_path}") + return json_path + except Exception as e: + print(f"Error generating JSON data file: {str(e)}") + return None + + +def generate_all_files(config, status): + """Generate all the static files needed for the web interface""" + + # Create the directory if it doesn't exist + try: + os.makedirs(config['setup']['wwwdir'], exist_ok=True) + except Exception as e: + print(f"Error creating directory: {str(e)}") + return False + + # Generate files + css_path = generate_css_file(config) + js_path = generate_js_file(config) + json_path = generate_json_data(status) + main_html = generate_main_html(config, status) + + # Generate station pages - Get UNIQUE station identifiers + unique_stations = set() + for k in status: + net_sta = f"{status[k].net}_{status[k].sta}" + unique_stations.add(net_sta) + + # Now generate each station page exactly once + station_htmls = [] + for net_sta in unique_stations: + html_path = generate_station_html(net_sta, config, status) + station_htmls.append(html_path is not None) + + # Return success only if all files were generated + all_stations_success = len(station_htmls) > 0 and all(station_htmls) + + # Log success or failure + if all_stations_success: + print(f"Successfully generated {len(station_htmls)} station HTML files") + else: + print(f"ERROR: Failed to generate some station HTML files") + + # Return success if all files were generated + return all([css_path, js_path, json_path, main_html, all_stations_success]) + + +def read_ini(): + """Read configuration files""" + global config, ini_setup, ini_stations + print("reading setup config from '%s'" % ini_setup) + if not os.path.isfile(ini_setup): + print("[error] setup config '%s' does not exist" % ini_setup, file=sys.stderr) + usage(exitcode=2) + + config = MyConfig(ini_setup) + print("reading station config from '%s'" % ini_stations) + if not os.path.isfile(ini_stations): + print("[error] station config '%s' does not exist" % ini_stations, file=sys.stderr) + usage(exitcode=2) + config.station = MyConfig(ini_stations) + + +def SIGINT_handler(signum, frame): + """Handle interruption signals""" + global status + print("received signal #%d => will write status file and exit" % signum) + sys.exit(0) + + +def main(): + """Main function to run the program""" + global config, status, verbose, generate_only, ini_setup, ini_stations + + # Parse command line arguments + try: + opts, args = getopt(sys.argv[1:], "c:s:t:hvg", ["help", "generate"]) + except GetoptError: + print("\nUnknown option in "+str(sys.argv[1:])+" - EXIT.", file=sys.stderr) + usage(exitcode=2) + + for flag, arg in opts: + if flag == "-c": ini_setup = arg + elif flag == "-s": ini_stations = arg + elif flag == "-t": refresh = float(arg) # XXX not yet used + elif flag in ("-h", "--help"): usage(exitcode=0) + elif flag == "-v": verbose = 1 + elif flag in 
("-g", "--generate"): generate_only = True + + # Set up signal handlers + signal.signal(signal.SIGHUP, SIGINT_handler) + signal.signal(signal.SIGINT, SIGINT_handler) + signal.signal(signal.SIGQUIT, SIGINT_handler) + signal.signal(signal.SIGTERM, SIGINT_handler) + + # Read configuration + read_ini() + + # Load station coordinates from the FDSN web service + try: + load_station_coordinates(config) + except Exception as e: + print(f"Warning: Failed to load station coordinates: {str(e)}") + + # Prepare station information + s = config.station + net_sta = ["%s_%s" % (s[k]['net'], s[k]['sta']) for k in s] + s_arg = ','.join(net_sta) + + # Set server from config or use default + if 'server' in config['setup']: + server = config['setup']['server'] + else: + server = "localhost" + + # Initialize status dictionary + status = StatusDict() + + print("generating output to '%s'" % config['setup']['wwwdir']) + + if generate_only: + # Generate template files without fetching data + print("Generating template files only...") + + # Create dummy data for template rendering + for net_sta_item in net_sta: + net, sta = net_sta_item.split('_') + + d = Status() + d.net = net + d.sta = sta + d.loc = "" + d.cha = "HHZ" + d.typ = "D" + d.last_data = datetime.utcnow() + d.last_feed = datetime.utcnow() + + sec = "%s.%s.%s.%s.%c" % (d.net, d.sta, d.loc, d.cha, d.typ) + status[sec] = d + + # Generate all files + if generate_all_files(config, status): + print("Template files generated successfully.") + else: + print("Error generating template files.") + + sys.exit(0) + + # Get initial data + print("getting initial time windows from SeedLink server '%s'" % server) + status.fromSlinkTool(server, stations=net_sta) + if verbose: + status.write(sys.stderr) + + # Generate initial files + generate_all_files(config, status) + + # Set up the next time to generate files + nextTimeGenerateHTML = time() + + print("setting up connection to SeedLink server '%s'" % server) + + # Connect to the SeedLink server and start receiving data + input = seiscomp.slclient.Input(server, [(s[k]['net'], s[k]['sta'], "", "???") for k in s]) + for rec in input: + id = '.'.join([rec.net, rec.sta, rec.loc, rec.cha, rec.rectype]) + + try: + status[id].last_data = rec.end_time + status[id].last_feed = datetime.utcnow() + except: + continue + + if time() > nextTimeGenerateHTML: + generate_all_files(config, status) + nextTimeGenerateHTML = time() + int(config['setup']['refresh']) + + +if __name__ == "__main__": + main() diff --git a/bin/waitlock b/bin/waitlock index 97bcc1c..0cd6945 100755 Binary files a/bin/waitlock and b/bin/waitlock differ diff --git a/etc/defaults/ql2sc.cfg b/etc/defaults/ql2sc.cfg index 3555155..7fe810f 100644 --- a/etc/defaults/ql2sc.cfg +++ b/etc/defaults/ql2sc.cfg @@ -8,15 +8,97 @@ connection.subscriptions = EVENT # Number of seconds to fetch missed updates on start up. backLog = 1800 -# Number of public objects to cache. -cacheSize = 5000 - # Maximum number of notifiers to batch in one message. If set to 0 no size # limit is enforced. Make sure to not hit the overall message size limited of # 16MiB which is enforced by the messaging system. batchSize = 2000 # If event synchronisation is enabled and an incoming origin is not yet -# associated with an event on the target machine then this timeout defines -# the maximum number of seconds to wait for an association. +# associated with an event on the target machine, then this timeout defines the +# maximum number of seconds to wait for an association. 
 eventAssociationTimeout = 10
+
+# Registration of the host profiles defining the connection parameters to the
+# QuakeLink hosts.
+#hosts = local
+
+# URL of the QuakeLink service, the scheme 'qls' enables SSL.
+# Format: [ql[s]://][user:pwd@][host][:port].
+# If set to an empty string the application will run without any QuakeLink
+# connection attempt.
+#host.local.url = ql://localhost:18010
+
+# Enable/disable GZip (GNU zip) compression.
+#host.local.gzip = false
+
+# Request native data instead of XML format. Native data export may be disabled
+# on some hosts.
+#host.local.native = true
+
+# Try to update the event attributes of the target event with the attributes of
+# the source event which includes event type and event certainty. It will not
+# import events but tries to find the associated event of the input preferred
+# origin at the target system and will update the event attributes via
+# journaling.
+#host.local.syncEventAttributes = true
+
+# Synchronize the preferred origin and preferred magnitude selection if
+# different from the imported selection. ql2sc will wait for the event
+# association of an imported origin and check if the preferred origin or
+# preferred magnitude is different from the imported QuakeLink event. If so, it
+# will send a journal to force selection of the preferred origin and selection
+# of the preferred magnitude type. These are the same operations as within
+# scolv to fix an origin and a particular magnitude type.
+#host.local.syncPreferred = false
+
+# Delays the synchronization of event attributes in seconds if set to a value
+# greater than zero.
+#host.local.syncEventDelay = 0
+
+# Request the server to send a keep-alive message every 30s to prevent connection
+# resets by firewalls on long idle periods. If activated, the client will reset
+# the connection if no alive message is received within 60s.
+#host.local.keepAlive = true
+
+# Server-side SQL-like WHERE clause to filter the result set. The actual
+# available parameters depend on the QuakeLink server version. Use 'telnet host
+# port' followed by 'help select' to connect to a QuakeLink server and request
+# available parameters.
+# clause := condition[ AND|OR [(]clause[)]] condition :=
+# MAG|DEPTH|LAT|LON|PHASES|DIST(lat,lon) op {float} | DIST(lat,lon) IN
+# [{float}, {float}] | UPDATED|OTIME op time |
+# AGENCY|AUTHOR|STATUS|ESTATUS|EMODE|TYPE|CTYPE|DTYPE|REGION|MAG_T op 'string'
+# | MAG|DEPTH|LAT|LON|PHASES|OTIME|UPDATED IS [NOT] NULL FELT|NOT FELT op :=
+# =|!=|>|>=|<|<=|eq|gt|ge|lt|ge time := %Y,%m,%d[,%H,%M,%S[,%f]]
+#host.local.filter = ""
+
+# Map datamodel class names to messaging groups. For unmapped objects the
+# mapping of their parent objects is evaluated recursively. Objects may be
+# excluded by mapping them to 'NULL'.
+#host.local.routingTable = Pick:IMPORT_GROUP, Amplitude:IMPORT_GROUP, FocalMechanism:EVENT, Origin:EVENT
+
+# Include picks
+#host.local.data.picks = true
+
+# Include amplitudes
+#host.local.data.amplitudes = true
+
+# Include origin arrivals
+#host.local.data.arrivals = true
+
+# Include origin station magnitudes
+#host.local.data.staMags = true
+
+# Include moment tensor station contributions and phase settings
+#host.local.data.staMts = true
+
+# Include only preferred origin and magnitude information
+#host.local.data.preferred = false
+
+# Defines a blacklist of publicID prefixes that are not allowed for processing.
+# Separate items by comma.
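+# Example (editor's sketch, the prefixes below are purely hypothetical):
+#   processing.blacklist.publicIDs = smi:org.example/, quakeml:local/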
+#processing.blacklist.publicIDs = "" + +# Defines a whitelist of publicID prefixes that are allowed for processing. +# Separate items by comma. +#processing.whitelist.publicIDs = "" diff --git a/etc/defaults/scalert.cfg b/etc/defaults/scalert.cfg index feb6d69..618763b 100644 --- a/etc/defaults/scalert.cfg +++ b/etc/defaults/scalert.cfg @@ -1,3 +1,3 @@ # Defines a list of message groups to subscribe to. The default is usually # given by the application and does not need to be changed. -connection.subscriptions = EVENT, LOCATION, MAGNITUDE +connection.subscriptions = EVENT, LOCATION, MAGNITUDE, AMPLITUDE, PICK diff --git a/etc/defaults/scqcv.cfg b/etc/defaults/scqcv.cfg index 08cce3c..a94eb0b 100644 --- a/etc/defaults/scqcv.cfg +++ b/etc/defaults/scqcv.cfg @@ -264,9 +264,6 @@ range.above = 0, 999 # A color defined by the color definitions below. range.above.color = green -# Possible values: enableStream, disableStream -range.above.action = enableStream - # range.below = -99, -11 @@ -277,9 +274,6 @@ range.below.count = 0 # A color defined by the color definitions below. range.below.color = grey -# Possible values: enableStream, disableStream -range.below.action = disableStream - # range.timing = -200, -100 diff --git a/etc/defaults/scrttv.cfg b/etc/defaults/scrttv.cfg index bf0d3b7..81d202a 100644 --- a/etc/defaults/scrttv.cfg +++ b/etc/defaults/scrttv.cfg @@ -18,7 +18,7 @@ resortAutomatically = true showPicks = true # Defines the filters to be used when filtering is activated. -filters = "RMHP(2)>>ITAPER(5)>>BW(3, 0.5, 8.0)","RMHP(2)>>ITAPER(5)>>BW_HP(3, 3)" +filters = "BW 0.5 - 8.0 Hz;RMHP(2)>>ITAPER(5)>>BW(3, 0.5, 8.0)","HP 3.0 Hz;RMHP(2)>>ITAPER(5)>>BW_HP(3, 3)" # Activates the first filter of the configured filter list after startup. This # is equivalent to pressing 'f'. diff --git a/etc/descriptions/fdsnws.xml b/etc/descriptions/fdsnws.xml index 308c94b..9193034 100644 --- a/etc/descriptions/fdsnws.xml +++ b/etc/descriptions/fdsnws.xml @@ -72,10 +72,11 @@ Restrict end time of requests to current time - realtimeGap - seconds. Negative values allowed. Used in fdsnws-dataselect. - WARNING: If this value is unset and a realtime recordsource - (e.g. slink) is used, requests may block if end time in future - is requested. + seconds. Negative values are allowed. Used in fdsnws-dataselect. + + WARNING: If this value is unset and a real-time RecordStream + (e.g. slink) is used, requests may block if end times in the + future are requested. @@ -90,7 +91,7 @@ Set the number of bytes to buffer for each chunk of waveform data served to the client. The lower the buffer the higher the overhead of Python Twisted. The higher the buffer the higher the memory - usage per request. 100kB seems to be a good trade-off. + usage per request. 100 kB seems to be a good trade-off. @@ -103,12 +104,12 @@ - Path to access log file. If unset no access log is created. + Path to access log file. If unset, no access log is created. - Path to request log file. If unset no request log is created. + Path to request log file. If unset, no request log is created. @@ -121,7 +122,10 @@ List of domain names Cross-Origin Resource Sharing (CORS) request may originate from. A value of '*' allows any web page to embed your service. An empty value will switch of CORS - requests entirely. An example of multiple domains might be: + requests entirely. + + Example of multiple domains: + 'https://test.domain.de, https://production.domain.de'. @@ -154,7 +158,7 @@ If enabled, event comment elements are no longer accessible. 
- + If set, the event service will only return events having a preferred origin with a matching evaluationMode property. @@ -168,7 +172,7 @@ List of disabled event types - + List of enabled event formats. If unspecified, all supported formats are enabled. @@ -195,12 +199,12 @@ standard FDSNWS extension served under fdsnws/ext/availability. - + Path to station inventory filter file. - + Path to dataselect inventory filter file. @@ -288,6 +292,38 @@ + + + + Enable JWT extension. + + + + + List of issuer URLs. + + + + + List of valid audience. + + + + + List of allowed algorithms. + + + + + Minimum time to wait between requesting updated keys from a key server. + + + + + Maximum time to cache received keys. + + + diff --git a/etc/descriptions/global.xml b/etc/descriptions/global.xml index 45347a8..1ea84b3 100644 --- a/etc/descriptions/global.xml +++ b/etc/descriptions/global.xml @@ -34,7 +34,7 @@ comma. Add ${plugins} to consider all previously read values. - + Path to the cities XML file. If undefined, the data is read from "@CONFIGDIR@/cities.xml" or @@ -115,7 +115,7 @@ are written to log files per modules as "@CONFIGDIR@/log/[module].log". - + Set the logging level between 1 and 4 where 1=ERROR, 2=WARNING, 3=INFO and 4=DEBUG. @@ -269,14 +269,14 @@ is established. Override these values only if you know what you are doing. - + Load the inventory database from a given XML file if set. This overrides the inventory definitions loaded from the database backend. - + Load the configuration database from a given XML file if set. This overrides the configuration definitions loaded from the @@ -344,7 +344,7 @@ - + Path to crash handler script. @@ -453,9 +453,17 @@ e.g. "signalBegin". This can be overridden per station in its bindings. - + + + The name of the travel-time interface to use. The list + can be extended by plugins. + + + The name of the travel-time interface (velocity) model + to use. + @@ -483,10 +491,25 @@ + + + Control if the amplitude calculation should be + regionalized or not. The regions and their configuration + are taken from the corresponding magnitude profiles. If + regionalization is activate, then e.g. modules without + origin information will not be able to compute an + amplitude as the origin is required to determine the + effective settings. + + If amplitudes for this particular type shall be computed + regardless of any defined regions, set this parameter to + false. + + - If enabled then also stations with unused (disabled) - arrivals are considerd for amplitude and implicitly + If enabled, then also stations with unused (disabled) + arrivals are considered for amplitude and implicitly magnitude computations, e.g. by scamp, scmag and scolv. @@ -538,7 +561,7 @@ interpolated but not extrapolated. - + Path to a geofeature file, e.g. in BNA or GeoJSON format, with one or more polygons defining geographic regions. @@ -565,40 +588,72 @@ Enable the region or not. - + The minimum distance required to compute a magnitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to compute the distance. - The default value is implementation specific. + The default value depends on magnitude type. + + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + minDist = 500km - + The maximum distance allowed to compute a magnitude. This settings has no effect with e.g. 
scautopick as there is no information about the source of the event to compute the distance. The default - value is implementation specific. + value depends on magnitude type. + + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + maxDist = 500km - + The minimum depth required to compute a magnitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to retrieve the depth. The default - value is implementation specific. + value depends on magnitude type. + + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + minDepth = 500km - + The maximum depth allowed to compute a magnitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to retrieve the depth. The default - value is implementation specific. + value depends on magnitude type. + + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + maxDepth = 500km @@ -862,12 +917,12 @@ @@ -883,7 +938,7 @@ - @@ -1043,7 +1108,7 @@ - + An amplitude profile configures global parameters for a particular amplitude type. The available amplitude types @@ -1083,14 +1148,38 @@ will be used instead. - + Define the minimum SNR to be reached to compute the amplitudes. This value is amplitude type specific and has no global default value. - + + + Define the minimum period of the measured amplitude. If + the period is below this value, the amplitude will not be emitted. + This value is specific to amplitude type and has no global + default value. A value lower or equal than 0 will disable + this check. + + Caution: If a value is set but the amplitude does not + provide the period, no amplitude is sent. + + + + + Define the maximum period of the measured amplitude. If + the period is above this value, the amplitude will not be emitted. + This value is specific to amplitude type and has no global + default value. A value lower or equal than 0 will disable + this check. + + Caution: If a value is set but the amplitude does not + provide the period, no amplitude is sent. + + + Override the default time (relative to the trigger time) of the begin of the noise window used to compute @@ -1099,7 +1188,7 @@ should only be changed if you know what you are doing. - + Override the default time (relative to the trigger time) of the end of the noise window used to compute @@ -1108,7 +1197,7 @@ should only be changed if you know what you are doing. - + Override the default time (relative to the trigger time) of the begin of the signal window used to compute @@ -1117,7 +1206,7 @@ changed if you know what you are doing. - + Override the default time (relative to the trigger time) of the end of the signal window used to compute @@ -1126,69 +1215,86 @@ changed if you know what you are doing. - + The minimum distance required to compute an amplitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to compute - the distance. The default value is implementation - specific. + the distance. The default value depends on + amplitude type. + + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. 
+ + Example: + + minDist = 500km - + The maximum distance allowed to compute an amplitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to compute - the distance. The default value is implementation - specific. + the distance. The default value depends on + amplitude type. + + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + maxDist = 500km - + The minimum depth required to compute an amplitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to - retrieve the depth. The default value is implementation - specific. + retrieve the depth. The default value depends on + amplitude type. + + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + minDepth = 500km - + The maximum depth allowed to compute an amplitude. This settings has no effect with e.g. scautopick as there is no information about the source of the event to - retrieve the depth. The default value is implementation - specific. - - - - - Control if the amplitude calculation should be - regionalized or not. The regions and their configuration - are taken from the corresponding magnitude profiles. If - regionalization is activate, then e.g. modules without - origin information will not be able to compute an - amplitude as the origin is required to determine the - effective settings. + retrieve the depth. The default value depends on + amplitude type. - If amplitudes for this particular type shall be computed - regardless of any defined regions, set this parameter to - false. + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + maxDepth = 500km Several parameters if usage of full responses is enabled. - + Define the length of the taper at either side of the waveform. The length will be added to the data request: start - taper and end + taper. - + After data are converted in to the frequency domain that minimum frequency defines the end of the left-side @@ -1197,7 +1303,7 @@ A value of 0 or lower disables that taper. - + After data are converted in to the frequency domain that maximum frequency defines the start of the right-side @@ -1211,11 +1317,13 @@ - Define magnitude parameters independent of amplitude-type profiles. - For magnitude correction parameters, e.g., network of station - corrections, create a magnitude type profile. + Define the calibration parameters and constraints for computing + magnitudes from measured amplitudes including static corrections. + The parameters are independent of amplitude-type profiles. + + Standard magnitude types supported in SeisComP: Md,Mjma,ML,MLc,MLh,MLr,MLv,MN,mb,mB,Mwp,Ms_20,Ms(BB). - + A magnitude profile configures global parameters for a particular magnitude type. The available magnitude types @@ -1243,6 +1351,106 @@ Example: "0.0, regionA: -0.1, regionB: 0.2". + + + The minimum distance in degree required to compute a + magnitude. This settings has no effect with e.g. + scautopick as there is no information about the + source of the event to compute the distance. + The default value depends on magnitude type. 
+ + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + minDist = 500km + + + + + The maximum distance in degree allowed to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to compute the distance. The default + value depends on magnitude type. + + Although the default unit is 'deg', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + + + + The minimum depth required to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to retrieve the depth. The default + value depends on magnitude type. + + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + minDepth = 500km + + + + + The maximum depth allowed to compute a magnitude. + This settings has no effect with e.g. scautopick + as there is no information about the source of + the event to retrieve the depth. The default + value depends on magnitude type. + + Although the default unit is 'km', values can be + given in any SI distance unit such km, m or cm + by simply appending the unit to the value. + + Example: + + maxDepth = 500km + + + + + The minimum SNR required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the SNR is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. If this value is set then it + overrides the regionalized setting. + + + + + The minimum period required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. If this value is set, then it + overrides the regionalized setting. + + Caution: If a value is set but the amplitude does not + provide the period, no magnitude is computed. + + + + + The maximum period allowed for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is above this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. If this value is set, then it + overrides the regionalized setting. + + Caution: If a value is set but the amplitude does not + provide the period, no magnitude is computed. + + diff --git a/etc/descriptions/global_fixedhypocenter.xml b/etc/descriptions/global_fixedhypocenter.xml index b010bce..8056aa7 100644 --- a/etc/descriptions/global_fixedhypocenter.xml +++ b/etc/descriptions/global_fixedhypocenter.xml @@ -17,6 +17,30 @@ documentation for the required interface name. + + + The fixed latitude to use. If not set then this + value is read from the input origin. + + + + + The fixed longitude to use. If not set then this + value is read from the input origin. + + + + + The fixed depth to use. If not set then this + value is read from the input origin. + + + + + The fixed time to use. If not set then this + value is read from the input origin. 
+ + Whether to use pick time uncertainties rather than a fixed diff --git a/etc/descriptions/global_gui.xml b/etc/descriptions/global_gui.xml index 9459d34..64fa148 100644 --- a/etc/descriptions/global_gui.xml +++ b/etc/descriptions/global_gui.xml @@ -73,9 +73,10 @@ /path/to/maps/%l/%c/%r.png. - + - Projection of the map tiles configured in 'map.location'. + Projection of the map tiles configured in "map.location". + Note: Additional projections may be available by plugins. @@ -114,6 +115,18 @@ layer is "cities". + + + Parameters specifically related to maps loaded with + mercator projection. + + + + Only support discrete zoom levels. The parameter impacts + the effect of "map.zoom.sensitivity". + + + @@ -131,7 +144,7 @@ Configuration for the fep layer showing the polygons - of FEP (FLinn-Engdahl-Polygon) files on maps if they + of FEP (Flinn-Engdahl-Polygon) files on maps if they exist in @DATADIR@/fep or @CONFIGDIR@/fep. @@ -144,9 +157,9 @@ Maximum number of cities to be rendered. If - cityPopulationWeight is less or equal than 0 then - all cities are rendered ordered by population count, - highest first. + "scheme.map.cityPopulationWeight" is less + or equal than 0 then all cities are rendered ordered + by population count, highest first. To show the N most populated places in the visible map region, set "scheme.map.cityPopulationWeight" to 0 @@ -163,21 +176,22 @@ - This group defines various options for color, pen, brush, font, etc. for SeisComP - graphical user interfaces. There are various conventions to - define colors, fonts and gradients. + This group defines various options controlling the look and feel + of graphical modules. The options include symbols, units, + precision, color, pen, brush, font, etc. There are various + conventions to define colors, fonts and gradients. - Colors are defined in HTML - convention, e.g. as rgb values, hexadecimal numbers or color - keyword names defined by W3C. If rgb or rgba is used, it must - be quoted because the comma is handled as list separator by - the configuration. - Examples: "rgb(255,0,0)", FF00FF40, green. + Colors are defined in HTML + convention, e.g. as rgb values, hexadecimal numbers or color + keyword names defined by W3C. If rgb or rgba is used, it must + be quoted because the comma is handled as list separator by + the configuration. + Examples: "rgb(0,128,0)", 00800040, green. - Gradients are configured as lists of tuples where each tuple - is colon separated in the form value:color. Value is either - int or double and color is again a color definition. - Example: 0:yellow,10:red + Gradients are configured as lists of tuples where each tuple + is colon separated in the form value:color. Value is either + int or double and color is again a color definition. + Example: 0:yellow,10:red Show menu bar. @@ -187,39 +201,71 @@ Show status bar. - + - Set position if tab bar. An unset value lets the application - decide where to place the tab bar. This option might not be - supported by all applications. Valid positions are: off, north, - south, east, west + The position of tab bar of GUI applications like scolv. + An unset value lets the application decide where to place + the tab bar. This option might not be supported by all + applications. Valid positions are: off, north, south, + east, west. + + + + + + Show hypocentral instead of epicentral distances. - The station symbol size (e.g. in scmv). + The station symbol size, e.g., in scmv. - The origin symbol minimum size. 
The formula to compute the - size of the origin symbol is: 4.9*(M-1.2). + The minimum symbol size of origins. The size is scaled + with magnitude according to + "scheme.mag.originSymbolScaleMag". + + + + + The minimum magnitude to be represented by an origin + symbol. The symbol size of origins with this or lower + magnitude is configured by + "scheme.mag.originSymbolMinSize". For origins + with larger magnitudes the symbol sizes are scaled according + to "scheme.mag.originSymbolScaleMag". + + Starting from the minimum magnitude legends explaining + magnitudes will show eight symbols in steps of one + magnitude. + + + + + The factor for scaling the symbol size of origins with + some magnitude as: + "scheme.mag.originSymbolScaleMag" * (magnitude - "scheme.mag.originSymbolMinMag"). + + The color of origins symbols is controlled by the parameters + "scheme.colors.originSymbol.*". - Apply antialiasing to map layers. - This improves the visual quality but decreases performance. + Apply antialiasing to map layers. + This improves the visual quality but decreases performance. - Apply bilinear filtering to maps. - The bilinear filter improves the visual quality but decreases - performance slightly. It is only used for static map images. - Not while dragging. + Apply bilinear filtering to maps. + The bilinear filter improves the visual quality but decreases + performance slightly. It is only used for static map images. + Not while dragging. @@ -229,54 +275,66 @@ - Show cities defined in "citiesXML" on maps + Show cities defined in "citiesXML" on maps - Controls at which zoom level a city will be visible. The - following formula is used: - screen_width (km) * weight >= population + Controls at which zoom level a city will be visible. The + following formula is used: + screen_width (km) * weight >= population - Show custom layers on maps + Show custom layers on maps - Show map legends initially. Some applications provide - controls to toggle the visibility in addition to this - option. + Show map legends initially. Some applications provide + controls to toggle the visibility in addition to this + option. - + - SeisComP ships with the rectangular projection built-in. - Other projections may be provided through plugins. + The projection for showing maps. SeisComP ships with + Rectangular and Mercator. + Other projections may be provided through plugins. + + The visible latitude range of maps shown with Mercator + is limited to -85 - +85 deg. - Converts map colors from RGB color scheme to BGR. + Converts map colors from RGB color scheme to BGR. - Minimum screen distance to plot a polygon or polyline line segment. + Minimum screen distance to plot a polygon or polyline line segment. + + + + + Maximum allowed zoom level. - Colors can be configured as hexadecimal numbers or color keyword names - defined by W3C. + + Colors can be configured as rgb, rgba. hexadecimal numbers or color keyword names + defined by W3C. If a color is unset, SeisComP applications + attempt to read it from your Desktop theme. + - A general application background color. Can be used to give - each application a different background color. An unset value - lets Qt decide. + A general application background color. Can be used to give + each application a different background color. An unset value + lets Qt decide. @@ -299,25 +357,28 @@ - The color of lines in the map (e.g. lines - connecting the origin and a station). + The color of lines in the map (e.g. lines + connecting the origin and a station). 
- The color of station outlines in the map. + The color of station outlines in the map. - Defines the pen of the directivity information - (backazimuth and slowness) in the map. + Defines the pen of the directivity information + (backazimuth and slowness) in the map. The color of the pen. - - The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + The width of the pen. @@ -330,8 +391,12 @@ The color of the pen. - - The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + + + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, + DashDotDotLine. + The width of the pen. @@ -339,47 +404,47 @@ - The color of station annotations. + The color of station annotations. - The color of city labels. + The color of city labels. - The color of city outlines. + The color of city outlines. - The color of a capital. + The color of a capital. - The color of a "normal" city. + The color of a "normal" city. - Appearance of map annotations including a text - color (pen), a border color (pen) and a background - color (brush). + Appearance of map annotations including a text + color (pen), a border color (pen) and a background + color (brush). - Text pen for non highlighted annotations. + Text pen for non highlighted annotations. The color of the pen. - + - The style of the pen. Supported values are: - NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -388,15 +453,15 @@ - Border pen for non highlighted annotations. + Border pen for non highlighted annotations. The color of the pen. - + - The style of the pen. Supported values are: - NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -405,12 +470,12 @@ - Background brush for non highlighted annotations. + Background brush for non highlighted annotations. The color of the brush. - + The style of the brush. Supported values are, e.g.: solid, dense1, dense7, horizontal, vertical, @@ -420,15 +485,15 @@ - Text pen for highlighted annotations. + Text pen for highlighted annotations. The color of the pen. - + - The style of the pen. Supported values are: - NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -437,15 +502,15 @@ - Border pen for highlighted annotations. + Border pen for highlighted annotations. The color of the pen. - + - The style of the pen. Supported values are: - NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. + The style of the pen. Supported values are: + NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -454,21 +519,23 @@ - Background brush for highlighted annotations. + Background brush for highlighted annotations. The color of the brush. - + - The style of the brush. Supported values are, e.g.: - solid, dense1, dense7, horizontal, vertical, - cross, bdiag, fdiag, diagcross. + The style of the brush. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. - Font point size of the label text. 
+ + Font point size of the label text. + @@ -478,7 +545,9 @@ The general color of records/traces. - A general trace color of the alternate trace (eg scheli). + + A general trace color of the alternate trace, e.g., scheli. + The general background color of records/traces. @@ -509,7 +578,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -521,16 +590,16 @@ - Defines the brush of the enlcosed area. + Defines the brush of the enclosed area. The color of the pen. - + - The style of the brush. Supported values are, e.g.: - solid, dense1, dense7, horizontal, vertical, - cross, bdiag, fdiag, diagcross. + The style of the brush. Supported values are, e.g.: + solid, dense1, dense7, horizontal, vertical, + cross, bdiag, fdiag, diagcross. @@ -544,7 +613,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -561,7 +630,7 @@ The color of the pen. - + The style of the pen. Supported values are, e.g.: solid, dense1, dense7, horizontal, vertical, @@ -579,7 +648,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -596,7 +665,7 @@ The color of the pen. - + The style of the pen. Supported values are, e.g.: solid, dense1, dense7, horizontal, vertical, @@ -613,7 +682,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -627,7 +696,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -641,7 +710,7 @@ The color of the pen. - + The style of the pen. Supported values are: NoPen, SolidLine, DashLine, DotLine, DashDotLine, DashDotDotLine. @@ -650,26 +719,26 @@ - Defines the background color of records depending on their state. + Defines the background color of records depending on their state. - Additional data which was not requested. + Additional data which was not requested. - Requested data Background color of requested data. + Requested data Background color of requested data. - Data currently loading. + Data currently loading. - Data which was requested but is not available. + Data which was requested but is not available. @@ -714,7 +783,7 @@ The gradient of arrivals residuals. A gradient is defined as a list of tuples separated by colon where the first item is the value and the second is the color. Colors can be given in - rgb notation or hexadecimal. When rgb is used double quotes are needed to + rgb notation or hexadecimal. When rgb is used, double quotes are needed to protect the comma inside the rgb definition, e.g. -8:"rgb(0,0,100)", -4:"rgb(0,0,255)", -3:"rgb(100,100,255)", ... @@ -741,7 +810,7 @@ The color of the station name. - The color of associated stations (e.g. in scmv). + The color of associated stations, e.g., in scmv. The color of triggered stations. @@ -807,7 +876,7 @@ - The color of ground motion amplitudes in scmv. + The color of ground motion amplitudes in scmv. @@ -847,7 +916,7 @@ - The color of the selected zoom area (e.g. manual picker). + The color of the selected zoom area, e.g., in scolv picker. @@ -855,27 +924,32 @@ - The map legend background color. + The map legend background color. - The map legend border color. + The map legend border color. - The map legend text color. + The map legend text color. - The map legend header color. 
+ The map legend header color. + + Parameters controlling the color of origin symbols. The + size is controlled by the parameters + "scheme.mag.originSymbol*". + @@ -883,8 +957,8 @@ - Setting this parameter to true will not interpolate between - the depth steps and the color for a depth <= input is used. + Setting this parameter to true will not interpolate between + the depth steps and the color for a depth <= input is used. @@ -892,7 +966,7 @@ - The origin status colors (e.g. in event list). + The origin status colors, e.g., in event lists. @@ -908,12 +982,12 @@ - Text color of the message string. + Text color of the message string. - Text color of the version string. + Text color of the version string. @@ -922,7 +996,7 @@ - The line width of the marker (e.g. picks of manual picker). + The line width of the marker, e.g., picks of scolv picker). @@ -930,25 +1004,42 @@ - The line width of the records / traces. + The line width of the records/traces. - Configures antialiasing of records / traces. Antialiasing + Configures antialiasing of records/traces. Antialiasing needs more two times to storage space as non antialiasing but it improves visual quality. - Configures optimization of trace polylines. If activated - then lines on the same pixel line or same pixel row - collapse into single lines. + Configures optimization of trace polylines. If activated, + lines on the same pixel line or same pixel row collapse + into single lines. + + + + + Shows numerical values in the vertical axis and the min/mean + values between 0 and 1000 with a unit prefix such as + m (milli) or M (mega). + + Supported prefixes are: + Q (queta = 10**30), R (ronna = 10**27), Y (yotta = 10**24), + Z (zetta = 10**21), E (exa = 10**18), P (peta = 10**15), + T (tera = 10**12), G (giga = 10**9), M (mega = 10**6), + k (kilo = 10**3), + m (milli = 10**-3), µ (micro = 10**-6), n (nano = 10**-9), + p (pico = 10**-12), f (femto = 10**-15), a (atto = 10**-18), + z (zepto = 10**-21), y (yocto = 10**-24), r (ronto = 10**-27), + q (quekto = 10**-30). - + Mode for drawing record borders as box or line on top or bottom. @@ -962,17 +1053,18 @@ - The general base font of an application. This overrides - the default Qt application font. + The general base font of an application. This overrides + the default Qt application font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font. @@ -982,16 +1074,18 @@ - The smallest available font. If undefined the point size is 2 points smaller than the base font. + The smallest available font. If undefined, the point + size is 2 points smaller than the base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font @@ -1001,16 +1095,18 @@ - The default text font. If undefined the point size is 2 points larger than the base font. + The default text font. If undefined, the point size is 2 + points larger than the base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. 
- Defines the point size of the font + Defines the point size of the font. @@ -1020,16 +1116,18 @@ - The largest text font. If undefined the point size is 6 points larger than the base font. + The largest text font. If undefined, the point size is 6 + points larger than the base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font @@ -1039,16 +1137,18 @@ - Font used to highlight text. If undefined it equals the normal font except for a bold font face. + Font used to highlight text. If undefined, it equals the + normal font except for a bold font face. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font @@ -1058,16 +1158,18 @@ - The largest heading font. If undefined it uses a bold font face and a font size twice as large as the normal font. + The largest heading font. If undefined, it uses a bold + font face and a font size twice as large as the normal font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font. @@ -1077,16 +1179,18 @@ - The second largest heading font. If undefined it uses a bold font face and a font size twice as large as the base font. + The second largest heading font. If undefined, it uses a + bold font face and a font size twice as large as the base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font. @@ -1096,11 +1200,13 @@ - The smallest heading font. If undefined it uses a bold font face and a font size 4 points larger than the base font. + The smallest heading font. If undefined, it uses a bold + font face and a font size 4 points larger than the base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. @@ -1115,16 +1221,18 @@ - Font used for city labels. If undefined it equals the base font. + Font used for city labels. If undefined, it equals the + base font. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font. @@ -1134,16 +1242,19 @@ - Font used for version string in the splash dialog shown at application startup. If undefined it equals the base font with a bold font face and a font size of 12. + Font used for version string in the splash dialog shown + at application startup. If undefined, it equals the base + font with a bold font face and a font size of 12. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. 
The name is case + insensitive and may include a foundry name. - Defines the point size of the font + Defines the point size of the font. @@ -1153,16 +1264,19 @@ - Font used for the message text in the splash dialog shown at application startup. If undefined it equals the base font with a font size of 12. + Font used for the message text in the splash dialog + shown at application startup. If undefined, it equals + the base font with a font size of 12. - Sets the family name of the font. The name is case insensitive and may include a foundry name. + Sets the family name of the font. The name is case + insensitive and may include a foundry name. - Defines the point size of the font. + Defines the point size of the font. @@ -1173,9 +1287,13 @@ + + Display values with the precisions configured as the number + of decimal places. + - The precision of depth values. + The precision of depth values. @@ -1186,51 +1304,56 @@ - The precision of lat/lon values. + The precision of latitude/longintude values. - The precision of magnitude values. + The precision of magnitude values. - The precision of origin times. + The precision of origin times and creation times of + origins and focal mechanisms. - The precision of pick times. + The precision of pick times. - Precision of displayed offset/amp in all trace widgets. + Precision of displayed offset/amplitude in all trace + widgets. - Precision of RMS values. + Precision of RMS values. - Precision of uncertainty values, e.g. latitude errors. + Precision of uncertainty values, e.g., for latitude and + longitude. - Display distances in km? + Display distances in km? - Display times in localtime or UTC (default). + Display times in localtime. If disabled all times are + shown in UTC which is also the time zone of all values + stored in the database. @@ -1293,7 +1416,8 @@ - Defines maximum age of events to load. The value of all parameters are aggregated. + Defines maximum age of events to load. The value of all + parameters are aggregated. @@ -1319,17 +1443,17 @@ - Configuration of special applications modes. + Configuration of special applications modes. - Defines if application interaction is allowed. + Defines if application interaction is allowed. - Defines if the application should be launched in fullscreen - mode hiding title bar, main menu and status bar. + Defines if the application should be launched in fullscreen + mode hiding title bar, main menu and status bar. @@ -1355,14 +1479,11 @@ Control the Events tab, e.g. in scolv, showing the list of loaded events. - + Configure the columns of the event list that are visible initially. The first column containing the origin time is - always visible and cannot be hidden. Possible values are: - - Certainty, Type, M, MType, RMS, AzGap, Phases, Lat, Lon, Depth, DType, - Stat, FM, Origins, Agency, Author, Region, ID. + always visible and cannot be hidden. Custom columns can be added by eventlist.customColumn.* or eventlist.scripts.* parameters. @@ -1415,6 +1536,13 @@ Custom columns showing parameters extracted by scripts e.g. from origins or events. + + + Defines the export script to be called with the selected + event IDs in stdin when the corresponding action is triggered + from the context menu of the list of events. + + Name of custom column profiles to be registered. Comma @@ -1428,7 +1556,7 @@ The scolv documentation provides an example script. - + External script to invoke for each event list entry. The object represented by the list entry is serialized to XML and passed to the script on stdin. 
If the return @@ -1596,6 +1724,16 @@ Maximum depth + + + Minimum number of phases + + + + + Maximum number of phases + + Minimum magnitude @@ -1608,6 +1746,35 @@ + + + Parameters controlling the appearance of the TimeAgo column. + + + + Update interval in seconds. + + + + + + Mapping of time values in seconds to colors used as + cell backgroud color. E.g., a mapping from green + over yellow (5min), red (10min) to + transparent (30min) can be expressed as + "0:40FF40FF,300:FFFF40FF,600:FF4040FF,1800:FF4040FF,1801:00000000". + + + + + If time ago value falls between two color positions, + the resulting color will be interpolated linearly + between both colors by default. When the discrete + flag is set, the interpolation will be disabled. + + + + @@ -1620,7 +1787,7 @@ - + Configure the columns of the event edit origin table that are visible initially. Origin creation time and @@ -1647,27 +1814,31 @@ - Position of the column. If the configured position is less than 0 or if it - exceeds the total number of columns then the column is appended to the right. + Position of the column. If the configured position + is less than 0 or if it exceeds the total number of + columns, then the column is appended to the right. - Default value to display if the specified origin or event comment id was not found. + Default value to display if the specified origin or + event comment id was not found. - Mapping of comment values to colors used as text color. E.g. "foo:#000,bar:red". + Mapping of comment values to colors used as text + color. E.g. "foo:#000,bar:red". - + Configure the columns of the event edit focal mechanism tab that are visible initially. + Possible values are: Depth, M, Count, Misfit, STDR, AzGap, Stat, DC, CLVD, ISO, S1, D1, R1, S2, D2, R2, Agency, Author @@ -1699,7 +1870,8 @@ - The text size of the time ago label in case of an active alert. + The text size of the time ago label in case of an active + alert. diff --git a/etc/descriptions/global_hypo71.xml b/etc/descriptions/global_hypo71.xml index 89521b3..cb23308 100644 --- a/etc/descriptions/global_hypo71.xml +++ b/etc/descriptions/global_hypo71.xml @@ -10,30 +10,32 @@ Locator parameters: Hypo71 - + Temporary file used by Hypo71 to store calculation logs. - + Temporary file to write Hypo71 input data to. - + Temporary output file to read Hypo71 location data from. - + - Hypo71 default profile. - If no custom profile is specified, this profile will be used by the plugin when proceeding to a localization. + Hypo71 default profile. If no custom profile is specified, + this profile will be used by the plugin when proceeding to a + localization. - + - Bash script executed when calling the Hypo71 locator plugin for locating the earthquake. + Bash script executed when calling the Hypo71 locator plugin + for locating the earthquake. @@ -45,7 +47,8 @@ - Profiles containing the profile-specific velocity model and the Hypo71 parameters. + Profiles containing the profile-specific velocity model and + the Hypo71 parameters. @@ -59,7 +62,7 @@ It is generally the locator's name (Hypo71). - + File containing the profile parameters. @@ -76,10 +79,15 @@ - Custom patternID to use when generating origin publicID + + Custom patternID to use when generating origin publicID. + - Specifies if the given publicD should be used for generating origin publicID + + Specifies if the given public ID shall be used for + generating origin publicID. 
+ diff --git a/etc/descriptions/global_iloc.xml b/etc/descriptions/global_iloc.xml index 94dd874..34962a4 100644 --- a/etc/descriptions/global_iloc.xml +++ b/etc/descriptions/global_iloc.xml @@ -3,16 +3,16 @@ global - Locator in SeisComP implemented by the plugin lociloc. + Locator in SeisComP implemented by the plugin lociloc. Locator parameters: iLoc - + - iLoc directory for auxialiary files and directories. Some + iLoc directory for auxiliary files and directories. Some of them must be provided from the iLoc website. Read the documentation for their installation. @@ -32,9 +32,9 @@ - iLoc profile name. - Multiples names may be set separated by comma. - Each profile can have different velocity or parameters. + List of iLoc profile name(s). Separate multiple names by comma. + Each profile can have different velocity or parameters. The + must be defined separate by iLoc profiles. @@ -60,17 +60,17 @@ - Use regional seismic travel-time tables + Use regional seismic travel-time tables. - Use regional seismic travel-time tables for Pn and Sn + Use regional seismic travel-time tables for Pn and Sn. - Use regional seismic travel-time tables for Pg and Lg + Use regional seismic travel-time tables for Pg and Lg. @@ -78,7 +78,7 @@ Use local velocity model if defined in LocalVmodel. - + Full path to a file containing the local velocity model. Requires: UseLocalTT = true. Empty string or unset or @@ -98,124 +98,125 @@ - Perform neighbourhood algorithm + Perform neighbourhood algorithm. Neighbourhood Algorithm: Search radius around initial - epicentre + epicentre. Neighbourhood Algorithm: Search radius around initial - depth + depth. Neighbourhood Algorithm: Search radius around initial - origin time + origin time. - + Neighbourhood Algorithm: p-value for norm to compute - misfit [1,2] + misfit. - + - Neighbourhood Algorithm: Maximum number of iterations + Neighbourhood Algorithm: Maximum number of iterations. - + Neighbourhood Algorithm: Number of cells to be resampled - at each iteration + at each iteration. - + - Neighbourhood Algorithm: Size of initial sample + Neighbourhood Algorithm: Size of initial sample. - + - Neighbourhood Algorithm: Size of subsequent samples + Neighbourhood Algorithm: Size of subsequent samples. - + - Depth resolution: Minimum number of depth phases for depdp + Depth resolution: Minimum number of depth phases. - Depth resolution: Maximum local distance + Depth resolution: Maximum local distance. - + - Depth resolution: Minimum number of local defining stations + Depth resolution: Minimum number of local defining stations. - Depth resolution: Maximum distance for using S-P travel-time differences. + Depth resolution: Maximum epicentral distance for + using S-P travel-time differences. - + - Depth resolution: Minimum number of defining S-P phase pairs + Depth resolution: Minimum number of defining S-P phase pairs. - + Depth resolution: Minimum number of defining core reflection phases - Depth resolution: Maximum depth error for crustal free-depth + Depth resolution: Maximum depth error for crustal free-depth. - Depth resolution: Maximum depth error for deep free-depth + Depth resolution: Maximum depth error for deep free-depth. - Linearized inversion: Account for correlated errors + Linearized inversion: Account for correlated errors. - Linearized inversion: Used to exclude big residuals from solution + Linearized inversion: Used to exclude big residuals from solution. - Linearized inversion: Allow damping of model vector + Linearized inversion: Allow damping of model vector. 
- + - Linearized inversion: Minimum number of iterations + Linearized inversion: Minimum number of iterations. - + - Linearized inversion: Maximum number of iterations + Linearized inversion: Maximum number of iterations. - + - Linearized inversion: Minimum number of defining phases + Linearized inversion: Minimum number of defining phases. diff --git a/etc/descriptions/global_locrouter.xml b/etc/descriptions/global_locrouter.xml index 9d1b56e..b1ca2ee 100644 --- a/etc/descriptions/global_locrouter.xml +++ b/etc/descriptions/global_locrouter.xml @@ -12,7 +12,7 @@ Locator parameters: Router. This locator requires the plugin "locrouter" to be loaded. - + A GeoJSON or BNA file defining locator profiles by region. Supported polygon attributes: diff --git a/etc/descriptions/global_mb.xml b/etc/descriptions/global_mb.xml index dc3167d..9b1dc1c 100644 --- a/etc/descriptions/global_mb.xml +++ b/etc/descriptions/global_mb.xml @@ -11,25 +11,23 @@ Body wave magnitude at teleseismic distances measured at 1 s period. - - + + + Parameters for computing mb magnitudes from mb amplitudes. + + - Parameters for computing mb magnitudes from mb amplitudes. + Minimum epicentral distance for computing mb. Note: According + to the IASPEI recommendations in 2013, the minimum distance + should be 20 deg. - - - Minimum epicentral distance for computing mb. Note: According - to the IASPEI recommendations in 2013, the minimum distance - should be 20 deg. - - - - - Maximum epicentral distance for computing mb. - - - - + + + + Maximum epicentral distance for computing mb. + + + diff --git a/etc/descriptions/global_mb_bb.xml b/etc/descriptions/global_mb_bb.xml index f3b6d64..eab5ab1 100644 --- a/etc/descriptions/global_mb_bb.xml +++ b/etc/descriptions/global_mb_bb.xml @@ -11,25 +11,23 @@ The body wave magnitude at teleseismic distances similar to mb. - - + + + Parameters for computing mB magnitudes from mB amplitudes. + + - Parameters for computing mB magnitudes from mB amplitudes. + Minimum epicentral distance for computing mB. Note: According + to the IASPEI recommendations in 2013, the minimum distance + should be 20 deg. - - - Minimum epicentral distance for computing mB. Note: According - to the IASPEI recommendations in 2013, the minimum distance - should be 20 deg. - - - - - Maximum epicentral distance for computing mB. - - - - + + + + Maximum epicentral distance for computing mB. + + + diff --git a/etc/descriptions/global_mb_idc.xml b/etc/descriptions/global_mb_idc.xml index 1bc94a7..3e272ea 100644 --- a/etc/descriptions/global_mb_idc.xml +++ b/etc/descriptions/global_mb_idc.xml @@ -17,19 +17,17 @@ from the event. - - - - - Location of the station specific Q table. If not - specified then @DATADIR@/magnitudes/IDC/qfvc.ml will be - used as fallback. {net}, {sta} and {loc} are placeholders - which will be replaced with the concrete network code, - station code and location code. - - - - + + + + Location of the station specific Q table. If not + specified then @DATADIR@/magnitudes/IDC/qfvc.ml will be + used as fallback. {net}, {sta} and {loc} are placeholders + which will be replaced with the concrete network code, + station code and location code. 
+ + + diff --git a/etc/descriptions/global_md.xml b/etc/descriptions/global_md.xml index 81f1c44..bed4560 100644 --- a/etc/descriptions/global_md.xml +++ b/etc/descriptions/global_md.xml @@ -1,115 +1,113 @@ - - global - - Duration magnitude plugin - + + global + + Duration magnitude plugin + Duration magnitude plugin - - - - - Default filter type to use before processing and after deconvolution. It's possible to set : - 1 for a Wood-Anderson seismometer - 2 for a 5sec generic Seismometer - 3 for a WWSSN LP seismometer - 4 for a WSSN SP seismometer - 5 for a Generic Seismometer - 6 for a Butterworth Low pass filter - 7 for a Butterworth High pass filter - 8 for a Butterworth Band pass filter - 9 for a 1Hz eigen-frequency L4C seismometer - - - - - taper applied to the signal - - - - - signal length used to compute the duration magnitude - - - - - Butterworth filter parameter applied to the signal - - - - - Maximum depth at which duration magnitude is valid - - - - - Maximum distance between earthquake and station at which duration magnitude is valid - - - - - Signal to noise ratio below which the coda is reached - - - - - Maximum expected duration magnitude value - This is used to find how much data should be loaded for a given station by reversing the formula - - - - - FMA regional coefficient - See Hypo2000 manual - - - - - FMB regional coefficient - See Hypo2000 manual - - - - - FMD regional coefficient - See Hypo2000 manual - - - - - FMF regional coefficient - See Hypo2000 manual - - - - - FMZ regional coefficient - See Hypo2000 manual - - - - - Linear correction - - - - - Offset - - - - - Station correction - - - - + + + + Default filter type to use before processing and after deconvolution. It's possible to set : + 1 for a Wood-Anderson seismometer + 2 for a 5sec generic Seismometer + 3 for a WWSSN LP seismometer + 4 for a WSSN SP seismometer + 5 for a Generic Seismometer + 6 for a Butterworth Low pass filter + 7 for a Butterworth High pass filter + 8 for a Butterworth Band pass filter + 9 for a 1Hz eigen-frequency L4C seismometer + + + + + taper applied to the signal + + + + + signal length used to compute the duration magnitude + + + + + Butterworth filter parameter applied to the signal + + + + + Maximum depth at which duration magnitude is valid + + + + + Maximum distance between earthquake and station at which duration magnitude is valid + + + + + Signal to noise ratio below which the coda is reached + + + + + Maximum expected duration magnitude value + This is used to find how much data should be loaded for a given station by reversing the formula + + + + + FMA regional coefficient + See Hypo2000 manual + + + + + FMB regional coefficient + See Hypo2000 manual + + + + + FMD regional coefficient + See Hypo2000 manual + + + + + FMF regional coefficient + See Hypo2000 manual + + + + + FMZ regional coefficient + See Hypo2000 manual + + + + + Linear correction + + + + + Offset + + + + + Station correction + + + diff --git a/etc/descriptions/global_ml.xml b/etc/descriptions/global_ml.xml index 4304d53..0259856 100644 --- a/etc/descriptions/global_ml.xml +++ b/etc/descriptions/global_ml.xml @@ -25,74 +25,76 @@ Standard local (Richter) magnitude - - + + + Parameters for measuring ML amplitudes. Add more parameters + by adding an amplitude type profile 'ML', + + + + + + - Parameters for measuring ML amplitudes. Add more parameters - by adding an amplitude type profile 'ML', + The filter applied to raw records before applying + Wood-Anderson simulation. Default: no pre-filtering. 
- - - The filter applied to raw records before applying - Wood-Anderson simulation. Default: no pre-filtering. - - - - - This parameter allows to set how the amplitude is measured. - Either by finding the absolute maximum of the demeaned - trace (AbsMax), the difference of maximum and minimum of - the signal window (MinMax) or the maximum peak-trough - of one cycle (PeakTrough). - - Note that if absMax is already explicitly configured, this - parameter has no effect. - - - - - Defines the combiner operation for the amplitudes measured - on either both horizontal component. The default is to - use the average. Allowed values are: "average", - "min", "max" and "geometric_mean". - "geometric_mean" corresponds to averaging single-trace - magnitudes instead of their amplitudes. - - - - - - - + + - Parameters for computing ML magnitudes from ML amplitudes. + Applying Wood-Anderson simulation. To achieve displacement + records without WA simulation, an integration filter can + be applied with the pre-filter. - - - The calibration function log10(A0). + + + + This parameter allows to set how the amplitude is measured. + Either by finding the absolute maximum of the demeaned + trace (AbsMax), the difference of maximum and minimum of + the signal window (MinMax) or the maximum peak-trough + of one cycle (PeakTrough). - Format: any list of distance-value pairs separated by - comma. Values within pairs are separated by colon. + Note that if absMax is already explicitly configured, this + parameter has no effect. + + + + + Defines the combiner operation for the amplitudes measured + on either both horizontal component. The default is to + use the average. Allowed values are: "average", + "min", "max" and "geometric_mean". + "geometric_mean" corresponds to averaging single-trace + magnitudes instead of their amplitudes. + + + + + + Parameters for computing ML magnitudes from ML amplitudes. + + + + + + The calibration function log10(A0). - Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" - specifies 4 distance intervals from - 0...60, 60...100, 100...400 and 400...1000 km distance. - Within these intervals log10(A0) is interpolated linearly - between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, - respectively. + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. - Note: The first and last distance samples limit the - maximum distance range for computing ML. - - - - - Maximum epicentral distance for computing ML. - No distance limitation for maxDistanceKm = -1. - - - - + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. + + Note: The first and last distance samples limit the + maximum distance range for computing ML. + + + diff --git a/etc/descriptions/global_ml_idc.xml b/etc/descriptions/global_ml_idc.xml index f65de8f..36e562d 100644 --- a/etc/descriptions/global_ml_idc.xml +++ b/etc/descriptions/global_ml_idc.xml @@ -11,19 +11,17 @@ CTBTO/IDC local magnitude. - - - - - Location of the station specific attenuation table. If not - specified then @DATADIR@/magnitudes/IDC/global.ml will be - used as fallback. {net}, {sta} and {loc} are placeholders - which will be replaced with the concrete network code, - station code and location code. 
- - - - + + + + Location of the station specific attenuation table. If not + specified then @DATADIR@/magnitudes/IDC/global.ml will be + used as fallback. {net}, {sta} and {loc} are placeholders + which will be replaced with the concrete network code, + station code and location code. + + + diff --git a/etc/descriptions/global_mlc.xml b/etc/descriptions/global_mlc.xml index e856a0e..632035d 100644 --- a/etc/descriptions/global_mlc.xml +++ b/etc/descriptions/global_mlc.xml @@ -101,188 +101,219 @@ Custom magnitude for local events measured on horizontal components - - + + + Parameters for measuring MLc amplitudes. Add more parameters + by adding an amplitude type profile 'MLc', + + + + + + + - Parameters for measuring MLc amplitudes. Add more parameters - by adding an amplitude type profile 'MLc', + The filter applied to raw records before applying + Wood-Anderson simulation. - + + + + Applying Wood-Anderson simulation. To achieve displacement + records without WA simulation, an integration filter can + be applied with the pre-filter. + + + + + Scaling value multiplied to the measured amplitudes to + match the amplitude units expected by the magnitude + calibration function. + + Expected amplitudes are + in units of mym but actual amplitudes provided from + Wood-Anderson-corrected seismograms are in units of mm: + amplitudeScale = 1000. + + If data are not corrected for WA, measured amplitudes + take the unit of gain-corrected data considering the + preFilter: + amplitudeScale converts between units of measured and + excpected amplitude. + + + + + Type for measuring amplitudes. + + AbsMax: absolute maximum + + MinMax: half difference between absolute maximum and minimum + + PeakTrough: half difference between maximum and minimum + on a half cycle + + + + + Define how to combine the amplitudes measured on both + horizontals components: + + min: take the minimum + + max: take the maxium + + avgerage: form the average + + geometric_mean: form the geometric mean + + + + + + Parameters for computing MLc magnitudes from MLc amplitudes. + + + + + + Considered distance measure between source and receiver. + + hypocentral: hypocentral distance + + epicentral: epicentral distance + + + + + The minimum distance for computing magnitudes from amplitudes. + Negative values deactivate the check. + + + + + The maximum distance for computing magnitudes from amplitudes. + Negative values deactivate the check. + + + + + The minimum source depth below which magnitudes are computed. + + + + + The maximum source depth up to which magnitudes are computed. + + + + + Type of magnitude calibration formula to be considered. + The calibration parameters are considered accordingly. + + "parametric": consider parameters of parametric + configuration in parametric section + + "A0": consider parameters of non-parametric + configuration in A0 section. + + + + + Parameters for A0, non-parametric magnitude calibration. + Considered if magnitude.MLc.calibrationType = "A0". + + - The filter applied to raw records before applying - Wood-Anderson simulation. - - - - - Applying Wood-Anderson simulation. To achieve displacement - records without WA simulation, an integration filter can - be applied with the pre-filter. - - - - - Scaling value multiplied to the measured amplitudes to - match the amplitude units expected by the magnitude - calibration function. + The non-parametric calibration function log10(A0). 
- Expected amplitudes are - in units of mym but actual amplitudes provided from - Wood-Anderson-corrected seismograms are in units of mm: - amplitudeScale = 1000. + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. - If data are not corrected for WA, measured amplitudes - take the unit of gain-corrected data considering the - preFilter: - amplitudeScale converts between units of measured and - excpected amplitude. - - - - - Type for measuring amplitudes. Available: + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. - AbsMax: absolute maximum - - MinMax: half difference between absolute maximum and minimum - - PeakTrough: half difference between maximum and minimum - on a half cycle - - - - - Define how to combine the amplitudes measured on both - horizontals components: - - min: take the minimum - - max: take the maxium - - avgerage: form the average - - geometric_mean: form the geometric mean + Note: The first and last distance samples limit the + maximum distance range for computing MLv. - - - + - Parameters for computing MLc magnitudes from MLc amplitudes. + Parameters for parametric magnitude calibration: + MLc = log10(A) + c7 * e^(c8 * r) + c6 * h + c3 * log10(r / c5) + c2 * (r + c4) + c1 + c0(station) + + Considered if magnitude.MLc.calibrationType = "parametric". - + - Considered distance measure between source and receiver. - Possible values are - - hypocentral: hypocentral distance - - epicentral: epicentral distance + Station correction. This is the calibration value 'c0' + applied in the + parametric magnitude calibration formula. - + - The minimum distance for computing magnitudes from amplitudes. - Negative values deactivate the check. + The calibration value 'c1' applied in the + parametric magnitude calibration formula. - + - The maximum distance for computing magnitudes from amplitudes. - Negative values deactivate the check. + The calibration value 'c2' applied in the + magnitude calibration formula + parametric magnitude calibration formula. - + - The maximum source depth up to which magnitudes are computed. + The calibration value 'c3' applied in the + parametric magnitude calibration formula. - + - Type of magnitude calibration formula to be considered. - The calibration parameters are considered accordingly. - Currently supported are - - "parametric": consider parameters of parametric - configuration in parametric section - - "A0": consider parameters of non-parametric - configuration in A0 section. + The calibration value 'c4' applied in the + parametric magnitude calibration formula. - + - Parameters for A0, non-parametric magnitude calibration. - Considered if magnitude.MLc.calibrationType = "A0". + The calibration value 'c5' applied in the + parametric magnitude calibration formula. - - - The non-parametric calibration function log10(A0). - - Format: any list of distance-value pairs separated by - comma. Values within pairs are separated by colon. - - Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" - specifies 4 distance intervals from - 0...60, 60...100, 100...400 and 400...1000 km distance. - Within these intervals log10(A0) is interpolated linearly - between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, - respectively. 
- - Note: The first and last distance samples limit the - maximum distance range for computing MLv. - - - - + + - Parameters for parametric magnitude calibration: - MLc = log10(A) + c3 * log10(r/c5) + c2 * (r + c4) + c1 + c0(station) - - Considered if magnitude.MLc.calibrationType = "parametric". + The calibration value 'c6' applied in the + parametric magnitude calibration formula. - - - Station correction. This is the calibration value 'c0' - applied in the - parametric magnitude calibration formula. - - - - - The calibration value 'c1' applied in the - parametric magnitude calibration formula. - - - - - The calibration value 'c2' applied in the - magnitude calibration formula - parametric magnitude calibration formula. - - - - - The calibration value 'c3' applied in the - parametric magnitude calibration formula. - - - - - The calibration value 'c4' applied in the - parametric magnitude calibration formula. - - - - - The calibration value 'c5' applied in the - parametric magnitude calibration formula. - - - + + + + The calibration value 'c7' applied in the + parametric magnitude calibration formula. + + + + + The calibration value 'c8' applied in the + parametric magnitude calibration formula. + + + + + The calibration value 'H' applied in the + parametric magnitude calibration formula for forming + h. Read the documentation. + + - + diff --git a/etc/descriptions/global_mlh.xml b/etc/descriptions/global_mlh.xml index 19ad41b..29b047f 100644 --- a/etc/descriptions/global_mlh.xml +++ b/etc/descriptions/global_mlh.xml @@ -16,42 +16,38 @@ at the Liverpool developer meeting. - - - - - Define combiner operation for both horizontals (min, max, avg). - - - - - MLh clipping level, in raw counts, eg. 80% of 2^23 = 6710886. - - - - - - - - - Defines attenuation parameters for MLh. - Format: "UpToKilometers A B; UpToNextKilometers A B;". - Example: "30 nomag; 60 0.018 2.17; 700 0.0038 3.02". - The first parameter set "30 nomag" means that up to 30km - from the sensor the magnitude should not be calculated. + + + + Define combiner operation for both horizontals (min, max, avg). + + + + + MLh clipping level, in raw counts, eg. 80% of 2^23 = 6710886. + + + + + + + Defines attenuation parameters for MLh. + Format: "UpToKilometers A B; UpToNextKilometers A B;". + Example: "30 nomag; 60 0.018 2.17; 700 0.0038 3.02". + The first parameter set "30 nomag" means that up to 30km + from the sensor the magnitude should not be calculated. - Note: No MLh computation if params is empty. - - - - - Maximum depth allowed to compute the magnitude. Make sure to - also update the MLh amplitude bindings accordingly to this - value. maxDepth should not be greater than 80km. - - - - + Note: No MLh computation if params is empty. + + + + + Maximum depth allowed to compute the magnitude. Make sure to + also update the MLh amplitude bindings accordingly to this + value. maxDepth should not be greater than 80km. + + + diff --git a/etc/descriptions/global_mlr.xml b/etc/descriptions/global_mlr.xml index 65ff6e1..416dcb7 100644 --- a/etc/descriptions/global_mlr.xml +++ b/etc/descriptions/global_mlr.xml @@ -8,26 +8,24 @@ - - + + + Parameter for computing MLr magnitudes for GNS/Geonet from MLv amplitudes. + + - Parameter for computing MLr magnitudes for GNS/Geonet from MLv amplitudes. - - - - Defines Stations Corrections parameters for MLr (GNS/Geonet Local magnitude). - Format: "UpToKilometers A ; UpToNextKilometers A ;". - Example: "30 nomag; 60 0.018 ; 700 0.0038 ". 
- The first parameter set "30 nomag" means that up to 30km - from the sensor the magnitude should not be calculated. - A is used as station correction. - "nomag" is used to disable station magnitudes. + Defines Stations Corrections parameters for MLr (GNS/Geonet Local magnitude). + Format: "UpToKilometers A ; UpToNextKilometers A ;". + Example: "30 nomag; 60 0.018 ; 700 0.0038 ". + The first parameter set "30 nomag" means that up to 30km + from the sensor the magnitude should not be calculated. + A is used as station correction. + "nomag" is used to disable station magnitudes. - Note: No MLr computation if params is empty. - - - - + Note: No MLr computation if params is empty. + + + diff --git a/etc/descriptions/global_mlv.xml b/etc/descriptions/global_mlv.xml index 1be3f78..28c3090 100644 --- a/etc/descriptions/global_mlv.xml +++ b/etc/descriptions/global_mlv.xml @@ -25,64 +25,67 @@ Local (Richter) magnitude measured on the vertical component - - + + + Parameters for measuring MLv amplitudes. Add more parameters + by adding an amplitude type profile 'MLv', + + + + + + + - Parameters for measuring MLv amplitudes. Add more parameters - by adding an amplitude type profile 'MLv', + The filter applied to raw records before applying + Wood-Anderson simulation. Default: no pre-filtering. - - - The filter applied to raw records before applying - Wood-Anderson simulation. Default: no pre-filtering. - - - - - This parameter allows to set how the amplitude is measured. - Either by finding the absolute maximum of the demeaned - trace (AbsMax), the difference of maximum and minimum of - the signal window (MinMax) or the maximum peak-trough - of one cycle (PeakTrough). - - Note that if absMax is already explicitly configured, this - parameter has no effect. - - - - - - - + + - Parameters for computing MLv magnitudes from MLv amplitudes. + Applying Wood-Anderson simulation. To achieve displacement + records without WA simulation, an integration filter can + be applied with the pre-filter. - - - The calibration function log10(A0). + + + + This parameter allows to set how the amplitude is measured. + Either by finding the absolute maximum of the demeaned + trace (AbsMax), the difference of maximum and minimum of + the signal window (MinMax) or the maximum peak-trough + of one cycle (PeakTrough). - Format: any list of distance-value pairs separated by - comma. Values within pairs are separated by colon. + Note that if absMax is already explicitly configured, this + parameter has no effect. + + + + + + Parameters for computing MLv magnitudes from MLv amplitudes. + + + + + + The calibration function log10(A0). - Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" - specifies 4 distance intervals from - 0...60, 60...100, 100...400 and 400...1000 km distance. - Within these intervals log10(A0) is interpolated linearly - between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, - respectively. + Format: any list of distance-value pairs separated by + comma. Values within pairs are separated by colon. - Note: The first and last distance samples limit the - maximum distance range for computing MLv. - - - - - Maximum epicentral distance for computing MLv. - No distance limitation for maxDistanceKm=-1 - - - - + Example: "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85" + specifies 4 distance intervals from + 0...60, 60...100, 100...400 and 400...1000 km distance. + Within these intervals log10(A0) is interpolated linearly + between -1.3...-2.8, -2.8...-3.0, -3.0...-4.5 and -4.5...-5.8, + respectively. 
+ + Note: The first and last distance samples limit the + maximum distance range for computing MLv. + + + diff --git a/etc/descriptions/global_mn.xml b/etc/descriptions/global_mn.xml index 242f70d..b1ca1c6 100644 --- a/etc/descriptions/global_mn.xml +++ b/etc/descriptions/global_mn.xml @@ -6,171 +6,163 @@ Nuttli magnitude for Canada and other Cratonic regions - - + + + Amplitude control parameters for MN (Nuttli magnitude). + + - Amplitude control parameters for MN (Nuttli magnitude). + The travel time table set compiled for LocSAT. The tables + are located in "share/locsat/tables/[vmodel].*". - - - The travel time table set compiled for LocSAT. The tables - are located in "share/locsat/tables/[vmodel].*". - - - - - - + + + + + Regionalization of MN (Nuttli magnitude). + + - Regionalization of MN (Nuttli magnitude). + The path to the BNA file which defines the valid region + for the MN magnitude. Note that the entire path from + source to receiver must lie entirely within the polygon(s). - - - The path to the BNA file which defines the valid region - for the MN magnitude. Note that the entire path from - source to receiver must lie entirely within the polygon(s). - - - - - The offset applied to the MN network magnitude to - estimate Mw(MN). If not configured then no Mw estimation - will be applied. - - - - + + + + The offset applied to the MN network magnitude to + estimate Mw(MN). If not configured then no Mw estimation + will be applied. + + + - - + + + Parameters for measuring AMN amplitudes. + + - Parameters for measuring AMN amplitudes. + Whether to use RMS ratio of signal and noise window for + SNR computation or the ration of the peak-trough + amplitudes of either window. - - - Whether to use RMS ratio of signal and noise window for - SNR computation or the ration of the peak-trough - amplitudes of either window. - - - - - The configurable filter such that the V measurement is - made on a filtered trace. By default, filtering is not - enabled. - - See https://docs.gempa.de/seiscomp/current/base/filter-grammar.html - for how to specify the filter. - - - - - The minimum phase velocity used to determine the signal - window end. - - - - - The maximum phase velocity used to determine the signal - window start. - - - - - The length of the SNR window. - - - - - The offset of the noise window. A positive value will move - the computed noise window to the left on the time axis, - a negative value will move it to the right on the time axis. - - - - - The priority list of phase onsets to compute the signal - start window. Except for Vmin and Vmax, associated phases - (arrivals) must be present in the origin for this - particular phase. Picked phases are only considered if - the origin is a manual origin or the pick is a - manual pick. The first value in the list which can be - retrieved or computed, is selected. - - Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax - - - - - The priority list of phase onsets to compute the signal - end window. Except for Vmin and Vmax, associated phases - (arrivals) must be present in the origin for this - particular phase. Picked phases are only considered if - the origin is a manual origin or the pick is a - manual pick. The first value in the list which can be - retrieved or computed, is selected. - - Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax - - - - - - + + - Parameters for computing MN magnitudes from AMN amplitudes. + The configurable filter such that the V measurement is + made on a filtered trace. 
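The log10(A0) calibration string described above is just a piecewise-linear table. A small sketch (plain Python, independent of SeisComP; the function names are made up for illustration) of parsing and interpolating it:

```python
def parse_loga0(spec):
    """Parse a calibration string such as
    "0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85"
    into sorted (distance_km, log10_A0) pairs."""
    pairs = []
    for item in spec.split(","):
        dist, value = item.split(":")
        pairs.append((float(dist), float(value)))
    return sorted(pairs)

def loga0(pairs, r_km):
    """Linearly interpolate log10(A0) at distance r_km; distances outside
    the first/last sample are rejected, mirroring the documented limit."""
    if r_km < pairs[0][0] or r_km > pairs[-1][0]:
        raise ValueError("distance outside calibrated range")
    for (d0, v0), (d1, v1) in zip(pairs, pairs[1:]):
        if d0 <= r_km <= d1:
            return v0 + (v1 - v0) * (r_km - d0) / (d1 - d0)

table = parse_loga0("0:-1.3,60:-2.8,100:-3.0,400:-4.5,1000:-5.85")
print(loga0(table, 80.0))   # ≈ -2.9, halfway between -2.8 and -3.0
```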
By default, filtering is not + enabled. + + See https://docs.gempa.de/seiscomp/current/base/filter-grammar.html + for how to specify the filter. - - - The minimum SNR required for a magnitude to pass - the QC check. The station magnitude will be computed - anyway but if the SNR is below this threshold it will - be associated with weight zero and will not contribute - to the network magnitude. - - - - - The minimum period required for a magnitude to pass - the QC check. The station magnitude will be computed - anyway but if the period is below this threshold it will - be associated with weight zero and will not contribute - to the network magnitude. - - - - - The maximum period allowed for a magnitude to pass - the QC check. The station magnitude will be computed - anyway but if the period is above this threshold it will - be associated with weight zero and will not contribute - to the network magnitude. - - - - - The minimum distance required for a magnitude to pass - the QC check. The station magnitude will be computed - anyway but if the distance is below this threshold it - will be associated with weight zero and will not contribute - to the network magnitude. - - - - - The maximum distance allowed for a magnitude to be - computed. If the distance exceeds this threshold then - the computation will be canceled and no station - magnitude will be available at all. - - - - + + + + The minimum phase velocity used to determine the signal + window end. + + + + + The maximum phase velocity used to determine the signal + window start. + + + + + The length of the SNR window. + + + + + The offset of the noise window. A positive value will move + the computed noise window to the left on the time axis, + a negative value will move it to the right on the time axis. + + + + + The priority list of phase onsets to compute the signal + start window. Except for Vmin and Vmax, associated phases + (arrivals) must be present in the origin for this + particular phase. Picked phases are only considered if + the origin is a manual origin or the pick is a + manual pick. The first value in the list which can be + retrieved or computed, is selected. + + Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax + + + + + The priority list of phase onsets to compute the signal + end window. Except for Vmin and Vmax, associated phases + (arrivals) must be present in the origin for this + particular phase. Picked phases are only considered if + the origin is a manual origin or the pick is a + manual pick. The first value in the list which can be + retrieved or computed, is selected. + + Allowed tokens: Pg, Pn, P, Sg, Sn, S, Lg, Rg, Vmin, Vmax + + + + + + Parameters for computing MN magnitudes from AMN amplitudes. + + + + The minimum SNR required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the SNR is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. + + + + + The minimum period required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is below this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. + + + + + The maximum period allowed for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the period is above this threshold it will + be associated with weight zero and will not contribute + to the network magnitude. 
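The two SNR variants mentioned for the AMN amplitude (RMS ratio versus peak-trough ratio of the signal and noise windows) can be written down in a few lines. This is a plain-Python illustration of the two definitions, not the SeisComP code path:

```python
def snr_rms(signal, noise):
    """SNR as the ratio of RMS values of the signal and noise windows."""
    rms = lambda w: (sum(x * x for x in w) / len(w)) ** 0.5
    return rms(signal) / rms(noise)

def snr_peak_trough(signal, noise):
    """SNR as the ratio of peak-trough amplitudes of the two windows."""
    return (max(signal) - min(signal)) / (max(noise) - min(noise))

sig = [0.1, 2.5, -2.0, 1.0]
noi = [0.2, -0.1, 0.15, -0.2]
print(snr_rms(sig, noi), snr_peak_trough(sig, noi))
```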
+ + + + + The minimum distance required for a magnitude to pass + the QC check. The station magnitude will be computed + anyway but if the distance is below this threshold it + will be associated with weight zero and will not contribute + to the network magnitude. + + + + + The maximum distance allowed for a magnitude to be + computed. If the distance exceeds this threshold then + the computation will be canceled and no station + magnitude will be available at all. + + + diff --git a/etc/descriptions/global_ms_20.xml b/etc/descriptions/global_ms_20.xml index 80d4a97..8df12bd 100644 --- a/etc/descriptions/global_ms_20.xml +++ b/etc/descriptions/global_ms_20.xml @@ -12,55 +12,51 @@ at around 20 s period. - - + + + Parameters for computing Ms_20 amplitudes. + + - Parameters for computing Ms_20 amplitudes. + Minimum group velocity used to compute signal time window. - - - Minimum group velocity used to compute signal time window. - - - - - Maximum group velocity used to compute signal time window. - - - - - - + + - Parameters for computing Ms_20 magnitudes from Ms_20 amplitudes. + Maximum group velocity used to compute signal time window. - - - Lower period limit of the signal for computing Ms_20. - - - - - Upper period limit of the signal for computing Ms_20. - - - - - Minimum epicentral distance for computing Ms_20. - - - - - Maximum epicentral distance for computing Ms_20. - - - - - Maximum depth for computing Ms_20. - - - - + + + + + Parameters for computing Ms_20 magnitudes from Ms_20 amplitudes. + + + + Lower period limit of the signal for computing Ms_20. + + + + + Upper period limit of the signal for computing Ms_20. + + + + + Minimum epicentral distance for computing Ms_20. + + + + + Maximum epicentral distance for computing Ms_20. + + + + + Maximum depth for computing Ms_20. + + + diff --git a/etc/descriptions/global_nonlinloc.xml b/etc/descriptions/global_nonlinloc.xml index 1afed79..6cde74d 100644 --- a/etc/descriptions/global_nonlinloc.xml +++ b/etc/descriptions/global_nonlinloc.xml @@ -3,148 +3,161 @@ global - NonLinLoc locator wrapper plugin for SeisComP. - NonLinLoc was written by Anthony Lomax (http://alomax.free.fr/nlloc). + NonLinLoc locator wrapper plugin for SeisComP. + NonLinLoc was written by Anthony Lomax (http://alomax.free.fr/nlloc). - PublicID creation pattern for an origin created by NonLinLoc. + PublicID creation pattern for an origin created by NonLinLoc. - + - Defines the output path for all native NonLinLoc input and output files. + Defines the output directory for all native NonLinLoc input and output files. - Save input files *.obs in outputPath for later processing. - Setting to false reduces file i/o and saves disk space. + Save input files *.obs in outputPath for later processing. + Setting to false reduces file i/o and saves disk space. - Save output files in outputPath for later processing or - for viewing by the Seismicity Viewer. - Setting to false reduces file i/o and saves disk space. + Save output files in outputPath for later processing or + for viewing by the Seismicity Viewer. + Setting to false reduces file i/o and saves disk space. - + - The default NonLinLoc control file to use. + The default NonLinLoc control file to use. Parameters + therein are overridden per profile. - The default pick error in seconds passed to NonLinLoc if a SeisComP pick - object does not provide pick time uncertainties. + The default pick error in seconds passed to NonLinLoc if a SeisComP pick + object does not provide pick time uncertainties. 
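The MN QC parameters above distinguish between checks that merely zero the station-magnitude weight and the distance cut-off that cancels the computation entirely. A sketch of that documented behaviour (threshold values are placeholders, not defaults shipped with SeisComP):

```python
def mn_station_weight(snr, period, dist_deg,
                      min_snr=2.0, min_period=3.0, max_period=30.0,
                      min_dist=0.5, max_dist=30.0):
    """Return None if no station magnitude should be computed at all,
    0.0 if it is computed but excluded from the network magnitude,
    and 1.0 if it passes all QC checks. Thresholds are placeholders."""
    if dist_deg > max_dist:
        return None           # computation cancelled beyond the maximum distance
    if snr < min_snr or period < min_period or period > max_period \
            or dist_deg < min_dist:
        return 0.0            # computed, but associated with weight zero
    return 1.0
```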
- Since NLL does not support fixing the depth natively so this - feature is emulated by settings the Z grid very tight around - the depth to be fixed. This value sets the Z grid spacing. + Since NLL does not support fixing the depth natively so this + feature is emulated by settings the Z grid very tight around + the depth to be fixed. This value sets the Z grid spacing. - Picks from stations with missing configuration will be - ignored. The origin will be relocated without that pick - if possible. + Picks from stations with missing configuration will be + ignored. The origin will be relocated without that pick + if possible. - If set to false, the plug-in throws - an excepection without locating. + If set to false, the plug-in throws + an excepection without locating. - Defines a list of active profiles to be used by the plugin. + Defines a list of active profiles to be used by the plugin. - Defines a regional profile that is used if a prelocation falls - inside the configured region. + Defines a regional profile that is used if a prelocation falls + inside the configured region. - earthModelID that is stored in the created origin. + Earth model ID stored in the created origin. - methodID that is stored in the created origin. + Method ID stored in the created origin. - Path to travel time tables (grids). + Path to travel time tables (grids) including the + full path and the names of the table files before + the phase name. + + Example: + + @DATADIR@/nonlinloc/iasp91/iasp91 for P and S tables + files + seiscomp/share/nonlinloc/iasp91/iasp91.[PS].* - Format of the station name used to select the right travel time table (grid) file - for a station. - By default only the station code is used (e.g. tablePath.P.@STA@.time.*), but - that doesn't allow to distinguish between multiple network codes or location codes - that use the same station code. - To overcome this limitation this parameter could be set in a more general way, for - example @NET@_@STA@_@LOC@. In this way NonLinLoc will look for - travel time table (grid) files of the form: tablePath.P.@NET@_@STA@_@LOC@.time.* - Where @NET@ @STA@ @LOC@ are just placeholder for the actual codes + Format of the station name used to select the right travel time table (grid) file + for a station. + By default only the station code is used (e.g. tablePath.P.@STA@.time.*), but + that doesn't allow to distinguish between multiple network codes or location codes + that use the same station code. + To overcome this limitation this parameter could be set in a more general way, for + example @NET@_@STA@_@LOC@. In this way NonLinLoc will look for + travel time table (grid) files of the form: tablePath.P.@NET@_@STA@_@LOC@.time.* + Where @NET@ @STA@ @LOC@ are just placeholder for the actual codes. - + - Control file of the current profile. If not set, the default - control file will be used instead. + Control file of the current profile overriding + parameters of the default control file. - Transformation type of the configured region. Supported are - SIMPLE and GLOBAL. - - Default: GLOBAL is assumed. + Transformation type of the configured region. + Supported are SIMPLE and GLOBAL. - Defines the 4 corner values of the epicentral region for selecting the profile. - The original epicentre must be within the region. + Defines the 4 corner values of the epicentral region for selecting the profile. + The original epicentre must be within the region. - If transform is GLOBAL: min_lat, min_lon, max_lat, max_lon. - The values define the geographic corner coordinates. 
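The interplay of tablePath and stationNameFormat may be easier to see with a tiny sketch that assembles the expected travel-time grid file pattern (plain Python; the helper function is hypothetical and only mirrors the naming scheme described above):

```python
def tt_grid_pattern(table_path, phase, station_fmt, net, sta, loc):
    """Build the travel-time grid file pattern from tablePath and a
    stationNameFormat such as '@NET@_@STA@_@LOC@'."""
    name = (station_fmt.replace("@NET@", net)
                       .replace("@STA@", sta)
                       .replace("@LOC@", loc))
    return f"{table_path}.{phase}.{name}.time.*"

print(tt_grid_pattern("share/nonlinloc/iasp91/iasp91", "P",
                      "@NET@_@STA@_@LOC@", "GE", "MOX", "00"))
# share/nonlinloc/iasp91/iasp91.P.GE_MOX_00.time.*
```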
Unit is degree. + If transform is GLOBAL: min_lat, min_lon, max_lat, max_lon. + The values define the geographic corner coordinates. + Unit: degree. - If transform is SIMPLE: xmin, ymin, xmax, ymax. - The values define the region relative to the configured origin. - Unit is km. + If transform is SIMPLE: min_x, min_y, max_x, max_y. + The values define the region relative to the origin + configured with "origin". + Unit: km. + + When this parameter is empty, the generated NonLinLoc + configuration is automatically overwritten with + TRANS GLOBAL. + Otherwise TRANS from "controlFile" applies. - Only used for transformation SIMPLE. Expects 2 values: latitude, longitude. - The value define the geographic origin of the area spanned by region. - Unit is degree. + Only used for transformation SIMPLE. Expects 2 values: latitude, longitude. + The value define the geographic origin of the area spanned by region. - Only used for transformation SIMPLE. Defines the rotation around the - origin of the defined region. + Only used for transformation SIMPLE. Defines the + rotation around the origin of the defined region. diff --git a/etc/descriptions/global_stdloc.xml b/etc/descriptions/global_stdloc.xml index 2c212d5..85da4f7 100644 --- a/etc/descriptions/global_stdloc.xml +++ b/etc/descriptions/global_stdloc.xml @@ -74,7 +74,7 @@ applies. - + Confidence level, between 0.5 and 1.0, used in computing the hypocenter confidence ellipsoid. diff --git a/etc/descriptions/inv2dlsv.xml b/etc/descriptions/inv2dlsv.xml deleted file mode 100644 index bb6753a..0000000 --- a/etc/descriptions/inv2dlsv.xml +++ /dev/null @@ -1,16 +0,0 @@ - - - - Converts SC3 inventory XML to dataless SEED. - - - inv2dlsv [in_xml [out_dataless]] - - - If in_xml is not given, stdin is used. If out_dataless is not given, - stdout is used. - - - - - diff --git a/etc/descriptions/invextr.xml b/etc/descriptions/invextr.xml index 52f2b21..e92ee4d 100644 --- a/etc/descriptions/invextr.xml +++ b/etc/descriptions/invextr.xml @@ -1,7 +1,9 @@ - Extract channels from inventory. + + Extract and clean or remove streams from inventory. + @@ -52,11 +54,15 @@ + + + diff --git a/etc/descriptions/msrtsimul.xml b/etc/descriptions/msrtsimul.xml index 346e061..e82d741 100644 --- a/etc/descriptions/msrtsimul.xml +++ b/etc/descriptions/msrtsimul.xml @@ -1,18 +1,18 @@ - + MiniSEED real time playback and simulation msrtsimul [OPTION] miniSEED-file - - - diff --git a/etc/descriptions/ql2sc.xml b/etc/descriptions/ql2sc.xml index 69bd984..8bb98ca 100644 --- a/etc/descriptions/ql2sc.xml +++ b/etc/descriptions/ql2sc.xml @@ -8,11 +8,6 @@ Number of seconds to fetch missed updates on start up. - - - Number of public objects to cache. - - Maximum number of notifiers to batch in one message. If set @@ -117,7 +112,7 @@ Include only preferred origin and magnitude information - + Request server to send keep alive message every 30s to prevent connection reset by firewalls on long idle @@ -127,12 +122,20 @@ - SQL like WHERE clause to filter the result set. + Server-side SQL like WHERE clause to filter the result set. The actual + available parameters depend on the QuakeLink server version. Use + 'telnet host port' followed by 'help select' to connect to a QuakeLink + server an request available parameters. 
- clause := condition[ AND|OR [(]clause[)]] __ - condition := MAG|DEPTH|LAT|LON|PHASES|OTIME|UPDATED [op float|time]|[IS [NOT] NULL] __ - op := =|>|>=|<|<=|eq|gt|ge|lt|ge __ - time := %Y,%m,%d[,%H,%M,%S,%f] + clause := condition[ AND|OR [(]clause[)]] + condition := MAG|DEPTH|LAT|LON|PHASES|DIST(lat,lon) op {float} | + DIST(lat,lon) IN [{float}, {float}] | + UPDATED|OTIME op time | + AGENCY|AUTHOR|STATUS|ESTATUS|EMODE|TYPE|CTYPE|DTYPE|REGION|MAG_T op 'string' | + MAG|DEPTH|LAT|LON|PHASES|OTIME|UPDATED IS [NOT] NULL + FELT|NOT FELT + op := =|!=|>|>=|<|<=|eq|gt|ge|lt|ge + time := %Y,%m,%d[,%H,%M,%S[,%f]] diff --git a/etc/descriptions/scalert.xml b/etc/descriptions/scalert.xml index 1877246..22b72c4 100644 --- a/etc/descriptions/scalert.xml +++ b/etc/descriptions/scalert.xml @@ -19,14 +19,14 @@ empty list (=""): all agencies are allowed - - - List of authors to consider picks and origins. The author - is extracted from the pick or the preferred origin of the event - and compared with the configured authors. - empty list (=""): all authors are allowed - - + + + List of authors to consider picks and origins. The author + is extracted from the pick or the preferred origin of the event + and compared with the configured authors. + empty list (=""): all authors are allowed + + @@ -56,28 +56,28 @@ - + The script to be called when a pick arrives. Network-, station code, pick publicID are passed as parameters $1, $2, $3. - + The script to be called when an amplitude arrives. Network-, station code, amplitude and amplitude public ID are passed as parameters $1, $2, $3, $4. - + The script to be called when a preliminary origin arrives. Latitude and longitude are passed as parameters $1 and $2. - + The script to be called when an event has been declared. The message string, a flag (1=new event, diff --git a/etc/descriptions/scamp.xml b/etc/descriptions/scamp.xml index 80f6a68..8f5eab9 100644 --- a/etc/descriptions/scamp.xml +++ b/etc/descriptions/scamp.xml @@ -97,7 +97,8 @@ + + + diff --git a/etc/descriptions/scanloc_multifeature.xml b/etc/descriptions/scanloc_multifeature.xml index 1699f1b..729d072 100644 --- a/etc/descriptions/scanloc_multifeature.xml +++ b/etc/descriptions/scanloc_multifeature.xml @@ -25,6 +25,23 @@ depth and RMS. + + + + This is the maximum allowed depth. Origins with + depth greater then this value get a score equal to + "defaultScore". + + + + + + This is the maximum allowed RMS. Origins with + residual greater than this value score of equal to + "defaultScore". + + + @@ -79,9 +96,6 @@ Origin depth is normalized to this value for computing the score contribution. Shallower depths contribute to larger score. - This is also the maximum allowed depth. Origins with - depth greater then this value get a score equal to - "defaultScore". @@ -96,9 +110,6 @@ Origin RMS is normalized to this value for computing the score contribution. Lower RMS contribute to larger score. - This is also the maximum allowed RMS. Origins with - residual greater than this value score equal to - "defaultScore". diff --git a/etc/descriptions/scautoloc.xml b/etc/descriptions/scautoloc.xml index 55b3b70..f6a5008 100644 --- a/etc/descriptions/scautoloc.xml +++ b/etc/descriptions/scautoloc.xml @@ -44,12 +44,12 @@ Time to keep picks in the buffer with respect to pick time, not creation time. - + Time to keep origins in buffer. - + Clean-up interval for removing old/unused objects. @@ -69,7 +69,7 @@ Maximum travel-time residual (unweighted) for a pick at a station to be used. 
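Reading the grammar above, a server-side filter is simply a string of conditions combined with AND/OR. The following is a purely syntactic illustration (the parameter values and agency are made up; what the server actually accepts depends on the QuakeLink version):

```python
# Hypothetical ql2sc host-profile filter string following the grammar above
ql2sc_filter = (
    "MAG >= 4.0 AND DEPTH < 100 "
    "AND UPDATED > 2024,1,1,0,0,0 "
    "AND AGENCY = 'GFZ'"
)
print(ql2sc_filter)
```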
- + Minimum number of phases for reporting origins. @@ -92,7 +92,7 @@ Maximum epicntral distance to stations for accepting picks. - + If the station count for stations at < 105 degrees distance exceeds this number, no picks at > 105 degrees will be @@ -114,17 +114,17 @@ amplitude object. If it is empty, the pick SNR is 10. - + Location of the grid file for nucleating origins. - + Location of the station configuration file for nucleating origins. - + The station file to be used when in offline mode. If no file is given the database is used. An example is given @@ -188,7 +188,7 @@ Activate for writing pick log files to "pickLog". - + Location of pick log file containing information about received picks. Activate "pickLogEnable" for writing the files. @@ -222,7 +222,7 @@ BOTH minAmplitude and minSNR need to be exceeded! - + Minimum number of XXL picks for forming an origin. Must be >= 4. @@ -323,9 +323,9 @@ @@ -483,6 +483,14 @@ + + + diff --git a/etc/descriptions/scautopick.xml b/etc/descriptions/scautopick.xml index 688b726..a09e712 100644 --- a/etc/descriptions/scautopick.xml +++ b/etc/descriptions/scautopick.xml @@ -10,41 +10,41 @@ - The leadTime defines the time in seconds to start picking on - waveforms before current time. + The leadTime defines the time in seconds to start picking on + waveforms before current time. - If enabled, picks can be made on waveforms which are older than - current time - "leadTime". Current time is the time - when the module was started. This allows to pick - historic data in real-time playbacks which are preserving the - record times. See e.g. the "msrtsimul" module. - This option deactivates "leadTime". Activate only for playbacks. + If enabled, picks can be made on waveforms which are older than + current time - "leadTime". Current time is the time + when the module was started. This allows to pick + historic data in real-time playbacks which are preserving the + record times. See e.g. the "msrtsimul" module. + This option deactivates "leadTime". Activate only for playbacks. - The initTime defines a time span in seconds for that the picker - is blind after initialization. This time is needed to initialize - the filter and depends on it. + The initTime defines a time span in seconds for that the picker + is blind after initialization. This time is needed to initialize + the filter and depends on it. - Interpolate gaps linearly? This is valid for gaps shorter - than thresholds.maxGapLength. + Interpolate gaps linearly? This is valid for gaps shorter + than thresholds.maxGapLength. - If enabled, all streams that are received by the picker are - used for picking. This option has only effect if a - file is used as input which contains more data than the - picker requests. If connected to a waveform server such as - SeedLink, the picker will only receive the data it is - subscribed to. + If enabled, all streams that are received by the picker are + used for picking. This option has only effect if a + file is used as input which contains more data than the + picker requests. If connected to a waveform server such as + SeedLink, the picker will only receive the data it is + subscribed to. @@ -59,15 +59,12 @@ values (bindings) override this value. - + The re-picker to use. By default only simple detections are emitted as picks. To enable re-picking on a time window around the detection, an algorithm (plugin) can be defined with this parameter. - Currently available: "AIC", "BK" or - "GFZ". - More options may be available by plugins. 
Configure related parameters in global bindings. @@ -87,12 +84,12 @@ their evaluation status. - + The secondary picker to use, e.g., for picking S-phases. - Currently available is: "S-L2". More options may - be available by plugins. Configure related parameters - in global bindings. + + More options may be available by plugins. Configure related + parameters in global bindings. @@ -108,34 +105,42 @@ - If enabled and "picker" or "spicker" is - configured, extra comments will be added to the resulting pick. + If enabled and "picker" or "spicker" is + configured, extra comments will be added to the resulting pick. - Supported comments: + Supported comments: - SNR: added if SNR >= 0, comment id is "SNR" + SNR: added if SNR >= 0, comment id is "SNR" - duration: added if the duration has been computed at the time - of the pick creation, which actually requires - "thresholds.maxDuration" to be configured - with a non-negative value. + duration: added if the duration has been computed at the time + of the pick creation, which actually requires + "thresholds.maxDuration" to be configured + with a non-negative value. - + - Configures the feature extraction type to use. Currently - available: "DFX". Configure related parameters - in global bindings. + If enabled then simplified pick and amplitude IDs will be created. + Rather than the configured publicID pattern, the following pattern + will be used: "%Y%m%d.%H%M%S.%f-@net.sta.loc.cha@". + + + + + Configures the feature extraction type to use. Currently + available: "DFX". Configure related parameters + in global bindings. - When configured, the usability of the features for locating - events depends on the used locator, e.g. LOCSAT. Read the - locator's documentation and configuration parameters. + When configured, the usability of the features for locating + events depends on the used locator, e.g. LOCSAT will by default + consider slowness and backazimuth when measured. Read the + locator's documentation and configuration parameters. - The amplitude types to be computed by the picker based on - picks. + The amplitude types to be computed by the picker based on + picks. @@ -144,35 +149,35 @@ - For which value on the filtered waveforms is a pick - detected. Station specific values override this value. + For which value on the filtered waveforms is a pick + detected. Station specific values override this value. - The value the filtered waveforms must reach to enable - detection again. Between triggerOn and triggerOff the - picker is blind and does not produce picks. Station - specific values override this value. + The value the filtered waveforms must reach to enable + detection again. Between triggerOn and triggerOff the + picker is blind and does not produce picks. Station + specific values override this value. - The maximum gap length in seconds to handle. - Gaps larger than this will cause the picker to be reset. + The maximum gap length in seconds to handle. + Gaps larger than this will cause the picker to be reset. - The time window used to compute a maximum (snr) amplitude - on the filtered waveforms. + The time window used to compute a maximum (snr) amplitude + on the filtered waveforms. - The time used together with measured amplitude and - `thresholds.minAmplOffset` for scaling the amplitude below which - the picker is inactive after a P pick. Read the documentation! + The time used together with measured amplitude and + `thresholds.minAmplOffset` for scaling the amplitude below which + the picker is inactive after a P pick. 
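The triggerOn/triggerOff pair described here implements a classic detector state machine: once the filtered value exceeds triggerOn a detection is emitted, and the picker stays blind until the value drops below triggerOff. A minimal sketch of that logic (plain Python on an arbitrary characteristic function, not scautopick's implementation):

```python
def detections(char_func, trigger_on=3.0, trigger_off=1.5):
    """Emit a detection index whenever char_func crosses trigger_on while
    armed; re-arm only after it falls below trigger_off."""
    armed = True
    picks = []
    for i, value in enumerate(char_func):
        if armed and value >= trigger_on:
            picks.append(i)   # detection (pick) at sample i
            armed = False     # blind between triggerOn and triggerOff
        elif not armed and value < trigger_off:
            armed = True      # detector re-armed
    return picks

print(detections([0.2, 1.0, 3.5, 4.0, 2.0, 1.0, 0.5, 3.2]))  # [2, 7]
```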
Read the documentation! @@ -183,40 +188,49 @@ similar to the trigger threshold. Read the documentation! - + The minimum duration to reach. The duration is measured as the time between trigger on and trigger off. If this value - is configured the detection (pick) will be delayed in order + is configured, the detection (pick) will be delayed in order to compute and check the duration. + + The duration will be reported as comment to the pick when + activating "extraPickComments" allowing the tuning + of the acceptable duration range. - + The maximum duration allowed. The duration is measured as the time between trigger on and trigger off. If this value - is configured the detection (pick) will be delayed in order - to compute and check the duration. + is configured, the detection (pick) will be delayed in order + to compute and check the duration. Negative values + deactivate the check. + + The duration will be reported as comment to the pick when + activating "extraPickComments" allowing the tuning + of the acceptable duration range. - Configure a list of magnitude types. - Update and send amplitudes for these magnitudes as soon as data are - available. Do not wait for complete time windows. - Only magnitudes computed by scautopick as given by the amplitudes parameter are considered. - This option is for rapid magnitude estimation and EEW. + Configure a list of magnitude types. + Update and send amplitudes for these magnitudes as soon as data are + available. Do not wait for complete time windows. + Only magnitudes computed by scautopick as given by the amplitudes parameter are considered. + This option is for rapid magnitude estimation and EEW. - WARNING: This option increases the load on the system! + WARNING: This option increases the load on the system! - Message group for sending amplitudes to. + Message group for sending amplitudes to. @@ -303,8 +317,10 @@ diff --git a/etc/descriptions/scdb.xml b/etc/descriptions/scdb.xml index 520414e..5d49b9f 100644 --- a/etc/descriptions/scdb.xml +++ b/etc/descriptions/scdb.xml @@ -103,6 +103,18 @@ + + + + diff --git a/etc/descriptions/scdbstrip.xml b/etc/descriptions/scdbstrip.xml index 939073b..4f33083 100644 --- a/etc/descriptions/scdbstrip.xml +++ b/etc/descriptions/scdbstrip.xml @@ -137,11 +137,25 @@ + + diff --git a/etc/descriptions/scevent.xml b/etc/descriptions/scevent.xml index 58fa7f5..cd8be94 100644 --- a/etc/descriptions/scevent.xml +++ b/etc/descriptions/scevent.xml @@ -2,8 +2,9 @@ - Associates an Origin to an Event or forms a new Event if no match is found. - Selects the preferred origin, magnitude and focal mechanism. + Associate an Origin to an Event or form a new Event if no match is found. + Select the preferred origin, magnitude and focal mechanism. + Prefix for all Event IDs @@ -29,13 +30,14 @@ [w] is an optional width parameter. - + Configures the number of event ID slots to look back and forth when an event ID is already taken. The default in previous versions was 5. Now -1 means that the margin is - determined automatically based on "eventAssociation.eventTimeBefore" - and "eventAssociation.eventTimeAfter". According to the + determined automatically based on + "eventAssociation.eventTimeBefore" and + "eventAssociation.eventTimeAfter". 
According to the configured "eventIDPattern" a fixed time range per slot can be computed and with that width the number of look ahead slots and look back slots can be computed based on @@ -44,15 +46,16 @@ - If enabled, then the EventDescription with type - 'Flinn-Engdahl region' will be populated with the - Flinn-Engdahl region name. + If enabled, then the EventDescription with type + 'Flinn-Engdahl region' will be populated with the + Flinn-Engdahl region name. - Defines the bind adress of the REST API endpoint. This API - allows to query for possible event associations of origin + Defines the bind address of the REST API of form [address:]port. + This API currently provides one endpoint, /api/1/try-to-associate, + which allows to query for possible event associations of origin candidates. @@ -78,306 +81,373 @@ - Minimum number of Picks for an Origin that is automatic and cannot be - associated with an Event to be allowed to form an new Event. + Minimum number of Picks for an Origin that is automatic + and cannot be associated with an Event to be allowed to + form an new Event. - Minimum score of an automatic Origin to be allowed to - form an new Event. This requires an activated score - plugin and a score processor. Configure "score" - for defining the score processor and the score processor - parameters. If minimumScore is defined, "minimumDefiningPhases" - has no effect on association as this phase check will be - superseded by the score check. It is the task of the score - processor to evaluate a proper score for all input Origins. + Minimum score of an automatic Origin to be allowed to + form an new Event. This requires an activated score + plugin and a score processor. Configure "score" + for defining the score processor and the score processor + parameters. If minimumScore is defined, "minimumDefiningPhases" + has no effect on association as this phase check will be + superseded by the score check. It is the task of the score + processor to evaluate a proper score for all input Origins. - Ignore and do not associate Origins derived - from CMT/MT inversions. + Ignore and do not associate Origins derived + from CMT/MT inversions. - Time range before the Origin time of an incoming Origin to search for - matching events. + Time range before the Origin time of an incoming Origin to + search for matching events. - Time range after the Origin time of an incoming Origin to search for - matching events. + Time range after the Origin time of an incoming Origin to + search for matching events. - Minimum number of matching picks between two Origins to be associated - to the same event. + Minimum number of matching picks between two Origins to be + associated to the same event. - Negative time window: compare only pickIDs to find - matching arrivals. A non negative - value (including 0) compares pick times regardless - of the pickID. Pass: |pick1.time - pick2.time| <= threshold + Negative time window: compare only pickIDs to find + matching arrivals. A non negative + value (including 0) compares pick times regardless + of the pickID. Pass: |pick1.time - pick2.time| <= threshold - This parameter is only used in conjunction with - eventAssociation.maximumMatchingArrivalTimeDiff. - If a station has multiple associated arrivals for a - particular event, this flag defines if the time distance - of a new pick to all arrivals must be within - eventAssociation.maximumMatchingArrivalTimeDiff - or if one matching arrival is enough. 
+ This parameter is only used in conjunction with + "eventAssociation.maximumMatchingArrivalTimeDiff". + If a station has multiple associated arrivals for a + particular event, this flag defines if the time distance + of a new pick to all arrivals must be within + "eventAssociation.maximumMatchingArrivalTimeDiff" + or if one matching arrival is enough. - Allows to match picks that are associated with weight 0. + Allows to match picks that are associated with weight 0. - Associates an Origin with an existing event if the Origin - time differs not more than 60 seconds unless the - minimumMatchingArrivals criteria matches. + Associates an Origin with an existing event if the Origin + time differs not more than 60 seconds unless + "eventAssociation.minimumMatchingArrivals" matches. - Allowed location difference between an incoming Origin compared with - preferred Origins to get associated. + Allowed location difference between an incoming Origin + compared with preferred Origins to get associated. - Magnitude type priority list for becoming a preferred magnitude for an - event. + List of magnitude types considered for computing priorities + in order to declare the preferred magnitude of an event. + Magnitudes not listed have a priority of 0 and are ranked + by station count only. - Example: + Magnitudes must meet + "eventAssociation.minimumMagnitudes" and + "eventAssociation.minMwCount" unless + "eventAssociation.enableFallbackMagnitude" is true. + If listed, Mw-/Mw()-type magnitudes are preferred over all + others. Next, the magnitude type with the largest station + count wins (default) unless + "eventAssociation.magPriorityOverStationCount" is + true. If the station count is equal, priority is highest + for the magnitude earliest in the list. - M, mBc, Mw(mB), Mwp, ML, MLh, MLv, mb - - + If "eventAssociation.magPriorityOverStationCount" is + true, highest priority is first given to magnitude types + earliest in the list before testing the station count. - - - If true, one magnitude will be preferred even if magnitude criteria are - not fullfilled. + Example: + + M, mBc, Mw(mB), Mwp, ML, MLv, mb - Minimum number of station magnitudes referenced to a network magnitude - to become a preferred magnitude. + Minimum number of station magnitudes required for + considering non-Mw or non-Mw() magnitudes, as + preferred magnitude. + + Also consider + "eventAssociation.enableFallbackMagnitude". - Minimum number of station magnitudes required for Mw(mB) to be considered as - preferred magnitude. + Minimum number of station magnitudes required for + considering Mw or any derived moment magnitude, Mw(), as + preferred magnitude. The moment magnitudes must also meet + "eventAssociation.minimumMagnitudes". + + Also consider + "eventAssociation.enableFallbackMagnitude". + + + + + + If true, one magnitude will be preferred even if magnitude + criteria ("eventAssociation.minimumMagnitudes", + "eventAssociation.minMwCount") are not fulfilled + by any magnitude. - Minimum number of station magnitudes which ensures that Mw(mB) will be - preferred and not mb. + Minimum number of station magnitudes which ensures that + Mw(mB) will be preferred and not mb. - Average between mb and Mw(mB) which must be exceeded to become Mw(mB) - preferred. + Average between mb and Mw(mB) which must be exceeded to + become Mw(mB) preferred. - If false then the station count rules out the magnitude priority - which is only taken into account if two magnitudes have the - same station count. 
+ If false, then the station count rules out the magnitude priority + which is only taken into account if two magnitudes have the + same station count. - If true then the priority rules out the station count - which is only taken into account if two magnitudes have the - same priority. + If true, then the priority rules out the station count + which is only taken into account if two magnitudes have the + same priority. - + - The general priority list to decide if an Origin becomes preferred. - The priority decreases in the order of the parameters. - This list is not used unless this parameter is activated. + The general priority list to decide if new Origins become + preferred. - Empty priority list: scevent replicates the default hard wired behaviour: - AGENCY, STATUS, PHASES_AUTOMATIC, TIME_AUTOMATIC + Tokens are processed in the given order. Each token in the + list corresponds to a check that is performed. Each check + computes a score of the incoming Origin (s1) and the + currently preferred Origin (s2). The origin with the higher + score becomes preferred for the event. If s1 equals s2, the + next check in the list is performed. Once a check prefers an + origin, all subsequent checks are ignored. - Each item in the list corresponds to a check that is performed. - Each check computes a score of the incoming Origin (s1) and the - current preferred Origin (s2). If the s1 is lower than s2, - the incoming Origin is rejected and does not become preferred. - All subsequent checks are ignored. - If s1 is equal to s2, the next check in the list is performed. - If s1 is larger than s2, the Origin becomes preferred and - all subsequent checks are ignored. + Available tokens (checks): - Available tokens: + * AGENCY: check based on agency priorities - AGENCY: check based on agency priorities + * AUTHOR: check based on author priorities - AUTHOR: check based on author priorities + * MODE: priority from evaluation mode. Priority values are - MODE: evaluation mode priority: 0 = unset, 1 = automatic, 2 = manual, manual over-rules automatic + 0 : unset - STATUS: priority combined from evaluation status and evaluation mode: - -100 = status is rejected, -1 = status is reported, - 0 = status is preliminary or status is unset and mode is automatic, - 1 = status is confirmed or status is unset and mode is manual, - 2 = status is reviewed, 3 = status is final, + 1 : automatic - METHOD: check based on the method priorities + 2 : manual, manual over-rules automatic - PHASES: higher phase count = higher priority + * STATUS: priority combined from evaluation status and + evaluation mode. Priority values are - PHASES_AUTOMATIC: only checks phase priorities for incoming automatic Origins + -100 : status is rejected - RMS: lower rms = higher priority + -1 : status is reported - RMS_AUTOMATIC: only check RMS on incoming automatic Origins + 0 : status is preliminary or status is unset and mode is automatic - TIME: more recent Origins (creationTime) have higher priorities + 1 : status is confirmed or status is unset and mode is manual - TIME_AUTOMATIC: only check creationTime priority on incoming automatic Origins + 2 : status is reviewed - SCORE: evaluates the score according to a configured ScoreProcessor and - prefers the Origin/Focalmechanism with the highest score. + 3 : status is final + + * METHOD: priority based on the methods defined in "eventAssociation.methods" + + * PHASES: higher phase count = higher priority + + * PHASES_AUTOMATIC: only checks phase priorities for incoming + automatic Origins. 
Higher phase count = higher priority. + + * RMS: lower RMS = higher priority. + + * RMS_AUTOMATIC: only check RMS on incoming automatic Origins. + Lower RMS = higher priority. + + * TIME: more recent Origins (creationTime) have higher priorities. + + * TIME_AUTOMATIC: only check creationTime of incoming + automatic Origins. More recent Origins (creationTime) have higher priorities. + + * SCORE: priority based on the score according to a configured + ScoreProcessor and prefers the Origin/Focalmechanism with + the highest score. + + Empty priority list replicates the hard-wired default + behaviour: AGENCY,STATUS,PHASES_AUTOMATIC,TIME_AUTOMATIC - The agencyID priority list. When the eventtool comes to the point to - select a preferred Origin based on AGENCY it orders all Origins by its agency priority and - selects then the best one among the highest priority agency. It also defines the - agency priority for custom priority checks - (eventAssociation.priorities). + The priority list of agency IDs. When scevent comes to the + point to select a preferred Origin based on AGENCY it orders + all Origins by its agency priority selecting the best one + among the highest priority agency. It also defines the + agency priority for custom priority checks. - The parameter is only considered when defined in "priorities". + The parameter is only considered when AGENCY is used in + "eventAssociation.priorities". - The author priority list. When the eventtool comes to the point to - select a preferred Origin based on AUTHOR it orders all Origins by its author priority and - selects then the best one among the highest priority author. It also defines the - author priority for custom priority checks (eventAssociation.priorities). + The author priority list. When scevent comes to the + point to select a preferred Origin based on AUTHOR it orders + all Origins by its author priority and selects then the best + one among the highest priority author. It also defines the + author priority for custom priority checks. - The parameter is only considered when defined in "priorities". + The parameter is only considered when AUTHOR is used + in "eventAssociation.priorities". - The method priority list. When the eventtool comes to the point to - select a preferred Origin based on METHOD it orders all Origins by its methodID priority and - selects then the best one among the highest priority method. It also defines the - method priority for custom priority checks (eventAssociation.priorities). - A defined method string must match exactly the string in Origin.methodID. + The method priority list. When the scevent comes to the + point to select a preferred Origin based on METHOD it orders + all Origins by its methodID priority and selects then the + best one among the highest priority method. It also defines + the method priority for custom priority checks. A defined + method string must match exactly the string in + Origin.methodID. - The parameter is only considered when defined in "priorities". + The parameter is only considered when METHODS is used + in "eventAssociation.priorities". - Defines the ScoreProcessor interface to be used along - with priority "SCORE". + Defines the ScoreProcessor interface to be used along + with priority "SCORE" when defined in + "eventAssociation.priorities". + + - The parameter is only considered when defined in "priorities". + + + Enables the selection of the preferred focalmechanism. 
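The check cascade for selecting the preferred origin can be condensed into a few lines. This is a simplified sketch of the documented decision logic only; the score functions below are stand-ins, not scevent's internal checks:

```python
def incoming_becomes_preferred(incoming, preferred, checks):
    """Each check scores both origins; the first check whose scores differ
    decides, all later checks are ignored. Ties keep the current origin."""
    for score in checks:
        s1, s2 = score(incoming), score(preferred)
        if s1 != s2:
            return s1 > s2
    return False

# Stand-ins loosely resembling the AGENCY and PHASES tokens
agency_rank = {"GFZ": 2, "EMSC": 1}
checks = [
    lambda o: agency_rank.get(o["agency"], 0),   # AGENCY
    lambda o: o["phase_count"],                  # PHASES
]
new = {"agency": "EMSC", "phase_count": 40}
cur = {"agency": "EMSC", "phase_count": 25}
print(incoming_becomes_preferred(new, cur, checks))  # True: more phases
```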
If set + to false then only explicit commands will have effect on the + preferred focalmechanism selection, namely the EvPrefFocMecID + command. - If the preferred Origin has evaluation status 'rejected', the - Event type will be set to 'not existing' unless the Event - type has been fixed by an operator or the preferred Origin - has been fixed. + If the preferred Origin has evaluation status 'rejected', the + Event type will be set to 'not existing' unless the Event + type has been fixed by an operator or the preferred Origin + has been fixed. - + - Configures a timespan to delay Event creation. If a new Origin arrives - which cannot be associated to an existing Event, delay the Event creation for a certain - timespan. + The timespan to delay Event creation from new Origins which + cannot be associated to an existing Event. Region filter for creating events. Use with care! Origins - outside may be ignored even if they would - become preferred otherwise. + outside may be ignored even if they would become preferred + otherwise. Empty value deactivates testing this parameter. - + - Region by geographic coordinates. + Region by geographic coordinates. Empty value + deactivates testing this parameter. - Format: "South, East, North, West" + Format: "South,East,North,West" - + - Minimum depth. + Minimum depth. Empty value deactivates testing this + parameter. - Maximum depth. + Maximum depth. Empty value deactivates testing this + parameter. - The delayFilter group configures an Origin filter to activate the delay feature for - this Origin. If more than one filter is given they are combined with AND. + Configure an Origin filter to delay Origin for being + processed. If more than one filter is given they are + combined with AND. The application requires + eventAssociation.delayTimeSpan > 0. - The agencyID of the Origin to be delayed. + + The agency ID of the Origin to be delayed. + - The author of the Origin to be delayed. - - - The evaluation mode of the Origin to be delayed. Can be either "manual" - or "automatic". + The author of the Origin to be delayed. + + + + + The evaluation mode of the Origin to be delayed. @@ -398,7 +468,7 @@ @@ -433,6 +503,44 @@ database#inventory-db database#db-disable + + + + + + + + + + diff --git a/etc/descriptions/scevent_multifeature.xml b/etc/descriptions/scevent_multifeature.xml index 9bad8c7..00305c1 100644 --- a/etc/descriptions/scevent_multifeature.xml +++ b/etc/descriptions/scevent_multifeature.xml @@ -27,6 +27,23 @@ depth and RMS. + + + + This is the maximum allowed depth. Origins with + depth greater then this value get a score equal to + "defaultScore". + + + + + + This is the maximum allowed RMS. Origins with + residual greater than this value score of equal to + "defaultScore". + + + @@ -76,14 +93,11 @@ - + Origin depth is normalized to this value for computing the score contribution. Shallower depths contribute to larger score. - This is also the maximum allowed depth. Origins with - depth greater then this value get a score equal to - "defaultScore". @@ -93,14 +107,11 @@ - + Origin RMS is normalized to this value for computing the score contribution. Lower RMS contribute to larger score. - This is also the maximum allowed RMS. Origins with - residual greater than this value score of equal to - "defaultScore". 
diff --git a/etc/descriptions/scevtlog.xml b/etc/descriptions/scevtlog.xml index 18ac721..43312a7 100644 --- a/etc/descriptions/scevtlog.xml +++ b/etc/descriptions/scevtlog.xml @@ -9,12 +9,11 @@ directory structure and the event files are stored. - + - Specify output event format (default is autoloc3). For completeness - it is recommended to switch to xml as storage format. The autoloc3 - format can be easily reconstructed with scbulletin but not the other - way around. + Event output format. For completeness it is recommended to stick + with the default 'xml' as it can be converted to 'autoloc1' and + 'autoloc3' by scbulletin but not the other way around. diff --git a/etc/descriptions/scevtstreams.xml b/etc/descriptions/scevtstreams.xml index 27ce4d5..6516588 100644 --- a/etc/descriptions/scevtstreams.xml +++ b/etc/descriptions/scevtstreams.xml @@ -2,7 +2,8 @@ - Extract stream information with time windows from picks of an event. + Extract stream information and time windows from picks of an event or + solitary picks. @@ -42,8 +43,8 @@ diff --git a/etc/descriptions/scmaster.xml b/etc/descriptions/scmaster.xml index b580c06..031331a 100644 --- a/etc/descriptions/scmaster.xml +++ b/etc/descriptions/scmaster.xml @@ -189,6 +189,15 @@ consequences are. + + + Enables removing the entire object tree from + the database if a parent object is being deleted. + This will also decrease the numbers of notifiers + sent to scmaster. Depending on the database + backend, it can improve the performance. + + diff --git a/etc/descriptions/scmsdemux.xml b/etc/descriptions/scmsdemux.xml new file mode 100644 index 0000000..119c403 --- /dev/null +++ b/etc/descriptions/scmsdemux.xml @@ -0,0 +1,23 @@ + + + + Demultiplex miniSEED record creating files per stream + + + scmsdemux [OPTION] source + + + + + + + + diff --git a/etc/descriptions/scmvx.xml b/etc/descriptions/scmvx.xml new file mode 100644 index 0000000..81de92d --- /dev/null +++ b/etc/descriptions/scmvx.xml @@ -0,0 +1,181 @@ + + + + + Map view showing maps with stations and events. Issues related to + configuration of stations are indicated. + + + + + Set one of the available display modes used during startup: + groundmotion or qualitycontrol. When empty, the Network tab is + shown. + + + + + Sets the time span before current time to read events initially + from database. + + + + + If a new origin/event is set/selected, this option defines if + the map is centered or not at the origin location. + + + + + Enables/disabled drawing of station annotations at startup. + + + + + Enable/disable drawing of station annotations with + location/channel codes as NET.STA.LOC.CHA. + + + + + Enable/disable drawing of stations which are not bound with global bindings. + + + + + The legend location for station symbols (network, QC, ground motion). + + + + + The legend location for event symbols. + + + + + + + + + Time to keep waveform data in memory + + + + + + + + Sets the filter applied to determine ground motion. + + + + + + The initial rectangular region for the map. The eventual region + is also scaled to the dimension of the opened map widget. + + + Minimum latitude in degrees. + + + Minimum longitude in degrees. + + + Maximum latitude in degrees. + + + Maximum longitude in degrees. 
+ + + + + + scmvx [options] + + + verbosity#verbosity + verbosity#v + verbosity#quiet + verbosity#component + verbosity#syslog + verbosity#lockfile + verbosity#console + verbosity#debug + verbosity#log-file + verbosity#print-context + verbosity#print-component + verbosity#log-utc + verbosity#trace + + + + generic#help + generic#version + generic#config-file + generic#plugins + generic#auto-shutdown + generic#shutdown-master-module + generic#shutdown-master-username + generic#print-config-vars + generic#validate-schema-params + generic#dump-settings + + + + messaging#user + messaging#host + messaging#timeout + messaging#primary-group + messaging#subscribe-group + messaging#content-type + messaging#start-stop-msg + + + + database#db-driver-list + database#database + database#config-module + database#inventory-db + database#db-disable + + + + records#record-driver-list + records#record-url + records#record-file + records#record-type + + + + cities#city-xml + + + + gui#full-screen + gui#non-interactive + + + + + + + + + + + diff --git a/etc/descriptions/scolv.xml b/etc/descriptions/scolv.xml index 2759c4b..990cf4e 100644 --- a/etc/descriptions/scolv.xml +++ b/etc/descriptions/scolv.xml @@ -89,12 +89,13 @@ list of the "Commit with additional options" dialog. - + - Sets the default magnitude aggregation method. It can be either "mean", - "trimmed mean" or "median". If not set, the default - behavior is used which computes the mean if less than 4 stations are available, - trimmed mean otherwise. + Sets the default magnitude aggregation method. + If not set, the default behavior is used which computes the + mean if less than 4 stations are available, trimmed mean otherwise. + Neither of the above values can take an additional parameter. + The default parameter values (if supported) will be used. @@ -146,6 +147,12 @@ The default value for adding unassociated stations in the picker. + + + If enabled then all station within the configured distance will + be loaded if a new origin is loaded. + + If enabled, all traces without data and arrivals are @@ -237,6 +244,44 @@ + + + Set default options for the "Import picks" dialog. + + + + Defines the default mode of import picks. For more information + see the "Import picks" dialog. + + + + + Defines a list of accepted or denied phases when importing picks. + This list will be used to populate the corresponding input + field in the "Import picks" dialog. + + A phase which is prepended with a minus, e.g. "-P", + will be denied. + + + + + + Configures the default for "Import picks from all agencies ...". + + + + + Configures the default for "Import all phases ...". + + + + + Configures the default for "Prefer phases of target ...". + + + + @@ -266,6 +311,12 @@ + + + Define a list of agencyIDs which can be used as override + for the origin agencyID when committing with options. + + A list of origin comment profiles. These profiles will be @@ -407,6 +458,71 @@ + + + Configuration of custom commands shown in a menu when pressing + the Run button next to the custom script buttons. Actions + defined here will appear in order of listing. A dash may be + use to insert a separator. + + + Definition of a command menu action. + + + Controls whether this action will be available. + + + + + Command to be executed. Similar to the custom script + buttons the command is launched with the current + origin ID as first argument and the eventID as + second argument if available. Optionally the entire + origin may be written to stdin if an exporter is + defined. 
+ + + + + Name of the SeisComP exporter used to serialize + the current origin when writing it to stdin of + the specified command. If empty no data will be + send to stdin. See 'sccnv --list-formats' for a + list of available options. + + Note: Some of the exporters are implemented + as plugins which need to be loaded before they + can be used. + + + + + Automatically open process manager for progress + monitoring. The process manager may also be started + manually via the View menu bar entry or via an icon + at the right of the status bar. + + + + + Name of the action. Use in the launch menu and in + the process manager. + + + + Path to an icon to show for this action. + + + + Keyboard shortcut to run this action without + the need to open the menu. E.g., 'Ctrl+Alt+A' + + + + Tool tip for this action. + + + A list of magnitude comment profiles. These profiles will be @@ -470,7 +586,7 @@ - Limit the data acquisituion to the given number of + Limit the data acquisition to the given number of the nearest stations. Requires to activate "picker.limitStationAcquisition". @@ -542,6 +658,16 @@ side of the trace window. + + + The initially applied rotation component waveform rotation. + + + + + The unit the waveform data is converted to initially. + + Start of the re-picker time window relative the cursor position on the trace. @@ -558,6 +684,15 @@ "name1;filter-definition1", "name2;filter-definition2" + + + Whether to apply the current filter only to the zoom trace or + all traces. If enabled then the current filter will only be + applied to another trace if it becomes the current trace. This + is mainly for performance reasons as filtering hundreds or + thousands of channels can take much time. + + A list of channel codes to be used when searching for @@ -605,6 +740,16 @@ component will stay the same. It must be changed explicitely. + + + If enabled then the picker will show in the upper right corner of + the zoomtrace an amplitude measure ranging from 0 to 100 and mapping + the current amplitude at the cursor to this range where 0 is the bottom + of the widget and 100 the top of the widget. Screen readers should + fetch this value change and read them. This is especially important + to support visually impaired users. + + Define the notion and behaviour of auxiliary channels. @@ -617,6 +762,9 @@ A list of wildcard patterns to identify auxiliary channels. A pattern is simply checked against a stream ID, the concatenation of NSLC separated by a period. + + This is an obsolete and deprecated setting. Please use + profiles. @@ -625,6 +773,9 @@ unassociated auxiliary channel. Loading a channel / station explicitely (F3) will not respect this setting. + + This is an obsolete and deprecated setting. Please use + profiles. @@ -633,8 +784,43 @@ unassociated auxiliary channel. Loading a channel / station explicitely (F3) will not respect this setting. + + This is an obsolete and deprecated setting. Please use + profiles. + + + + + A list of auxiliary channel profiles which are enabled. + + + + + A list of wildcard patterns to identify auxiliary + channels. A pattern is simply checked against a stream + ID, the concatenation of NSLC separated by a period. + + + + + The minimum distance in degrees from origin to load an + unassociated auxiliary channel. Loading a + channel / station explicitely (F3) will not respect + this setting. + + + + + The maximum distance in degrees from origin to load an + unassociated auxiliary channel. 
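Since a custom command receives the origin ID as first argument, the event ID as optional second argument and, if an exporter is configured, the serialized origin on stdin, a target script can be as small as the following hypothetical example (the actual processing is left out):

```python
#!/usr/bin/env python3
# Hypothetical target of a scolv custom command action.
import sys

origin_id = sys.argv[1]
event_id = sys.argv[2] if len(sys.argv) > 2 else None
payload = sys.stdin.read() if not sys.stdin.isatty() else ""

print(f"origin={origin_id} event={event_id} payload_bytes={len(payload)}")
```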
Loading a + channel / station explicitely (F3) will not respect + this setting. + + + + @@ -731,6 +917,34 @@ amplitude time window. + + + In case the amplitude time window cannot be computed due + to errors, e.g. computing travel times, this default noise + window start time will be used instead. + + + + + In case the amplitude time window cannot be computed due + to errors, e.g. computing travel times, this default noise + window end time will be used instead. + + + + + In case the amplitude time window cannot be computed due + to errors, e.g. computing travel times, this default signal + window start time will be used instead. + + + + + In case the amplitude time window cannot be computed due + to errors, e.g. computing travel times, this default signal + window end time will be used instead. + + A list of filters used for the amplitude picker. Format: diff --git a/etc/descriptions/scqcv.xml b/etc/descriptions/scqcv.xml index bae166f..b4bbcbd 100644 --- a/etc/descriptions/scqcv.xml +++ b/etc/descriptions/scqcv.xml @@ -118,11 +118,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -157,11 +152,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -204,11 +194,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -243,11 +228,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -289,11 +269,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -328,11 +303,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -375,11 +345,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -414,11 +379,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -461,11 +421,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream. - - Range parameters overriding parameters from the general @@ -500,11 +455,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -546,11 +496,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -585,11 +530,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -631,11 +571,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -670,11 +605,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -718,11 +648,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -757,11 +682,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -803,11 +723,6 @@ Activate to display absolute values (modulus). 
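The picker filter lists mentioned above combine a display name and a filter definition in each entry, separated by a semicolon ("name1;filter-definition1", "name2;filter-definition2"). A small sketch of how such entries decompose; the two example entry strings are purely illustrative and not taken from this changeset:

#!/usr/bin/env python3
# Sketch of the "name;filter-definition" entry format referenced above.
entries = [
    "BP 0.7 - 2 Hz;BW(3,0.7,2)",   # display name ; filter definition
    "HP 3 Hz;BW_HP(3,3)",
]

filters = {}
for entry in entries:
    name, definition = entry.split(";", 1)   # split on the first semicolon only
    filters[name] = definition

print(filters)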
- - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -842,11 +757,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -888,11 +798,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -927,11 +832,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -974,11 +874,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -1013,11 +908,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1059,11 +949,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - @@ -1099,11 +984,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1146,11 +1026,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - @@ -1186,11 +1061,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1233,11 +1103,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -1272,11 +1137,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1319,11 +1179,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -1358,11 +1213,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1405,11 +1255,6 @@ Activate to display absolute values (modulus). - - - Possible values: enableStream, disableStream - - Range parameters overriding parameters from the general @@ -1444,11 +1289,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - @@ -1568,11 +1408,6 @@ A color defined by the color definitions below. - - - Possible values: enableStream, disableStream - - diff --git a/etc/descriptions/screloc.xml b/etc/descriptions/screloc.xml index 000c93c..d41f4bb 100644 --- a/etc/descriptions/screloc.xml +++ b/etc/descriptions/screloc.xml @@ -72,6 +72,25 @@ + + + Parameters specific to picks. + + + + List of streams from which picks are set to unsed by their + referencing arrivals before locating. The concerned arrivals + are kept but the picks will not be used for locating. + Streams take the format NET.STA.LOC.CHA. Wildcards * and ? + are supported. + + Example: + + GE.MOX.*.HH? : All arrivals on any HH stream of station MOX + will be kept but not used for locating. + + + @@ -116,30 +135,46 @@ + - - - - - + + + + + + + + diff --git a/etc/descriptions/screpick.xml b/etc/descriptions/screpick.xml index ea24957..6685d7e 100644 --- a/etc/descriptions/screpick.xml +++ b/etc/descriptions/screpick.xml @@ -61,7 +61,7 @@ @@ -78,6 +78,14 @@ + + + diff --git a/etc/descriptions/scrttv.xml b/etc/descriptions/scrttv.xml index 5881b10..239761c 100644 --- a/etc/descriptions/scrttv.xml +++ b/etc/descriptions/scrttv.xml @@ -51,7 +51,7 @@ This parameter exists mainly for backward compatibility. 
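The pick stream patterns added for screloc above are wildcard expressions checked against the NET.STA.LOC.CHA stream ID of each pick; the quoted example GE.MOX.*.HH? keeps arrivals on any HH stream of station MOX but excludes them from locating. A sketch of that matching under ordinary glob semantics (the helper below is an illustration, not the actual implementation):

#!/usr/bin/env python3
# Sketch of the wildcard matching described above: a pattern such as
# "GE.MOX.*.HH?" is compared against the NET.STA.LOC.CHA stream ID of a
# pick. Matching picks are kept but not used for locating.
from fnmatch import fnmatchcase

def matches(stream_id, patterns):
    return any(fnmatchcase(stream_id, p) for p in patterns)

patterns = ["GE.MOX.*.HH?"]
print(matches("GE.MOX..HHZ", patterns))   # True  -> arrival kept, pick not used
print(matches("GE.UGM..BHZ", patterns))   # False -> pick still used for locating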
- + Define a list of filters that is cycles through when pressing 'G' or 'D'. Filtering is toggled with 'F'. @@ -104,10 +104,9 @@ Configure the initial stream sorting. - + - The sort mode applied initially. Allowed values - are: config, distance, station, network, group. + The mode applied initially for sorting traces. @@ -158,7 +157,7 @@ e.g. "GE.MORC..BHZ". - + Number of rows to show at once in one windows. If more traces than rows are loaded, the are accessible by a scroll bar. @@ -228,7 +227,8 @@ - Define the trace pen of the group. + Define the trace pen of the group. Read the SeisComP + GUI documenation for full details. @@ -259,16 +259,14 @@ 0:FFBF00,1:C70039 - + - The style of the pen. Supported values are: NoPen, - SolidLine, DashLine, DotLine, DashDotLine, - DashDotDotLine. + The line style of the pen. - The width of the pen. + The line width of the pen. @@ -336,7 +334,7 @@ - + @@ -347,9 +345,10 @@ - Properties of the area below the minimum. + Properties of the area below the minimum. Read + the SeisComP GUI documenation for full details. - + @@ -370,13 +369,14 @@ - Line properties. + Line properties. Read the SeisComP GUI + documenation for full details. - + @@ -387,9 +387,10 @@ - Properties of the area above the maximum. + Properties of the area above the maximum. Read + the SeisComP GUI documenation for full details. - + @@ -439,15 +440,14 @@ Whether to use logarithmic frequency scale. - + - Which amplitude normalization mode to use: "fixed", - "frequency" or "time". + The amplitude normalization mode to use. - Whether to show the frequency axis. + Show the frequency axis. @@ -593,10 +593,12 @@ - + Specify the script to be called if an amplitude arrives, network-, stationcode and amplitude are passed as parameters $1, $2 and $3. - + Specify the script to be called if a preliminary origin arrives, latitude and longitude are passed as parameters $1 and $2. - + Specify the script to be called when an event has been declared; the message string, a flag (1=new event, diff --git a/etc/descriptions/scwfas.xml b/etc/descriptions/scwfas.xml index f809d0c..0ce1ab0 100644 --- a/etc/descriptions/scwfas.xml +++ b/etc/descriptions/scwfas.xml @@ -51,15 +51,6 @@ as input to setSource(). - - - - The server port for Arclink connections. -1 - deactivates the Arclink server. The standard Arclink port is - 18001. - - - diff --git a/etc/descriptions/scxmldump.xml b/etc/descriptions/scxmldump.xml index b070556..0552d15 100644 --- a/etc/descriptions/scxmldump.xml +++ b/etc/descriptions/scxmldump.xml @@ -2,7 +2,7 @@ - Dump database objects to XML files. + Dump objects from database or messaging to XML. @@ -50,12 +50,6 @@ - @@ -79,7 +73,7 @@ @@ -95,25 +89,28 @@ - - + + + + +
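The amplitude script hook described above passes the network code, station code and amplitude value as positional parameters $1, $2 and $3. A minimal receiving script under that assumption (the log file path is only an example):

#!/usr/bin/env python3
# Sketch of an amplitude script: $1 = network code, $2 = station code,
# $3 = amplitude value, per the calling convention described above.
import sys

network, station, amplitude = sys.argv[1], sys.argv[2], float(sys.argv[3])
with open("/tmp/amplitude-alerts.log", "a", encoding="utf-8") as log:
    log.write(f"{network}.{station}: amplitude {amplitude}\n")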